/* PR ipa/64481 — gcc/cfgrtl.c
   (git blob f20fa7a69a9688c4d1225fae8520afa0630b202d, official-gcc.git)  */
1 /* Control flow graph manipulation code for GNU compiler.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains low level functions to manipulate the CFG and analyze it
21 that are aware of the RTL intermediate language.
23 Available functionality:
24 - Basic CFG/RTL manipulation API documented in cfghooks.h
25 - CFG-aware instruction chain manipulation
26 delete_insn, delete_insn_chain
27 - Edge splitting and committing to edges
28 insert_insn_on_edge, commit_edge_insertions
29 - CFG updating after insn simplification
30 purge_dead_edges, purge_all_dead_edges
31 - CFG fixing after coarse manipulation
32 fixup_abnormal_edges
34 Functions not supposed for generic use:
35 - Infrastructure to determine quickly basic block for insn
36 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
37 - Edge redirection with updating and optimizing of insn chain
38 block_label, tidy_fallthru_edge, force_nonfallthru */
40 #include "config.h"
41 #include "system.h"
42 #include "coretypes.h"
43 #include "tm.h"
44 #include "hash-set.h"
45 #include "machmode.h"
46 #include "vec.h"
47 #include "double-int.h"
48 #include "input.h"
49 #include "alias.h"
50 #include "symtab.h"
51 #include "wide-int.h"
52 #include "inchash.h"
53 #include "tree.h"
54 #include "hard-reg-set.h"
55 #include "predict.h"
56 #include "hashtab.h"
57 #include "function.h"
58 #include "dominance.h"
59 #include "cfg.h"
60 #include "cfgrtl.h"
61 #include "cfganal.h"
62 #include "cfgbuild.h"
63 #include "cfgcleanup.h"
64 #include "basic-block.h"
65 #include "bb-reorder.h"
66 #include "regs.h"
67 #include "flags.h"
68 #include "except.h"
69 #include "rtl-error.h"
70 #include "tm_p.h"
71 #include "obstack.h"
72 #include "insn-attr.h"
73 #include "insn-config.h"
74 #include "expr.h"
75 #include "target.h"
76 #include "common/common-target.h"
77 #include "cfgloop.h"
78 #include "ggc.h"
79 #include "tree-pass.h"
80 #include "df.h"
82 /* Holds the interesting leading and trailing notes for the function.
83 Only applicable if the CFG is in cfglayout mode. */
84 static GTY(()) rtx_insn *cfg_layout_function_footer;
85 static GTY(()) rtx_insn *cfg_layout_function_header;
87 static rtx_insn *skip_insns_after_block (basic_block);
88 static void record_effective_endpoints (void);
89 static rtx label_for_bb (basic_block);
90 static void fixup_reorder_chain (void);
92 void verify_insn_chain (void);
93 static void fixup_fallthru_exit_predecessor (void);
94 static int can_delete_note_p (const rtx_note *);
95 static int can_delete_label_p (const rtx_code_label *);
96 static basic_block rtl_split_edge (edge);
97 static bool rtl_move_block_after (basic_block, basic_block);
98 static int rtl_verify_flow_info (void);
99 static basic_block cfg_layout_split_block (basic_block, void *);
100 static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
101 static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
102 static void cfg_layout_delete_block (basic_block);
103 static void rtl_delete_block (basic_block);
104 static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
105 static edge rtl_redirect_edge_and_branch (edge, basic_block);
106 static basic_block rtl_split_block (basic_block, void *);
107 static void rtl_dump_bb (FILE *, basic_block, int, int);
108 static int rtl_verify_flow_info_1 (void);
109 static void rtl_make_forwarder_block (edge);
111 /* Return true if NOTE is not one of the ones that must be kept paired,
112 so that we may simply delete it. */
114 static int
115 can_delete_note_p (const rtx_note *note)
117 switch (NOTE_KIND (note))
119 case NOTE_INSN_DELETED:
120 case NOTE_INSN_BASIC_BLOCK:
121 case NOTE_INSN_EPILOGUE_BEG:
122 return true;
124 default:
125 return false;
129 /* True if a given label can be deleted. */
131 static int
132 can_delete_label_p (const rtx_code_label *label)
134 return (!LABEL_PRESERVE_P (label)
135 /* User declared labels must be preserved. */
136 && LABEL_NAME (label) == 0
137 && !in_expr_list_p (forced_labels, label));
140 /* Delete INSN by patching it out. */
142 void
143 delete_insn (rtx uncast_insn)
145 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
146 rtx note;
147 bool really_delete = true;
149 if (LABEL_P (insn))
151 /* Some labels can't be directly removed from the INSN chain, as they
152 might be references via variables, constant pool etc.
153 Convert them to the special NOTE_INSN_DELETED_LABEL note. */
154 if (! can_delete_label_p (as_a <rtx_code_label *> (insn)))
156 const char *name = LABEL_NAME (insn);
157 basic_block bb = BLOCK_FOR_INSN (insn);
158 rtx_insn *bb_note = NEXT_INSN (insn);
160 really_delete = false;
161 PUT_CODE (insn, NOTE);
162 NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
163 NOTE_DELETED_LABEL_NAME (insn) = name;
165 /* If the note following the label starts a basic block, and the
166 label is a member of the same basic block, interchange the two. */
167 if (bb_note != NULL_RTX
168 && NOTE_INSN_BASIC_BLOCK_P (bb_note)
169 && bb != NULL
170 && bb == BLOCK_FOR_INSN (bb_note))
172 reorder_insns_nobb (insn, insn, bb_note);
173 BB_HEAD (bb) = bb_note;
174 if (BB_END (bb) == bb_note)
175 BB_END (bb) = insn;
179 remove_node_from_insn_list (insn, &nonlocal_goto_handler_labels);
182 if (really_delete)
184 /* If this insn has already been deleted, something is very wrong. */
185 gcc_assert (!insn->deleted ());
186 if (INSN_P (insn))
187 df_insn_delete (insn);
188 remove_insn (insn);
189 insn->set_deleted ();
192 /* If deleting a jump, decrement the use count of the label. Deleting
193 the label itself should happen in the normal course of block merging. */
194 if (JUMP_P (insn))
196 if (JUMP_LABEL (insn)
197 && LABEL_P (JUMP_LABEL (insn)))
198 LABEL_NUSES (JUMP_LABEL (insn))--;
200 /* If there are more targets, remove them too. */
201 while ((note
202 = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
203 && LABEL_P (XEXP (note, 0)))
205 LABEL_NUSES (XEXP (note, 0))--;
206 remove_note (insn, note);
210 /* Also if deleting any insn that references a label as an operand. */
211 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
212 && LABEL_P (XEXP (note, 0)))
214 LABEL_NUSES (XEXP (note, 0))--;
215 remove_note (insn, note);
218 if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (insn))
220 rtvec vec = table->get_labels ();
221 int len = GET_NUM_ELEM (vec);
222 int i;
224 for (i = 0; i < len; i++)
226 rtx label = XEXP (RTVEC_ELT (vec, i), 0);
228 /* When deleting code in bulk (e.g. removing many unreachable
229 blocks) we can delete a label that's a target of the vector
230 before deleting the vector itself. */
231 if (!NOTE_P (label))
232 LABEL_NUSES (label)--;
237 /* Like delete_insn but also purge dead edges from BB. */
239 void
240 delete_insn_and_edges (rtx_insn *insn)
242 bool purge = false;
244 if (INSN_P (insn)
245 && BLOCK_FOR_INSN (insn)
246 && BB_END (BLOCK_FOR_INSN (insn)) == insn)
247 purge = true;
248 delete_insn (insn);
249 if (purge)
250 purge_dead_edges (BLOCK_FOR_INSN (insn));
253 /* Unlink a chain of insns between START and FINISH, leaving notes
254 that must be paired. If CLEAR_BB is true, we set bb field for
255 insns that cannot be removed to NULL. */
257 void
258 delete_insn_chain (rtx start, rtx finish, bool clear_bb)
260 rtx_insn *prev, *current;
262 /* Unchain the insns one by one. It would be quicker to delete all of these
263 with a single unchaining, rather than one at a time, but we need to keep
264 the NOTE's. */
265 current = safe_as_a <rtx_insn *> (finish);
266 while (1)
268 prev = PREV_INSN (current);
269 if (NOTE_P (current) && !can_delete_note_p (as_a <rtx_note *> (current)))
271 else
272 delete_insn (current);
274 if (clear_bb && !current->deleted ())
275 set_block_for_insn (current, NULL);
277 if (current == start)
278 break;
279 current = prev;
283 /* Create a new basic block consisting of the instructions between HEAD and END
284 inclusive. This function is designed to allow fast BB construction - reuses
285 the note and basic block struct in BB_NOTE, if any and do not grow
286 BASIC_BLOCK chain and should be used directly only by CFG construction code.
287 END can be NULL in to create new empty basic block before HEAD. Both END
288 and HEAD can be NULL to create basic block at the end of INSN chain.
289 AFTER is the basic block we should be put after. */
291 basic_block
292 create_basic_block_structure (rtx_insn *head, rtx_insn *end, rtx_note *bb_note,
293 basic_block after)
295 basic_block bb;
297 if (bb_note
298 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
299 && bb->aux == NULL)
301 /* If we found an existing note, thread it back onto the chain. */
303 rtx_insn *after;
305 if (LABEL_P (head))
306 after = head;
307 else
309 after = PREV_INSN (head);
310 head = bb_note;
313 if (after != bb_note && NEXT_INSN (after) != bb_note)
314 reorder_insns_nobb (bb_note, bb_note, after);
316 else
318 /* Otherwise we must create a note and a basic block structure. */
320 bb = alloc_block ();
322 init_rtl_bb_info (bb);
323 if (!head && !end)
324 head = end = bb_note
325 = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
326 else if (LABEL_P (head) && end)
328 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
329 if (head == end)
330 end = bb_note;
332 else
334 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
335 head = bb_note;
336 if (!end)
337 end = head;
340 NOTE_BASIC_BLOCK (bb_note) = bb;
343 /* Always include the bb note in the block. */
344 if (NEXT_INSN (end) == bb_note)
345 end = bb_note;
347 BB_HEAD (bb) = head;
348 BB_END (bb) = end;
349 bb->index = last_basic_block_for_fn (cfun)++;
350 bb->flags = BB_NEW | BB_RTL;
351 link_block (bb, after);
352 SET_BASIC_BLOCK_FOR_FN (cfun, bb->index, bb);
353 df_bb_refs_record (bb->index, false);
354 update_bb_for_insn (bb);
355 BB_SET_PARTITION (bb, BB_UNPARTITIONED);
357 /* Tag the block so that we know it has been used when considering
358 other basic block notes. */
359 bb->aux = bb;
361 return bb;
364 /* Create new basic block consisting of instructions in between HEAD and END
365 and place it to the BB chain after block AFTER. END can be NULL to
366 create a new empty basic block before HEAD. Both END and HEAD can be
367 NULL to create basic block at the end of INSN chain. */
369 static basic_block
370 rtl_create_basic_block (void *headp, void *endp, basic_block after)
372 rtx_insn *head = (rtx_insn *) headp;
373 rtx_insn *end = (rtx_insn *) endp;
374 basic_block bb;
376 /* Grow the basic block array if needed. */
377 if ((size_t) last_basic_block_for_fn (cfun)
378 >= basic_block_info_for_fn (cfun)->length ())
380 size_t new_size =
381 (last_basic_block_for_fn (cfun)
382 + (last_basic_block_for_fn (cfun) + 3) / 4);
383 vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
386 n_basic_blocks_for_fn (cfun)++;
388 bb = create_basic_block_structure (head, end, NULL, after);
389 bb->aux = NULL;
390 return bb;
393 static basic_block
394 cfg_layout_create_basic_block (void *head, void *end, basic_block after)
396 basic_block newbb = rtl_create_basic_block (head, end, after);
398 return newbb;
401 /* Delete the insns in a (non-live) block. We physically delete every
402 non-deleted-note insn, and update the flow graph appropriately.
404 Return nonzero if we deleted an exception handler. */
406 /* ??? Preserving all such notes strikes me as wrong. It would be nice
407 to post-process the stream to remove empty blocks, loops, ranges, etc. */
409 static void
410 rtl_delete_block (basic_block b)
412 rtx_insn *insn, *end;
414 /* If the head of this block is a CODE_LABEL, then it might be the
415 label for an exception handler which can't be reached. We need
416 to remove the label from the exception_handler_label list. */
417 insn = BB_HEAD (b);
419 end = get_last_bb_insn (b);
421 /* Selectively delete the entire chain. */
422 BB_HEAD (b) = NULL;
423 delete_insn_chain (insn, end, true);
426 if (dump_file)
427 fprintf (dump_file, "deleting block %d\n", b->index);
428 df_bb_delete (b->index);
431 /* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
433 void
434 compute_bb_for_insn (void)
436 basic_block bb;
438 FOR_EACH_BB_FN (bb, cfun)
440 rtx_insn *end = BB_END (bb);
441 rtx_insn *insn;
443 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
445 BLOCK_FOR_INSN (insn) = bb;
446 if (insn == end)
447 break;
452 /* Release the basic_block_for_insn array. */
454 unsigned int
455 free_bb_for_insn (void)
457 rtx_insn *insn;
458 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
459 if (!BARRIER_P (insn))
460 BLOCK_FOR_INSN (insn) = NULL;
461 return 0;
464 namespace {
466 const pass_data pass_data_free_cfg =
468 RTL_PASS, /* type */
469 "*free_cfg", /* name */
470 OPTGROUP_NONE, /* optinfo_flags */
471 TV_NONE, /* tv_id */
472 0, /* properties_required */
473 0, /* properties_provided */
474 PROP_cfg, /* properties_destroyed */
475 0, /* todo_flags_start */
476 0, /* todo_flags_finish */
479 class pass_free_cfg : public rtl_opt_pass
481 public:
482 pass_free_cfg (gcc::context *ctxt)
483 : rtl_opt_pass (pass_data_free_cfg, ctxt)
486 /* opt_pass methods: */
487 virtual unsigned int execute (function *);
489 }; // class pass_free_cfg
491 unsigned int
492 pass_free_cfg::execute (function *)
494 #ifdef DELAY_SLOTS
495 /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
496 valid at that point so it would be too late to call df_analyze. */
497 if (optimize > 0 && flag_delayed_branch)
499 df_note_add_problem ();
500 df_analyze ();
502 #endif
504 if (crtl->has_bb_partition)
505 insert_section_boundary_note ();
507 free_bb_for_insn ();
508 return 0;
511 } // anon namespace
513 rtl_opt_pass *
514 make_pass_free_cfg (gcc::context *ctxt)
516 return new pass_free_cfg (ctxt);
519 /* Return RTX to emit after when we want to emit code on the entry of function. */
520 rtx_insn *
521 entry_of_function (void)
523 return (n_basic_blocks_for_fn (cfun) > NUM_FIXED_BLOCKS ?
524 BB_HEAD (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) : get_insns ());
527 /* Emit INSN at the entry point of the function, ensuring that it is only
528 executed once per function. */
529 void
530 emit_insn_at_entry (rtx insn)
532 edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
533 edge e = ei_safe_edge (ei);
534 gcc_assert (e->flags & EDGE_FALLTHRU);
536 insert_insn_on_edge (insn, e);
537 commit_edge_insertions ();
540 /* Update BLOCK_FOR_INSN of insns between BEGIN and END
541 (or BARRIER if found) and notify df of the bb change.
542 The insn chain range is inclusive
543 (i.e. both BEGIN and END will be updated. */
545 static void
546 update_bb_for_insn_chain (rtx_insn *begin, rtx_insn *end, basic_block bb)
548 rtx_insn *insn;
550 end = NEXT_INSN (end);
551 for (insn = begin; insn != end; insn = NEXT_INSN (insn))
552 if (!BARRIER_P (insn))
553 df_insn_change_bb (insn, bb);
556 /* Update BLOCK_FOR_INSN of insns in BB to BB,
557 and notify df of the change. */
559 void
560 update_bb_for_insn (basic_block bb)
562 update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
566 /* Like active_insn_p, except keep the return value clobber around
567 even after reload. */
569 static bool
570 flow_active_insn_p (const rtx_insn *insn)
572 if (active_insn_p (insn))
573 return true;
575 /* A clobber of the function return value exists for buggy
576 programs that fail to return a value. Its effect is to
577 keep the return value from being live across the entire
578 function. If we allow it to be skipped, we introduce the
579 possibility for register lifetime confusion. */
580 if (GET_CODE (PATTERN (insn)) == CLOBBER
581 && REG_P (XEXP (PATTERN (insn), 0))
582 && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
583 return true;
585 return false;
588 /* Return true if the block has no effect and only forwards control flow to
589 its single destination. */
591 bool
592 contains_no_active_insn_p (const_basic_block bb)
594 rtx_insn *insn;
596 if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun) || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
597 || !single_succ_p (bb))
598 return false;
600 for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
601 if (INSN_P (insn) && flow_active_insn_p (insn))
602 return false;
604 return (!INSN_P (insn)
605 || (JUMP_P (insn) && simplejump_p (insn))
606 || !flow_active_insn_p (insn));
609 /* Likewise, but protect loop latches, headers and preheaders. */
610 /* FIXME: Make this a cfg hook. */
612 bool
613 forwarder_block_p (const_basic_block bb)
615 if (!contains_no_active_insn_p (bb))
616 return false;
618 /* Protect loop latches, headers and preheaders. */
619 if (current_loops)
621 basic_block dest;
622 if (bb->loop_father->header == bb)
623 return false;
624 dest = EDGE_SUCC (bb, 0)->dest;
625 if (dest->loop_father->header == dest)
626 return false;
629 return true;
632 /* Return nonzero if we can reach target from src by falling through. */
633 /* FIXME: Make this a cfg hook, the result is only valid in cfgrtl mode. */
635 bool
636 can_fallthru (basic_block src, basic_block target)
638 rtx_insn *insn = BB_END (src);
639 rtx_insn *insn2;
640 edge e;
641 edge_iterator ei;
643 if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
644 return true;
645 if (src->next_bb != target)
646 return false;
648 /* ??? Later we may add code to move jump tables offline. */
649 if (tablejump_p (insn, NULL, NULL))
650 return false;
652 FOR_EACH_EDGE (e, ei, src->succs)
653 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
654 && e->flags & EDGE_FALLTHRU)
655 return false;
657 insn2 = BB_HEAD (target);
658 if (!active_insn_p (insn2))
659 insn2 = next_active_insn (insn2);
661 return next_active_insn (insn) == insn2;
664 /* Return nonzero if we could reach target from src by falling through,
665 if the target was made adjacent. If we already have a fall-through
666 edge to the exit block, we can't do that. */
667 static bool
668 could_fall_through (basic_block src, basic_block target)
670 edge e;
671 edge_iterator ei;
673 if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
674 return true;
675 FOR_EACH_EDGE (e, ei, src->succs)
676 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
677 && e->flags & EDGE_FALLTHRU)
678 return 0;
679 return true;
682 /* Return the NOTE_INSN_BASIC_BLOCK of BB. */
683 rtx_note *
684 bb_note (basic_block bb)
686 rtx_insn *note;
688 note = BB_HEAD (bb);
689 if (LABEL_P (note))
690 note = NEXT_INSN (note);
692 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note));
693 return as_a <rtx_note *> (note);
696 /* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
697 note associated with the BLOCK. */
699 static rtx_insn *
700 first_insn_after_basic_block_note (basic_block block)
702 rtx_insn *insn;
704 /* Get the first instruction in the block. */
705 insn = BB_HEAD (block);
707 if (insn == NULL_RTX)
708 return NULL;
709 if (LABEL_P (insn))
710 insn = NEXT_INSN (insn);
711 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
713 return NEXT_INSN (insn);
716 /* Creates a new basic block just after basic block BB by splitting
717 everything after specified instruction INSNP. */
719 static basic_block
720 rtl_split_block (basic_block bb, void *insnp)
722 basic_block new_bb;
723 rtx_insn *insn = (rtx_insn *) insnp;
724 edge e;
725 edge_iterator ei;
727 if (!insn)
729 insn = first_insn_after_basic_block_note (bb);
731 if (insn)
733 rtx_insn *next = insn;
735 insn = PREV_INSN (insn);
737 /* If the block contains only debug insns, insn would have
738 been NULL in a non-debug compilation, and then we'd end
739 up emitting a DELETED note. For -fcompare-debug
740 stability, emit the note too. */
741 if (insn != BB_END (bb)
742 && DEBUG_INSN_P (next)
743 && DEBUG_INSN_P (BB_END (bb)))
745 while (next != BB_END (bb) && DEBUG_INSN_P (next))
746 next = NEXT_INSN (next);
748 if (next == BB_END (bb))
749 emit_note_after (NOTE_INSN_DELETED, next);
752 else
753 insn = get_last_insn ();
756 /* We probably should check type of the insn so that we do not create
757 inconsistent cfg. It is checked in verify_flow_info anyway, so do not
758 bother. */
759 if (insn == BB_END (bb))
760 emit_note_after (NOTE_INSN_DELETED, insn);
762 /* Create the new basic block. */
763 new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
764 BB_COPY_PARTITION (new_bb, bb);
765 BB_END (bb) = insn;
767 /* Redirect the outgoing edges. */
768 new_bb->succs = bb->succs;
769 bb->succs = NULL;
770 FOR_EACH_EDGE (e, ei, new_bb->succs)
771 e->src = new_bb;
773 /* The new block starts off being dirty. */
774 df_set_bb_dirty (bb);
775 return new_bb;
778 /* Return true if the single edge between blocks A and B is the only place
779 in RTL which holds some unique locus. */
781 static bool
782 unique_locus_on_edge_between_p (basic_block a, basic_block b)
784 const location_t goto_locus = EDGE_SUCC (a, 0)->goto_locus;
785 rtx_insn *insn, *end;
787 if (LOCATION_LOCUS (goto_locus) == UNKNOWN_LOCATION)
788 return false;
790 /* First scan block A backward. */
791 insn = BB_END (a);
792 end = PREV_INSN (BB_HEAD (a));
793 while (insn != end && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
794 insn = PREV_INSN (insn);
796 if (insn != end && INSN_LOCATION (insn) == goto_locus)
797 return false;
799 /* Then scan block B forward. */
800 insn = BB_HEAD (b);
801 if (insn)
803 end = NEXT_INSN (BB_END (b));
804 while (insn != end && !NONDEBUG_INSN_P (insn))
805 insn = NEXT_INSN (insn);
807 if (insn != end && INSN_HAS_LOCATION (insn)
808 && INSN_LOCATION (insn) == goto_locus)
809 return false;
812 return true;
815 /* If the single edge between blocks A and B is the only place in RTL which
816 holds some unique locus, emit a nop with that locus between the blocks. */
818 static void
819 emit_nop_for_unique_locus_between (basic_block a, basic_block b)
821 if (!unique_locus_on_edge_between_p (a, b))
822 return;
824 BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
825 INSN_LOCATION (BB_END (a)) = EDGE_SUCC (a, 0)->goto_locus;
828 /* Blocks A and B are to be merged into a single block A. The insns
829 are already contiguous. */
831 static void
832 rtl_merge_blocks (basic_block a, basic_block b)
834 rtx_insn *b_head = BB_HEAD (b), *b_end = BB_END (b), *a_end = BB_END (a);
835 rtx_insn *del_first = NULL, *del_last = NULL;
836 rtx_insn *b_debug_start = b_end, *b_debug_end = b_end;
837 bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
838 int b_empty = 0;
840 if (dump_file)
841 fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
842 a->index);
844 while (DEBUG_INSN_P (b_end))
845 b_end = PREV_INSN (b_debug_start = b_end);
847 /* If there was a CODE_LABEL beginning B, delete it. */
848 if (LABEL_P (b_head))
850 /* Detect basic blocks with nothing but a label. This can happen
851 in particular at the end of a function. */
852 if (b_head == b_end)
853 b_empty = 1;
855 del_first = del_last = b_head;
856 b_head = NEXT_INSN (b_head);
859 /* Delete the basic block note and handle blocks containing just that
860 note. */
861 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
863 if (b_head == b_end)
864 b_empty = 1;
865 if (! del_last)
866 del_first = b_head;
868 del_last = b_head;
869 b_head = NEXT_INSN (b_head);
872 /* If there was a jump out of A, delete it. */
873 if (JUMP_P (a_end))
875 rtx_insn *prev;
877 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
878 if (!NOTE_P (prev)
879 || NOTE_INSN_BASIC_BLOCK_P (prev)
880 || prev == BB_HEAD (a))
881 break;
883 del_first = a_end;
885 #ifdef HAVE_cc0
886 /* If this was a conditional jump, we need to also delete
887 the insn that set cc0. */
888 if (only_sets_cc0_p (prev))
890 rtx_insn *tmp = prev;
892 prev = prev_nonnote_insn (prev);
893 if (!prev)
894 prev = BB_HEAD (a);
895 del_first = tmp;
897 #endif
899 a_end = PREV_INSN (del_first);
901 else if (BARRIER_P (NEXT_INSN (a_end)))
902 del_first = NEXT_INSN (a_end);
904 /* Delete everything marked above as well as crap that might be
905 hanging out between the two blocks. */
906 BB_END (a) = a_end;
907 BB_HEAD (b) = b_empty ? NULL : b_head;
908 delete_insn_chain (del_first, del_last, true);
910 /* When not optimizing and the edge is the only place in RTL which holds
911 some unique locus, emit a nop with that locus in between. */
912 if (!optimize)
914 emit_nop_for_unique_locus_between (a, b);
915 a_end = BB_END (a);
918 /* Reassociate the insns of B with A. */
919 if (!b_empty)
921 update_bb_for_insn_chain (a_end, b_debug_end, a);
923 BB_END (a) = b_debug_end;
924 BB_HEAD (b) = NULL;
926 else if (b_end != b_debug_end)
928 /* Move any deleted labels and other notes between the end of A
929 and the debug insns that make up B after the debug insns,
930 bringing the debug insns into A while keeping the notes after
931 the end of A. */
932 if (NEXT_INSN (a_end) != b_debug_start)
933 reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
934 b_debug_end);
935 update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
936 BB_END (a) = b_debug_end;
939 df_bb_delete (b->index);
941 /* If B was a forwarder block, propagate the locus on the edge. */
942 if (forwarder_p
943 && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION)
944 EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;
946 if (dump_file)
947 fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
951 /* Return true when block A and B can be merged. */
953 static bool
954 rtl_can_merge_blocks (basic_block a, basic_block b)
956 /* If we are partitioning hot/cold basic blocks, we don't want to
957 mess up unconditional or indirect jumps that cross between hot
958 and cold sections.
960 Basic block partitioning may result in some jumps that appear to
961 be optimizable (or blocks that appear to be mergeable), but which really
962 must be left untouched (they are required to make it safely across
963 partition boundaries). See the comments at the top of
964 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
966 if (BB_PARTITION (a) != BB_PARTITION (b))
967 return false;
969 /* Protect the loop latches. */
970 if (current_loops && b->loop_father->latch == b)
971 return false;
973 /* There must be exactly one edge in between the blocks. */
974 return (single_succ_p (a)
975 && single_succ (a) == b
976 && single_pred_p (b)
977 && a != b
978 /* Must be simple edge. */
979 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
980 && a->next_bb == b
981 && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
982 && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
983 /* If the jump insn has side effects,
984 we can't kill the edge. */
985 && (!JUMP_P (BB_END (a))
986 || (reload_completed
987 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
990 /* Return the label in the head of basic block BLOCK. Create one if it doesn't
991 exist. */
994 block_label (basic_block block)
996 if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
997 return NULL_RTX;
999 if (!LABEL_P (BB_HEAD (block)))
1001 BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
1004 return BB_HEAD (block);
1007 /* Attempt to perform edge redirection by replacing possibly complex jump
1008 instruction by unconditional jump or removing jump completely. This can
1009 apply only if all edges now point to the same block. The parameters and
1010 return values are equivalent to redirect_edge_and_branch. */
1012 edge
1013 try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
1015 basic_block src = e->src;
1016 rtx_insn *insn = BB_END (src), *kill_from;
1017 rtx set;
1018 int fallthru = 0;
1020 /* If we are partitioning hot/cold basic blocks, we don't want to
1021 mess up unconditional or indirect jumps that cross between hot
1022 and cold sections.
1024 Basic block partitioning may result in some jumps that appear to
1025 be optimizable (or blocks that appear to be mergeable), but which really
1026 must be left untouched (they are required to make it safely across
1027 partition boundaries). See the comments at the top of
1028 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
1030 if (BB_PARTITION (src) != BB_PARTITION (target))
1031 return NULL;
1033 /* We can replace or remove a complex jump only when we have exactly
1034 two edges. Also, if we have exactly one outgoing edge, we can
1035 redirect that. */
1036 if (EDGE_COUNT (src->succs) >= 3
1037 /* Verify that all targets will be TARGET. Specifically, the
1038 edge that is not E must also go to TARGET. */
1039 || (EDGE_COUNT (src->succs) == 2
1040 && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
1041 return NULL;
1043 if (!onlyjump_p (insn))
1044 return NULL;
1045 if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
1046 return NULL;
1048 /* Avoid removing branch with side effects. */
1049 set = single_set (insn);
1050 if (!set || side_effects_p (set))
1051 return NULL;
1053 /* In case we zap a conditional jump, we'll need to kill
1054 the cc0 setter too. */
1055 kill_from = insn;
1056 #ifdef HAVE_cc0
1057 if (reg_mentioned_p (cc0_rtx, PATTERN (insn))
1058 && only_sets_cc0_p (PREV_INSN (insn)))
1059 kill_from = PREV_INSN (insn);
1060 #endif
1062 /* See if we can create the fallthru edge. */
1063 if (in_cfglayout || can_fallthru (src, target))
1065 if (dump_file)
1066 fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
1067 fallthru = 1;
1069 /* Selectively unlink whole insn chain. */
1070 if (in_cfglayout)
1072 rtx_insn *insn = BB_FOOTER (src);
1074 delete_insn_chain (kill_from, BB_END (src), false);
1076 /* Remove barriers but keep jumptables. */
1077 while (insn)
1079 if (BARRIER_P (insn))
1081 if (PREV_INSN (insn))
1082 SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
1083 else
1084 BB_FOOTER (src) = NEXT_INSN (insn);
1085 if (NEXT_INSN (insn))
1086 SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
1088 if (LABEL_P (insn))
1089 break;
1090 insn = NEXT_INSN (insn);
1093 else
1094 delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
1095 false);
1098 /* If this already is simplejump, redirect it. */
1099 else if (simplejump_p (insn))
1101 if (e->dest == target)
1102 return NULL;
1103 if (dump_file)
1104 fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
1105 INSN_UID (insn), e->dest->index, target->index);
1106 if (!redirect_jump (insn, block_label (target), 0))
1108 gcc_assert (target == EXIT_BLOCK_PTR_FOR_FN (cfun));
1109 return NULL;
1113 /* Cannot do anything for target exit block. */
1114 else if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
1115 return NULL;
1117 /* Or replace possibly complicated jump insn by simple jump insn. */
1118 else
1120 rtx target_label = block_label (target);
1121 rtx_insn *barrier;
1122 rtx label;
1123 rtx_jump_table_data *table;
1125 emit_jump_insn_after_noloc (gen_jump (target_label), insn);
1126 JUMP_LABEL (BB_END (src)) = target_label;
1127 LABEL_NUSES (target_label)++;
1128 if (dump_file)
1129 fprintf (dump_file, "Replacing insn %i by jump %i\n",
1130 INSN_UID (insn), INSN_UID (BB_END (src)));
1133 delete_insn_chain (kill_from, insn, false);
1135 /* Recognize a tablejump that we are converting to a
1136 simple jump and remove its associated CODE_LABEL
1137 and ADDR_VEC or ADDR_DIFF_VEC. */
1138 if (tablejump_p (insn, &label, &table))
1139 delete_insn_chain (label, table, false);
1141 barrier = next_nonnote_insn (BB_END (src));
1142 if (!barrier || !BARRIER_P (barrier))
1143 emit_barrier_after (BB_END (src));
1144 else
1146 if (barrier != NEXT_INSN (BB_END (src)))
1148 /* Move the jump before barrier so that the notes
1149 which originally were or were created before jump table are
1150 inside the basic block. */
1151 rtx_insn *new_insn = BB_END (src);
1153 update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
1154 PREV_INSN (barrier), src);
1156 SET_NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
1157 SET_PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);
1159 SET_NEXT_INSN (new_insn) = barrier;
1160 SET_NEXT_INSN (PREV_INSN (barrier)) = new_insn;
1162 SET_PREV_INSN (new_insn) = PREV_INSN (barrier);
1163 SET_PREV_INSN (barrier) = new_insn;
1168 /* Keep only one edge out and set proper flags. */
1169 if (!single_succ_p (src))
1170 remove_edge (e);
1171 gcc_assert (single_succ_p (src));
1173 e = single_succ_edge (src);
1174 if (fallthru)
1175 e->flags = EDGE_FALLTHRU;
1176 else
1177 e->flags = 0;
1179 e->probability = REG_BR_PROB_BASE;
1180 e->count = src->count;
1182 if (e->dest != target)
1183 redirect_edge_succ (e, target);
1184 return e;
/* Subroutine of redirect_branch_edge that tries to patch the jump
   instruction INSN so that it reaches block NEW.  Do this
   only when it originally reached block OLD.  Return true if this
   worked or the original target wasn't OLD, return false if redirection
   doesn't work.  */

static bool
patch_jump_insn (rtx_insn *insn, rtx_insn *old_label, basic_block new_bb)
{
  rtx_jump_table_data *table;
  rtx tmp;
  /* Recognize a tablejump and adjust all matching cases.  */
  if (tablejump_p (insn, NULL, &table))
    {
      rtvec vec;
      int j;
      rtx new_label = block_label (new_bb);

      /* The exit block has no label to jump to via a table.  */
      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
	return false;
      vec = table->get_labels ();

      /* Rewrite every case label in the dispatch table that pointed
	 at OLD_LABEL, keeping the label use counts in sync.  */
      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	  {
	    RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
	    --LABEL_NUSES (old_label);
	    ++LABEL_NUSES (new_label);
	  }

      /* Handle casesi dispatch insns.  The third IF_THEN_ELSE operand
	 is the default (out-of-range) label.  */
      if ((tmp = single_set (insn)) != NULL
	  && SET_DEST (tmp) == pc_rtx
	  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	  && LABEL_REF_LABEL (XEXP (SET_SRC (tmp), 2)) == old_label)
	{
	  XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
						       new_label);
	  --LABEL_NUSES (old_label);
	  ++LABEL_NUSES (new_label);
	}
    }
  else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
    {
      /* An asm goto: rewrite matching entries in its label vector.  */
      int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
      rtx new_label, note;

      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
	return false;
      new_label = block_label (new_bb);

      for (i = 0; i < n; ++i)
	{
	  rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
	  gcc_assert (GET_CODE (old_ref) == LABEL_REF);
	  if (XEXP (old_ref, 0) == old_label)
	    {
	      ASM_OPERANDS_LABEL (tmp, i)
		= gen_rtx_LABEL_REF (Pmode, new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}

      if (JUMP_LABEL (insn) == old_label)
	{
	  JUMP_LABEL (insn) = new_label;
	  /* NEW_LABEL is now the primary target; a duplicate
	     REG_LABEL_TARGET note for it would be redundant.  */
	  note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
	  if (note)
	    remove_note (insn, note);
	}
      else
	{
	  /* OLD_LABEL was a secondary target: move its note over to
	     NEW_LABEL unless NEW_LABEL is already covered.  */
	  note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
	  if (note)
	    remove_note (insn, note);
	  if (JUMP_LABEL (insn) != new_label
	      && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
	    add_reg_note (insn, REG_LABEL_TARGET, new_label);
	}
      /* Also retarget any REG_LABEL_OPERAND notes naming OLD_LABEL.  */
      while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
	     != NULL_RTX)
	XEXP (note, 0) = new_label;
    }
  else
    {
      /* ?? We may play the games with moving the named labels from
	 one basic block to the other in case only one computed_jump is
	 available.  */
      if (computed_jump_p (insn)
	  /* A return instruction can't be redirected.  */
	  || returnjump_p (insn))
	return false;

      if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
	{
	  /* If the insn doesn't go where we think, we're confused.  */
	  gcc_assert (JUMP_LABEL (insn) == old_label);

	  /* If the substitution doesn't succeed, die.  This can happen
	     if the back end emitted unrecognizable instructions or if
	     target is exit block on some arches.  */
	  if (!redirect_jump (insn, block_label (new_bb), 0))
	    {
	      gcc_assert (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun));
	      return false;
	    }
	}
    }
  return true;
}
/* Redirect edge representing branch of (un)conditional jump or tablejump,
   NULL on failure  */
static edge
redirect_branch_edge (edge e, basic_block target)
{
  rtx_insn *old_label = BB_HEAD (e->dest);
  basic_block src = e->src;
  rtx_insn *insn = BB_END (src);

  /* We can only redirect non-fallthru edges of jump insn.  */
  if (e->flags & EDGE_FALLTHRU)
    return NULL;
  else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
    return NULL;

  if (!currently_expanding_to_rtl)
    {
      /* Normal case: patch the single jump that ends SRC.  */
      if (!patch_jump_insn (insn, old_label, target))
	return NULL;
    }
  else
    /* When expanding this BB might actually contain multiple
       jumps (i.e. not yet split by find_many_sub_basic_blocks).
       Redirect all of those that match our label.  */
    FOR_BB_INSNS (src, insn)
      if (JUMP_P (insn) && !patch_jump_insn (insn, old_label, target))
	return NULL;

  if (dump_file)
    fprintf (dump_file, "Edge %i->%i redirected to %i\n",
	     e->src->index, e->dest->index, target->index);

  /* Only now that the insns are patched, move the CFG edge itself.  */
  if (e->dest != target)
    e = redirect_edge_succ_nodup (e, target);

  return e;
}
/* Called when edge E has been redirected to a new destination,
   in order to update the region crossing flag on the edge and
   jump.  */

static void
fixup_partition_crossing (edge e)
{
  /* Edges to/from the entry/exit blocks never cross partitions.  */
  if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun) || e->dest
      == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return;
  /* If we redirected an existing edge, it may already be marked
     crossing, even though the new src is missing a reg crossing note.
     But make sure reg crossing note doesn't already exist before
     inserting.  */
  if (BB_PARTITION (e->src) != BB_PARTITION (e->dest))
    {
      e->flags |= EDGE_CROSSING;
      if (JUMP_P (BB_END (e->src))
	  && !CROSSING_JUMP_P (BB_END (e->src)))
	CROSSING_JUMP_P (BB_END (e->src)) = 1;
    }
  else if (BB_PARTITION (e->src) == BB_PARTITION (e->dest))
    {
      e->flags &= ~EDGE_CROSSING;
      /* Remove the section crossing note from jump at end of
	 src if it exists, and if no other successors are
	 still crossing.  */
      if (JUMP_P (BB_END (e->src)) && CROSSING_JUMP_P (BB_END (e->src)))
	{
	  bool has_crossing_succ = false;
	  edge e2;
	  edge_iterator ei;
	  FOR_EACH_EDGE (e2, ei, e->src->succs)
	    {
	      has_crossing_succ |= (e2->flags & EDGE_CROSSING);
	      if (has_crossing_succ)
		break;
	    }
	  if (!has_crossing_succ)
	    CROSSING_JUMP_P (BB_END (e->src)) = 0;
	}
    }
}
/* Called when block BB has been reassigned to the cold partition,
   because it is now dominated by another cold block,
   to ensure that the region crossing attributes are updated.  */

static void
fixup_new_cold_bb (basic_block bb)
{
  edge e;
  edge_iterator ei;

  /* This is called when a hot bb is found to now be dominated
     by a cold bb and therefore needs to become cold.  Therefore,
     its preds will no longer be region crossing.  Any non-dominating
     preds that were previously hot would also have become cold
     in the caller for the same region.  Any preds that were previously
     region-crossing will be adjusted in fixup_partition_crossing.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      fixup_partition_crossing (e);
    }

  /* Possibly need to make bb's successor edges region crossing,
     or remove stale region crossing.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      /* We can't have fall-through edges across partition boundaries.
	 Note that force_nonfallthru will do any necessary partition
	 boundary fixup by calling fixup_partition_crossing itself.  */
      if ((e->flags & EDGE_FALLTHRU)
	  && BB_PARTITION (bb) != BB_PARTITION (e->dest)
	  && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	force_nonfallthru (e);
      else
	fixup_partition_crossing (e);
    }
}
/* Attempt to change code to redirect edge E to TARGET.  Don't do that on
   expense of adding new instructions or reordering basic blocks.

   Function can be also called with edge destination equivalent to the TARGET.
   Then it should try the simplifications and do nothing if none is possible.

   Return edge representing the branch if transformation succeeded.  Return NULL
   on failure.
   We still return NULL in case E already destinated TARGET and we didn't
   managed to simplify instruction stream.  */

static edge
rtl_redirect_edge_and_branch (edge e, basic_block target)
{
  edge ret;
  basic_block src = e->src;
  basic_block dest = e->dest;

  /* Abnormal-call and EH edges cannot be retargeted here.  */
  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return NULL;

  if (dest == target)
    return e;

  /* First try the cheap transformation: replace the jump entirely.  */
  if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
    {
      df_set_bb_dirty (src);
      fixup_partition_crossing (ret);
      return ret;
    }

  /* Otherwise patch the existing jump instruction in place.  */
  ret = redirect_branch_edge (e, target);
  if (!ret)
    return NULL;

  df_set_bb_dirty (src);
  fixup_partition_crossing (ret);
  return ret;
}
/* Emit a barrier after BB, into the footer if we are in CFGLAYOUT mode.  */

void
emit_barrier_after_bb (basic_block bb)
{
  rtx_barrier *barrier = emit_barrier_after (BB_END (bb));
  gcc_assert (current_ir_type () == IR_RTL_CFGRTL
              || current_ir_type () == IR_RTL_CFGLAYOUT);
  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    {
      /* In layout mode barriers do not live in the insn stream; move
	 the just-emitted barrier into BB's footer chain instead.  */
      rtx_insn *insn = unlink_insn_chain (barrier, barrier);

      if (BB_FOOTER (bb))
	{
	  rtx_insn *footer_tail = BB_FOOTER (bb);

	  while (NEXT_INSN (footer_tail))
	    footer_tail = NEXT_INSN (footer_tail);
	  /* Only append if the footer does not already end in a
	     barrier; a second one would be redundant.  */
	  if (!BARRIER_P (footer_tail))
	    {
	      SET_NEXT_INSN (footer_tail) = insn;
	      SET_PREV_INSN (insn) = footer_tail;
	    }
	}
      else
	BB_FOOTER (bb) = insn;
    }
}
/* Like force_nonfallthru below, but additionally performs redirection
   Used by redirect_edge_and_branch_force.  JUMP_LABEL is used only
   when redirecting to the EXIT_BLOCK, it is either ret_rtx or
   simple_return_rtx, indicating which kind of returnjump to create.
   It should be NULL otherwise.  */

basic_block
force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
{
  basic_block jump_block, new_bb = NULL, src = e->src;
  rtx note;
  edge new_edge;
  int abnormal_edge_flags = 0;
  bool asm_goto_edge = false;
  int loc;

  /* In the case the last instruction is conditional jump to the next
     instruction, first redirect the jump itself and then continue
     by creating a basic block afterwards to redirect fallthru edge.  */
  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
      && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
      && any_condjump_p (BB_END (e->src))
      && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
    {
      rtx note;
      edge b = unchecked_make_edge (e->src, target, 0);
      bool redirected;

      redirected = redirect_jump (BB_END (e->src), block_label (target), 0);
      gcc_assert (redirected);

      note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
      if (note)
	{
	  int prob = XINT (note, 0);

	  b->probability = prob;
	  /* Update this to use GCOV_COMPUTE_SCALE.  */
	  b->count = e->count * prob / REG_BR_PROB_BASE;
	  /* NOTE(review): this zeroes e->probability (x -= x), which makes
	     the < 0 clamp below dead; the apparent intent would be
	     e->probability -= b->probability — confirm against upstream
	     before changing.  */
	  e->probability -= e->probability;
	  e->count -= b->count;
	  if (e->probability < 0)
	    e->probability = 0;
	  if (e->count < 0)
	    e->count = 0;
	}
    }

  if (e->flags & EDGE_ABNORMAL)
    {
      /* Irritating special case - fallthru edge to the same block as abnormal
	 edge.
	 We can't redirect abnormal edge, but we still can split the fallthru
	 one and create separate abnormal edge to original destination.
	 This allows bb-reorder to make such edge non-fallthru.  */
      gcc_assert (e->dest == target);
      abnormal_edge_flags = e->flags & ~EDGE_FALLTHRU;
      e->flags &= EDGE_FALLTHRU;
    }
  else
    {
      gcc_assert (e->flags & EDGE_FALLTHRU);
      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  /* We can't redirect the entry block.  Create an empty block
	     at the start of the function which we use to add the new
	     jump.  */
	  edge tmp;
	  edge_iterator ei;
	  bool found = false;

	  basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL,
					       ENTRY_BLOCK_PTR_FOR_FN (cfun));

	  /* Change the existing edge's source to be the new block, and add
	     a new edge from the entry block to the new block.  */
	  e->src = bb;
	  for (ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
	       (tmp = ei_safe_edge (ei)); )
	    {
	      if (tmp == e)
		{
		  ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs->unordered_remove (ei.index);
		  found = true;
		  break;
		}
	      else
		ei_next (&ei);
	    }

	  gcc_assert (found);

	  vec_safe_push (bb->succs, e);
	  make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb,
				 EDGE_FALLTHRU);
	}
    }

  /* If e->src ends with asm goto, see if any of the ASM_OPERANDS_LABELs
     don't point to the target or fallthru label.  */
  if (JUMP_P (BB_END (e->src))
      && target != EXIT_BLOCK_PTR_FOR_FN (cfun)
      && (e->flags & EDGE_FALLTHRU)
      && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
    {
      int i, n = ASM_OPERANDS_LABEL_LENGTH (note);
      bool adjust_jump_target = false;

      for (i = 0; i < n; ++i)
	{
	  if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (e->dest))
	    {
	      LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))--;
	      XEXP (ASM_OPERANDS_LABEL (note, i), 0) = block_label (target);
	      LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))++;
	      adjust_jump_target = true;
	    }
	  if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (target))
	    asm_goto_edge = true;
	}
      if (adjust_jump_target)
	{
	  /* Keep JUMP_LABEL and the REG_LABEL_TARGET/OPERAND notes
	     consistent with the label rewrite done just above.  */
	  rtx_insn *insn = BB_END (e->src);
	  rtx note;
	  rtx_insn *old_label = BB_HEAD (e->dest);
	  rtx_insn *new_label = BB_HEAD (target);

	  if (JUMP_LABEL (insn) == old_label)
	    {
	      JUMP_LABEL (insn) = new_label;
	      note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
	      if (note)
		remove_note (insn, note);
	    }
	  else
	    {
	      note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
	      if (note)
		remove_note (insn, note);
	      if (JUMP_LABEL (insn) != new_label
		  && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
		add_reg_note (insn, REG_LABEL_TARGET, new_label);
	    }
	  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
		 != NULL_RTX)
	    XEXP (note, 0) = new_label;
	}
    }

  if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
    {
      rtx_insn *new_head;
      gcov_type count = e->count;
      int probability = e->probability;
      /* Create the new structures.  */

      /* If the old block ended with a tablejump, skip its table
	 by searching forward from there.  Otherwise start searching
	 forward from the last instruction of the old block.  */
      rtx_jump_table_data *table;
      if (tablejump_p (BB_END (e->src), NULL, &table))
	new_head = table;
      else
	new_head = BB_END (e->src);
      new_head = NEXT_INSN (new_head);

      jump_block = create_basic_block (new_head, NULL, e->src);
      jump_block->count = count;
      jump_block->frequency = EDGE_FREQUENCY (e);

      /* Make sure new block ends up in correct hot/cold section.  */

      BB_COPY_PARTITION (jump_block, e->src);

      /* Wire edge in.  */
      new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
      new_edge->probability = probability;
      new_edge->count = count;

      /* Redirect old edge.  */
      redirect_edge_pred (e, jump_block);
      e->probability = REG_BR_PROB_BASE;

      /* If e->src was previously region crossing, it no longer is
	 and the reg crossing note should be removed.  */
      fixup_partition_crossing (new_edge);

      /* If asm goto has any label refs to target's label,
	 add also edge from asm goto bb to target.  */
      if (asm_goto_edge)
	{
	  /* Split the counts/frequencies between the fallthru edge and
	     the new asm-goto edge to TARGET.  */
	  new_edge->probability /= 2;
	  new_edge->count /= 2;
	  jump_block->count /= 2;
	  jump_block->frequency /= 2;
	  new_edge = make_edge (new_edge->src, target,
				e->flags & ~EDGE_FALLTHRU);
	  new_edge->probability = probability - probability / 2;
	  new_edge->count = count - count / 2;
	}

      new_bb = jump_block;
    }
  else
    jump_block = e->src;

  loc = e->goto_locus;
  e->flags &= ~EDGE_FALLTHRU;
  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      /* Jumps to the exit block are emitted as (simple) return insns,
	 selected by JUMP_LABEL as documented above.  */
      if (jump_label == ret_rtx)
	{
#ifdef HAVE_return
	  emit_jump_insn_after_setloc (gen_return (), BB_END (jump_block), loc);
#else
	  gcc_unreachable ();
#endif
	}
      else
	{
	  gcc_assert (jump_label == simple_return_rtx);
#ifdef HAVE_simple_return
	  emit_jump_insn_after_setloc (gen_simple_return (),
				       BB_END (jump_block), loc);
#else
	  gcc_unreachable ();
#endif
	}
      set_return_jump_label (BB_END (jump_block));
    }
  else
    {
      rtx label = block_label (target);
      emit_jump_insn_after_setloc (gen_jump (label), BB_END (jump_block), loc);
      JUMP_LABEL (BB_END (jump_block)) = label;
      LABEL_NUSES (label)++;
    }

  /* We might be in cfg layout mode, and if so, the following routine will
     insert the barrier correctly.  */
  emit_barrier_after_bb (jump_block);
  redirect_edge_succ_nodup (e, target);

  if (abnormal_edge_flags)
    make_edge (src, target, abnormal_edge_flags);

  df_mark_solutions_dirty ();
  fixup_partition_crossing (e);
  return new_bb;
}
1740 /* Edge E is assumed to be fallthru edge. Emit needed jump instruction
1741 (and possibly create new basic block) to make edge non-fallthru.
1742 Return newly created BB or NULL if none. */
1744 static basic_block
1745 rtl_force_nonfallthru (edge e)
1747 return force_nonfallthru_and_redirect (e, e->dest, NULL_RTX);
1750 /* Redirect edge even at the expense of creating new jump insn or
1751 basic block. Return new basic block if created, NULL otherwise.
1752 Conversion must be possible. */
1754 static basic_block
1755 rtl_redirect_edge_and_branch_force (edge e, basic_block target)
1757 if (redirect_edge_and_branch (e, target)
1758 || e->dest == target)
1759 return NULL;
1761 /* In case the edge redirection failed, try to force it to be non-fallthru
1762 and redirect newly created simplejump. */
1763 df_set_bb_dirty (e->src);
1764 return force_nonfallthru_and_redirect (e, target, NULL_RTX);
/* The given edge should potentially be a fallthru edge.  If that is in
   fact true, delete the jump and barriers that are in the way.  */

static void
rtl_tidy_fallthru_edge (edge e)
{
  rtx_insn *q;
  basic_block b = e->src, c = b->next_bb;

  /* ??? In a late-running flow pass, other folks may have deleted basic
     blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label.  They ought to be chastised and fixed.

     We can also wind up with a sequence of undeletable labels between
     one block and the next.

     So search through a sequence of barriers, labels, and notes for
     the head of block C and assert that we really do fall through.  */

  for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
    if (INSN_P (q))
      return;

  /* Remove what will soon cease being the jump insn from the source block.
     If block B consisted only of this single jump, turn it into a deleted
     note.  */
  q = BB_END (b);
  if (JUMP_P (q)
      && onlyjump_p (q)
      && (any_uncondjump_p (q)
	  || single_succ_p (b)))
    {
      rtx label;
      rtx_jump_table_data *table;

      if (tablejump_p (q, &label, &table))
	{
	  /* The label is likely mentioned in some instruction before
	     the tablejump and might not be DCEd, so turn it into
	     a note instead and move before the tablejump that is going to
	     be deleted.  */
	  const char *name = LABEL_NAME (label);
	  PUT_CODE (label, NOTE);
	  NOTE_KIND (label) = NOTE_INSN_DELETED_LABEL;
	  NOTE_DELETED_LABEL_NAME (label) = name;
	  rtx_insn *lab = safe_as_a <rtx_insn *> (label);
	  reorder_insns (lab, lab, PREV_INSN (q));
	  delete_insn (table);
	}

#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
	q = PREV_INSN (q);
#endif

      /* Back up so the deletion below removes the jump itself.  */
      q = PREV_INSN (q);
    }

  /* Selectively unlink the sequence.  */
  if (q != PREV_INSN (BB_HEAD (c)))
    delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);

  e->flags |= EDGE_FALLTHRU;
}
1834 /* Should move basic block BB after basic block AFTER. NIY. */
1836 static bool
1837 rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
1838 basic_block after ATTRIBUTE_UNUSED)
1840 return false;
1843 /* Locate the last bb in the same partition as START_BB. */
1845 static basic_block
1846 last_bb_in_partition (basic_block start_bb)
1848 basic_block bb;
1849 FOR_BB_BETWEEN (bb, start_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
1851 if (BB_PARTITION (start_bb) != BB_PARTITION (bb->next_bb))
1852 return bb;
1854 /* Return bb before the exit block. */
1855 return bb->prev_bb;
/* Split a (typically critical) edge.  Return the new block.
   The edge must not be abnormal.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

static basic_block
rtl_split_edge (edge edge_in)
{
  basic_block bb, new_bb;
  rtx_insn *before;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  /* We are going to place the new block in front of edge destination.
     Avoid existence of fallthru predecessors.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e = find_fallthru_edge (edge_in->dest->preds);

      if (e)
	force_nonfallthru (e);
    }

  /* Create the basic block note.  */
  if (edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    before = BB_HEAD (edge_in->dest);
  else
    before = NULL;

  /* If this is a fall through edge to the exit block, the blocks might be
     not adjacent, and the right place is after the source.  */
  if ((edge_in->flags & EDGE_FALLTHRU)
      && edge_in->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      before = NEXT_INSN (BB_END (edge_in->src));
      bb = create_basic_block (before, NULL, edge_in->src);
      BB_COPY_PARTITION (bb, edge_in->src);
    }
  else
    {
      if (edge_in->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
	  BB_COPY_PARTITION (bb, edge_in->dest);
	}
      else
	{
	  basic_block after = edge_in->dest->prev_bb;
	  /* If this is post-bb reordering, and the edge crosses a partition
	     boundary, the new block needs to be inserted in the bb chain
	     at the end of the src partition (since we put the new bb into
	     that partition, see below).  Otherwise we may end up creating
	     an extra partition crossing in the chain, which is illegal.
	     It can't go after the src, because src may have a fall-through
	     to a different block.  */
	  if (crtl->bb_reorder_complete
	      && (edge_in->flags & EDGE_CROSSING))
	    {
	      after = last_bb_in_partition (edge_in->src);
	      before = NEXT_INSN (BB_END (after));
	      /* The instruction following the last bb in partition should
		 be a barrier, since it cannot end in a fall-through.  */
	      gcc_checking_assert (BARRIER_P (before));
	      before = NEXT_INSN (before);
	    }
	  bb = create_basic_block (before, NULL, after);
	  /* Put the split bb into the src partition, to avoid creating
	     a situation where a cold bb dominates a hot bb, in the case
	     where src is cold and dest is hot.  The src will dominate
	     the new bb (whereas it might not have dominated dest).  */
	  BB_COPY_PARTITION (bb, edge_in->src);
	}
    }

  make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);

  /* Can't allow a region crossing edge to be fallthrough.  */
  if (BB_PARTITION (bb) != BB_PARTITION (edge_in->dest)
      && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      new_bb = force_nonfallthru (single_succ_edge (bb));
      gcc_assert (!new_bb);
    }

  /* For non-fallthru edges, we must adjust the predecessor's
     jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge redirected = redirect_edge_and_branch (edge_in, bb);
      gcc_assert (redirected);
    }
  else
    {
      if (edge_in->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  /* For asm goto even splitting of fallthru edge might
	     need insn patching, as other labels might point to the
	     old label.  */
	  rtx_insn *last = BB_END (edge_in->src);
	  if (last
	      && JUMP_P (last)
	      && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && extract_asm_operands (PATTERN (last)) != NULL_RTX
	      && patch_jump_insn (last, before, bb))
	    df_set_bb_dirty (edge_in->src);
	}
      redirect_edge_succ (edge_in, bb);
    }

  return bb;
}
1973 /* Queue instructions for insertion on an edge between two basic blocks.
1974 The new instructions and basic blocks (if any) will not appear in the
1975 CFG until commit_edge_insertions is called. */
1977 void
1978 insert_insn_on_edge (rtx pattern, edge e)
1980 /* We cannot insert instructions on an abnormal critical edge.
1981 It will be easier to find the culprit if we die now. */
1982 gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
1984 if (e->insns.r == NULL_RTX)
1985 start_sequence ();
1986 else
1987 push_to_sequence (e->insns.r);
1989 emit_insn (pattern);
1991 e->insns.r = get_insns ();
1992 end_sequence ();
/* Update the CFG for the instructions queued on edge E.  */

void
commit_one_edge_insertion (edge e)
{
  rtx_insn *before = NULL, *after = NULL, *insns, *tmp, *last;
  basic_block bb;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns.r;
  e->insns.r = NULL;

  /* Figure out where to put these insns.  If the destination has
     one predecessor, insert there.  Except for the exit block.  */
  if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      bb = e->dest;

      /* Get the location correct wrt a code label, and "nice" wrt
	 a basic block note, and before everything else.  */
      tmp = BB_HEAD (bb);
      if (LABEL_P (tmp))
	tmp = NEXT_INSN (tmp);
      if (NOTE_INSN_BASIC_BLOCK_P (tmp))
	tmp = NEXT_INSN (tmp);
      if (tmp == BB_HEAD (bb))
	before = tmp;
      else if (tmp)
	after = PREV_INSN (tmp);
      else
	after = get_last_insn ();
    }

  /* If the source has one successor and the edge is not abnormal,
     insert there.  Except for the entry block.
     Don't do this if the predecessor ends in a jump other than
     unconditional simple jump.  E.g. for asm goto that points all
     its labels at the fallthru basic block, we can't insert instructions
     before the asm goto, as the asm goto can have various of side effects,
     and can't emit instructions after the asm goto, as it must end
     the basic block.  */
  else if ((e->flags & EDGE_ABNORMAL) == 0
	   && single_succ_p (e->src)
	   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	   && (!JUMP_P (BB_END (e->src))
	       || simplejump_p (BB_END (e->src))))
    {
      bb = e->src;

      /* It is possible to have a non-simple jump here.  Consider a target
	 where some forms of unconditional jumps clobber a register.  This
	 happens on the fr30 for example.

	 We know this block has a single successor, so we can just emit
	 the queued insns before the jump.  */
      if (JUMP_P (BB_END (bb)))
	before = BB_END (bb);
      else
	{
	  /* We'd better be fallthru, or we've lost track of what's what.  */
	  gcc_assert (e->flags & EDGE_FALLTHRU);

	  after = BB_END (bb);
	}
    }

  /* Otherwise we must split the edge.  */
  else
    {
      bb = split_edge (e);

      /* If E crossed a partition boundary, we needed to make bb end in
	 a region-crossing jump, even though it was originally fallthru.  */
      if (JUMP_P (BB_END (bb)))
	before = BB_END (bb);
      else
	after = BB_END (bb);
    }

  /* Now that we've found the spot, do the insertion.  */
  if (before)
    {
      emit_insn_before_noloc (insns, before, bb);
      last = prev_nonnote_insn (before);
    }
  else
    last = emit_insn_after_noloc (insns, after, bb);

  if (returnjump_p (last))
    {
      /* ??? Remove all outgoing edges from BB and add one for EXIT.
	 This is not currently a problem because this only happens
	 for the (single) epilogue, which already has a fallthru edge
	 to EXIT.  */

      e = single_succ_edge (bb);
      gcc_assert (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
		  && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));

      e->flags &= ~EDGE_FALLTHRU;
      emit_barrier_after (last);

      /* The inserted return jump supersedes the old block-ending jump.  */
      if (before)
	delete_insn (before);
    }
  else
    gcc_assert (!JUMP_P (last));
}
/* Update the CFG for all queued instructions.  */

void
commit_edge_insertions (void)
{
  basic_block bb;

  /* Optimization passes that invoke this routine can cause hot blocks
     previously reached by both hot and cold blocks to become dominated only
     by cold blocks.  This will cause the verification below to fail,
     and lead to now cold code in the hot section.  In some cases this
     may only be visible after newly unreachable blocks are deleted,
     which will be done by fixup_partitions.  */
  fixup_partitions ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* Walk every block (entry included) and commit any pending sequence
     on each of its outgoing edges.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->insns.r)
	  commit_one_edge_insertion (e);
    }
}
/* Print out RTL-specific basic block information (live information
   at start and end with TDF_DETAILS).  FLAGS are the TDF_* masks
   documented in dumpfile.h.  */

static void
rtl_dump_bb (FILE *outf, basic_block bb, int indent, int flags)
{
  rtx_insn *insn;
  rtx_insn *last;
  char *s_indent;

  /* Build the indentation prefix.  NOTE(review): s_indent is prepared
     here but not used below — presumably for callers/format parity;
     confirm against the tree-level dump hook.  */
  s_indent = (char *) alloca ((size_t) indent + 1);
  memset (s_indent, ' ', (size_t) indent);
  s_indent[indent] = '\0';

  if (df && (flags & TDF_DETAILS))
    {
      df_dump_top (bb, outf);
      putc ('\n', outf);
    }

  /* The artificial entry/exit blocks contain no insns to print.  */
  if (bb->index != ENTRY_BLOCK && bb->index != EXIT_BLOCK)
    for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
	 insn = NEXT_INSN (insn))
      {
	if (flags & TDF_DETAILS)
	  df_dump_insn_top (insn, outf);
	if (! (flags & TDF_SLIM))
	  print_rtl_single (outf, insn);
	else
	  dump_insn_slim (outf, insn);
	if (flags & TDF_DETAILS)
	  df_dump_insn_bottom (insn, outf);
      }

  if (df && (flags & TDF_DETAILS))
    {
      df_dump_bottom (bb, outf);
      putc ('\n', outf);
    }
}
/* Like dump_function_to_file, but for RTL.  Print out dataflow information
   for the start of each basic block.  FLAGS are the TDF_* masks documented
   in dumpfile.h.  */

void
print_rtl_with_bb (FILE *outf, const rtx_insn *rtx_first, int flags)
{
  const rtx_insn *tmp_rtx;
  if (rtx_first == 0)
    fprintf (outf, "(nil)\n");
  else
    {
      enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
      int max_uid = get_max_uid ();
      /* Per-UID maps: the bb starting/ending at each insn, and whether
	 an insn belongs to zero, one, or several blocks.  */
      basic_block *start = XCNEWVEC (basic_block, max_uid);
      basic_block *end = XCNEWVEC (basic_block, max_uid);
      enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
      basic_block bb;

      /* After freeing the CFG, we still have BLOCK_FOR_INSN set on most
	 insns, but the CFG is not maintained so the basic block info
	 is not reliable.  Therefore it's omitted from the dumps.  */
      if (! (cfun->curr_properties & PROP_cfg))
	flags &= ~TDF_BLOCKS;

      if (df)
	df_dump_start (outf);

      if (flags & TDF_BLOCKS)
	{
	  /* First pass: record, per insn UID, which bb it belongs to
	     and which bbs start/end at it.  */
	  FOR_EACH_BB_REVERSE_FN (bb, cfun)
	    {
	      rtx_insn *x;

	      start[INSN_UID (BB_HEAD (bb))] = bb;
	      end[INSN_UID (BB_END (bb))] = bb;
	      for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
		{
		  enum bb_state state = IN_MULTIPLE_BB;

		  if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
		    state = IN_ONE_BB;
		  in_bb_p[INSN_UID (x)] = state;

		  if (x == BB_END (bb))
		    break;
		}
	    }
	}

      /* Second pass: print all insns in stream order, with bb headers
	 and footers interleaved where blocks begin and end.  */
      for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
	{
	  if (flags & TDF_BLOCKS)
	    {
	      bb = start[INSN_UID (tmp_rtx)];
	      if (bb != NULL)
		{
		  dump_bb_info (outf, bb, 0, dump_flags | TDF_COMMENT, true, false);
		  if (df && (flags & TDF_DETAILS))
		    df_dump_top (bb, outf);
		}

	      if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
		  && !NOTE_P (tmp_rtx)
		  && !BARRIER_P (tmp_rtx))
		fprintf (outf, ";; Insn is not within a basic block\n");
	      else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
		fprintf (outf, ";; Insn is in multiple basic blocks\n");
	    }

	  if (flags & TDF_DETAILS)
	    df_dump_insn_top (tmp_rtx, outf);
	  if (! (flags & TDF_SLIM))
	    print_rtl_single (outf, tmp_rtx);
	  else
	    dump_insn_slim (outf, tmp_rtx);
	  if (flags & TDF_DETAILS)
	    df_dump_insn_bottom (tmp_rtx, outf);

	  if (flags & TDF_BLOCKS)
	    {
	      bb = end[INSN_UID (tmp_rtx)];
	      if (bb != NULL)
		{
		  dump_bb_info (outf, bb, 0, dump_flags | TDF_COMMENT, false, true);
		  if (df && (flags & TDF_DETAILS))
		    df_dump_bottom (bb, outf);
		  putc ('\n', outf);
		}
	    }
	}

      free (start);
      free (end);
      free (in_bb_p);
    }
}
2277 /* Update the branch probability of BB if a REG_BR_PROB is present. */
2279 void
2280 update_br_prob_note (basic_block bb)
2282 rtx note;
2283 if (!JUMP_P (BB_END (bb)))
2284 return;
2285 note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
2286 if (!note || XINT (note, 0) == BRANCH_EDGE (bb)->probability)
2287 return;
2288 XINT (note, 0) = BRANCH_EDGE (bb)->probability;
2291 /* Get the last insn associated with block BB (that includes barriers and
2292 tablejumps after BB). */
2293 rtx_insn *
2294 get_last_bb_insn (basic_block bb)
2296 rtx_jump_table_data *table;
2297 rtx_insn *tmp;
2298 rtx_insn *end = BB_END (bb);
2300 /* Include any jump table following the basic block. */
2301 if (tablejump_p (end, NULL, &table))
2302 end = table;
2304 /* Include any barriers that may follow the basic block. */
2305 tmp = next_nonnote_insn_bb (end);
2306 while (tmp && BARRIER_P (tmp))
2308 end = tmp;
2309 tmp = next_nonnote_insn_bb (end);
2312 return end;
2315 /* Sanity check partition hotness to ensure that basic blocks in
2316   the cold partition don't dominate basic blocks in the hot partition.
2317 If FLAG_ONLY is true, report violations as errors. Otherwise
2318 re-mark the dominated blocks as cold, since this is run after
2319 cfg optimizations that may make hot blocks previously reached
2320 by both hot and cold blocks now only reachable along cold paths. */
2322 static vec<basic_block>
2323 find_partition_fixes (bool flag_only)
2325 basic_block bb;
2326 vec<basic_block> bbs_in_cold_partition = vNULL;
2327 vec<basic_block> bbs_to_fix = vNULL;
2329 /* Callers check this. */
2330 gcc_checking_assert (crtl->has_bb_partition);
2332 FOR_EACH_BB_FN (bb, cfun)
2333 if ((BB_PARTITION (bb) == BB_COLD_PARTITION))
2334 bbs_in_cold_partition.safe_push (bb);
2336 if (bbs_in_cold_partition.is_empty ())
2337 return vNULL;
2339 bool dom_calculated_here = !dom_info_available_p (CDI_DOMINATORS);
2341 if (dom_calculated_here)
2342 calculate_dominance_info (CDI_DOMINATORS);
2344 while (! bbs_in_cold_partition.is_empty ())
2346 bb = bbs_in_cold_partition.pop ();
2347 /* Any blocks dominated by a block in the cold section
2348 must also be cold. */
2349 basic_block son;
2350 for (son = first_dom_son (CDI_DOMINATORS, bb);
2351 son;
2352 son = next_dom_son (CDI_DOMINATORS, son))
2354 /* If son is not yet cold, then mark it cold here and
2355 enqueue it for further processing. */
2356 if ((BB_PARTITION (son) != BB_COLD_PARTITION))
2358 if (flag_only)
2359 error ("non-cold basic block %d dominated "
2360 "by a block in the cold partition (%d)", son->index, bb->index);
2361 else
2362 BB_SET_PARTITION (son, BB_COLD_PARTITION);
2363 bbs_to_fix.safe_push (son);
2364 bbs_in_cold_partition.safe_push (son);
2369 if (dom_calculated_here)
2370 free_dominance_info (CDI_DOMINATORS);
2372 return bbs_to_fix;
2375 /* Perform cleanup on the hot/cold bb partitioning after optimization
2376 passes that modify the cfg. */
2378 void
2379 fixup_partitions (void)
2381 basic_block bb;
2383 if (!crtl->has_bb_partition)
2384 return;
2386 /* Delete any blocks that became unreachable and weren't
2387 already cleaned up, for example during edge forwarding
2388 and convert_jumps_to_returns. This will expose more
2389 opportunities for fixing the partition boundaries here.
2390 Also, the calculation of the dominance graph during verification
2391 will assert if there are unreachable nodes. */
2392 delete_unreachable_blocks ();
2394 /* If there are partitions, do a sanity check on them: A basic block in
2395   a cold partition cannot dominate a basic block in a hot partition.
2396 Fixup any that now violate this requirement, as a result of edge
2397 forwarding and unreachable block deletion.  */
2398 vec<basic_block> bbs_to_fix = find_partition_fixes (false);
2400 /* Do the partition fixup after all necessary blocks have been converted to
2401 cold, so that we only update the region crossings the minimum number of
2402 places, which can require forcing edges to be non fallthru. */
2403 while (! bbs_to_fix.is_empty ())
2405 bb = bbs_to_fix.pop ();
2406 fixup_new_cold_bb (bb);
2410 /* Verify, in the basic block chain, that there is at most one switch
2411 between hot/cold partitions. This condition will not be true until
2412 after reorder_basic_blocks is called. */
2414 static int
2415 verify_hot_cold_block_grouping (void)
2417 basic_block bb;
2418 int err = 0;
2419 bool switched_sections = false;
2420 int current_partition = BB_UNPARTITIONED;
2422 /* Even after bb reordering is complete, we go into cfglayout mode
2423 again (in compgoto). Ensure we don't call this before going back
2424 into linearized RTL when any layout fixes would have been committed. */
2425 if (!crtl->bb_reorder_complete
2426 || current_ir_type () != IR_RTL_CFGRTL)
2427 return err;
2429 FOR_EACH_BB_FN (bb, cfun)
2431 if (current_partition != BB_UNPARTITIONED
2432 && BB_PARTITION (bb) != current_partition)
2434 if (switched_sections)
2436 error ("multiple hot/cold transitions found (bb %i)",
2437 bb->index);
2438 err = 1;
2440 else
2441 switched_sections = true;
2443 if (!crtl->has_bb_partition)
2444 error ("partition found but function partition flag not set");
2446 current_partition = BB_PARTITION (bb);
2449 return err;
2453 /* Perform several checks on the edges out of each block, such as
2454 the consistency of the branch probabilities, the correctness
2455 of hot/cold partition crossing edges, and the number of expected
2456 successor edges. Also verify that the dominance relationship
2457 between hot/cold blocks is sane. Returns 0 on success, 1 on error. */
2459 static int
2460 rtl_verify_edges (void)
2462 int err = 0;
2463 basic_block bb;
2465 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2467 int n_fallthru = 0, n_branch = 0, n_abnormal_call = 0, n_sibcall = 0;
2468 int n_eh = 0, n_abnormal = 0;
2469 edge e, fallthru = NULL;
2470 edge_iterator ei;
2471 rtx note;
2472 bool has_crossing_edge = false;
/* Check that a conditional jump's REG_BR_PROB note agrees with the
   branch edge's probability recorded in the CFG.  */
2474 if (JUMP_P (BB_END (bb))
2475 && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
2476 && EDGE_COUNT (bb->succs) >= 2
2477 && any_condjump_p (BB_END (bb)))
2479 if (XINT (note, 0) != BRANCH_EDGE (bb)->probability
2480 && profile_status_for_fn (cfun) != PROFILE_ABSENT)
2482 error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
2483 XINT (note, 0), BRANCH_EDGE (bb)->probability);
2484 err = 1;
/* Count successor-edge kinds and validate the hot/cold section-crossing
   flags on each edge.  */
2488 FOR_EACH_EDGE (e, ei, bb->succs)
2490 bool is_crossing;
2492 if (e->flags & EDGE_FALLTHRU)
2493 n_fallthru++, fallthru = e;
2495 is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
2496 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
2497 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun));
2498 has_crossing_edge |= is_crossing;
2499 if (e->flags & EDGE_CROSSING)
2501 if (!is_crossing)
2503 error ("EDGE_CROSSING incorrectly set across same section");
2504 err = 1;
2506 if (e->flags & EDGE_FALLTHRU)
2508 error ("fallthru edge crosses section boundary in bb %i",
2509 e->src->index);
2510 err = 1;
2512 if (e->flags & EDGE_EH)
2514 error ("EH edge crosses section boundary in bb %i",
2515 e->src->index);
2516 err = 1;
2518 if (JUMP_P (BB_END (bb)) && !CROSSING_JUMP_P (BB_END (bb)))
2520 error ("No region crossing jump at section boundary in bb %i",
2521 bb->index);
2522 err = 1;
2525 else if (is_crossing)
2527 error ("EDGE_CROSSING missing across section boundary");
2528 err = 1;
/* An edge with none of the "complex" flags set is a plain branch.  */
2531 if ((e->flags & ~(EDGE_DFS_BACK
2532 | EDGE_CAN_FALLTHRU
2533 | EDGE_IRREDUCIBLE_LOOP
2534 | EDGE_LOOP_EXIT
2535 | EDGE_CROSSING
2536 | EDGE_PRESERVE)) == 0)
2537 n_branch++;
2539 if (e->flags & EDGE_ABNORMAL_CALL)
2540 n_abnormal_call++;
2542 if (e->flags & EDGE_SIBCALL)
2543 n_sibcall++;
2545 if (e->flags & EDGE_EH)
2546 n_eh++;
2548 if (e->flags & EDGE_ABNORMAL)
2549 n_abnormal++;
2552 if (!has_crossing_edge
2553 && JUMP_P (BB_END (bb))
2554 && CROSSING_JUMP_P (BB_END (bb)))
2556 print_rtl_with_bb (stderr, get_insns (), TDF_RTL | TDF_BLOCKS | TDF_DETAILS);
2557 error ("Region crossing jump across same section in bb %i",
2558 bb->index);
2559 err = 1;
/* Cross-check the edge-kind counts against the block-ending insn.  */
2562 if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
2564 error ("missing REG_EH_REGION note at the end of bb %i", bb->index);
2565 err = 1;
2567 if (n_eh > 1)
2569 error ("too many exception handling edges in bb %i", bb->index);
2570 err = 1;
2572 if (n_branch
2573 && (!JUMP_P (BB_END (bb))
2574 || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
2575 || any_condjump_p (BB_END (bb))))))
2577 error ("too many outgoing branch edges from bb %i", bb->index);
2578 err = 1;
2580 if (n_fallthru && any_uncondjump_p (BB_END (bb)))
2582 error ("fallthru edge after unconditional jump in bb %i", bb->index);
2583 err = 1;
2585 if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
2587 error ("wrong number of branch edges after unconditional jump"
2588 " in bb %i", bb->index);
2589 err = 1;
2591 if (n_branch != 1 && any_condjump_p (BB_END (bb))
2592 && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
2594 error ("wrong amount of branch edges after conditional jump"
2595 " in bb %i", bb->index);
2596 err = 1;
2598 if (n_abnormal_call && !CALL_P (BB_END (bb)))
2600 error ("abnormal call edges for non-call insn in bb %i", bb->index);
2601 err = 1;
2603 if (n_sibcall && !CALL_P (BB_END (bb)))
2605 error ("sibcall edges for non-call insn in bb %i", bb->index);
2606 err = 1;
2608 if (n_abnormal > n_eh
2609 && !(CALL_P (BB_END (bb))
2610 && n_abnormal == n_abnormal_call + n_sibcall)
2611 && (!JUMP_P (BB_END (bb))
2612 || any_condjump_p (BB_END (bb))
2613 || any_uncondjump_p (BB_END (bb))))
2615 error ("abnormal edges for no purpose in bb %i", bb->index);
2616 err = 1;
2620 /* If there are partitions, do a sanity check on them: A basic block in
2621 a cold partition cannot dominate a basic block in a hot partition.  */
2622 if (crtl->has_bb_partition && !err)
2624 vec<basic_block> bbs_to_fix = find_partition_fixes (true);
2625 err = !bbs_to_fix.is_empty ();
2628 /* Clean up. */
2629 return err;
2632 /* Checks on the instructions within blocks. Currently checks that each
2633 block starts with a basic block note, and that basic block notes and
2634 control flow jumps are not found in the middle of the block.
2635 Returns 0 on success, 1 on error (may also abort via fatal_insn). */
2636 static int
2637 rtl_verify_bb_insns (void)
2639 rtx_insn *x;
2640 int err = 0;
2641 basic_block bb;
2643 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2645 /* Now check the header of basic
2646 block. It ought to contain optional CODE_LABEL followed
2647 by NOTE_BASIC_BLOCK. */
2648 x = BB_HEAD (bb);
2649 if (LABEL_P (x))
2651 if (BB_END (bb) == x)
2653 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2654 bb->index);
2655 err = 1;
2658 x = NEXT_INSN (x);
/* The insn after the optional label must be this block's bb note.  */
2661 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
2663 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2664 bb->index);
2665 err = 1;
2668 if (BB_END (bb) == x)
2669 /* Do checks for empty blocks here. */
2671 else
/* Scan the body: no stray bb notes, and no control-flow insn other
   than the one ending the block.  */
2672 for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
2674 if (NOTE_INSN_BASIC_BLOCK_P (x))
2676 error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
2677 INSN_UID (x), bb->index);
2678 err = 1;
2681 if (x == BB_END (bb))
2682 break;
2684 if (control_flow_insn_p (x))
2686 error ("in basic block %d:", bb->index);
2687 fatal_insn ("flow control insn inside a basic block", x);
2692 /* Clean up. */
2693 return err;
2696 /* Verify that block pointers for instructions in basic blocks, headers and
2697 footers are set appropriately. */
2699 static int
2700 rtl_verify_bb_pointers (void)
2702 int err = 0;
2703 basic_block bb;
2705 /* Check the general integrity of the basic blocks. */
2706 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2708 rtx_insn *insn;
2710 if (!(bb->flags & BB_RTL))
2712 error ("BB_RTL flag not set for block %d", bb->index);
2713 err = 1;
2716 FOR_BB_INSNS (bb, insn)
2717 if (BLOCK_FOR_INSN (insn) != bb)
2719 error ("insn %d basic block pointer is %d, should be %d",
2720 INSN_UID (insn),
2721 BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
2722 bb->index);
2723 err = 1;
2726 for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
2727 if (!BARRIER_P (insn)
2728 && BLOCK_FOR_INSN (insn) != NULL)
2730 error ("insn %d in header of bb %d has non-NULL basic block",
2731 INSN_UID (insn), bb->index);
2732 err = 1;
2734 for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
2735 if (!BARRIER_P (insn)
2736 && BLOCK_FOR_INSN (insn) != NULL)
2738 error ("insn %d in footer of bb %d has non-NULL basic block",
2739 INSN_UID (insn), bb->index);
2740 err = 1;
2744 /* Clean up. */
2745 return err;
2748 /* Verify the CFG and RTL consistency common for both underlying RTL and
2749 cfglayout RTL.
2751 Currently it does following checks:
2753 - overlapping of basic blocks
2754 - insns with wrong BLOCK_FOR_INSN pointers
2755 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
2756 - tails of basic blocks (ensure that boundary is necessary)
2757 - scans body of the basic block for JUMP_INSN, CODE_LABEL
2758 and NOTE_INSN_BASIC_BLOCK
2759 - verify that no fall_thru edge crosses hot/cold partition boundaries
2760 - verify that there are no pending RTL branch predictions
2761 - verify that hot blocks are not dominated by cold blocks
2763 In future it can be extended check a lot of other stuff as well
2764 (reachability of basic blocks, life information, etc. etc.). */
2766 static int
2767 rtl_verify_flow_info_1 (void)
2769 int err = 0;
2771 err |= rtl_verify_bb_pointers ();
2773 err |= rtl_verify_bb_insns ();
2775 err |= rtl_verify_edges ();
2777 return err;
2780 /* Walk the instruction chain and verify that bb head/end pointers
2781 are correct, and that instructions are in exactly one bb and have
2782 correct block pointers. Returns 0 on success, 1 on error. */
2784 static int
2785 rtl_verify_bb_insn_chain (void)
2787 basic_block bb;
2788 int err = 0;
2789 rtx_insn *x;
2790 rtx_insn *last_head = get_last_insn ();
2791 basic_block *bb_info;
2792 const int max_uid = get_max_uid ();
/* bb_info maps each insn UID to the block we have already assigned it
   to, so an insn appearing in two blocks is detected.  */
2794 bb_info = XCNEWVEC (basic_block, max_uid);
/* Blocks are visited in reverse layout order; last_head is the insn just
   before the previously-visited block's head, i.e. the upper bound of
   the chain region this block may occupy.  */
2796 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2798 rtx_insn *head = BB_HEAD (bb);
2799 rtx_insn *end = BB_END (bb);
2801 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2803 /* Verify the end of the basic block is in the INSN chain. */
2804 if (x == end)
2805 break;
2807 /* And that the code outside of basic blocks has NULL bb field. */
2808 if (!BARRIER_P (x)
2809 && BLOCK_FOR_INSN (x) != NULL)
2811 error ("insn %d outside of basic blocks has non-NULL bb field",
2812 INSN_UID (x));
2813 err = 1;
2817 if (!x)
2819 error ("end insn %d for block %d not found in the insn stream",
2820 INSN_UID (end), bb->index);
2821 err = 1;
2824 /* Work backwards from the end to the head of the basic block
2825 to verify the head is in the RTL chain. */
2826 for (; x != NULL_RTX; x = PREV_INSN (x))
2828 /* While walking over the insn chain, verify insns appear
2829 in only one basic block. */
2830 if (bb_info[INSN_UID (x)] != NULL)
2832 error ("insn %d is in multiple basic blocks (%d and %d)",
2833 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
2834 err = 1;
2837 bb_info[INSN_UID (x)] = bb;
2839 if (x == head)
2840 break;
2842 if (!x)
2844 error ("head insn %d for block %d not found in the insn stream",
2845 INSN_UID (head), bb->index);
2846 err = 1;
2849 last_head = PREV_INSN (x);
/* Whatever precedes the first block's head is outside all blocks.  */
2852 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2854 /* Check that the code before the first basic block has NULL
2855 bb field. */
2856 if (!BARRIER_P (x)
2857 && BLOCK_FOR_INSN (x) != NULL)
2859 error ("insn %d outside of basic blocks has non-NULL bb field",
2860 INSN_UID (x));
2861 err = 1;
2864 free (bb_info);
2866 return err;
2869 /* Verify that fallthru edges point to adjacent blocks in layout order and
2870 that barriers exist after non-fallthru blocks. Returns 0 on success,
2871 1 on error (may also abort via fatal_insn). */
2872 static int
2873 rtl_verify_fallthru (void)
2875 basic_block bb;
2876 int err = 0;
2878 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2880 edge e;
2882 e = find_fallthru_edge (bb->succs);
2883 if (!e)
2885 rtx_insn *insn;
2887 /* Ensure existence of barrier in BB with no fallthru edges. */
2888 for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
/* Hitting the end of the chain or the next block's bb note before
   a barrier means the required barrier is missing.  */
2890 if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
2892 error ("missing barrier after block %i", bb->index);
2893 err = 1;
2894 break;
2896 if (BARRIER_P (insn))
2897 break;
2900 else if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
2901 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
2903 rtx_insn *insn;
/* A fallthru edge must connect layout-adjacent blocks, and nothing
   executable may sit between the two blocks.  */
2905 if (e->src->next_bb != e->dest)
2907 error
2908 ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
2909 e->src->index, e->dest->index);
2910 err = 1;
2912 else
2913 for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
2914 insn = NEXT_INSN (insn))
2915 if (BARRIER_P (insn) || INSN_P (insn))
2917 error ("verify_flow_info: Incorrect fallthru %i->%i",
2918 e->src->index, e->dest->index);
2919 fatal_insn ("wrong insn in the fallthru edge", insn);
2920 err = 1;
2925 return err;
2928 /* Verify that blocks are laid out in consecutive order. While walking the
2929 instructions, verify that all expected instructions are inside the basic
2930 blocks, and that all returns are followed by barriers. Returns 0 on
2931 success; aborts via fatal_insn/internal_error on violations. */
2932 static int
2933 rtl_verify_bb_layout (void)
2935 basic_block bb;
2936 int err = 0;
2937 rtx_insn *x;
2938 int num_bb_notes;
2939 rtx_insn * const rtx_first = get_insns ();
/* curr_bb is the block whose body we are currently inside, or NULL when
   between blocks.  */
2940 basic_block last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun), curr_bb = NULL;
2942 num_bb_notes = 0;
2943 last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun);
2945 for (x = rtx_first; x; x = NEXT_INSN (x))
2947 if (NOTE_INSN_BASIC_BLOCK_P (x))
2949 bb = NOTE_BASIC_BLOCK (x);
2951 num_bb_notes++;
2952 if (bb != last_bb_seen->next_bb)
2953 internal_error ("basic blocks not laid down consecutively");
2955 curr_bb = last_bb_seen = bb;
/* Between blocks only barriers, notes, and labels (plus their jump
   tables) are permitted.  */
2958 if (!curr_bb)
2960 switch (GET_CODE (x))
2962 case BARRIER:
2963 case NOTE:
2964 break;
2966 case CODE_LABEL:
2967 /* An ADDR_VEC is placed outside any basic block. */
2968 if (NEXT_INSN (x)
2969 && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
2970 x = NEXT_INSN (x);
2972 /* But in any case, non-deletable labels can appear anywhere. */
2973 break;
2975 default:
2976 fatal_insn ("insn outside basic block", x);
2980 if (JUMP_P (x)
2981 && returnjump_p (x) && ! condjump_p (x)
2982 && ! (next_nonnote_insn (x) && BARRIER_P (next_nonnote_insn (x))))
2983 fatal_insn ("return not followed by barrier", x);
2985 if (curr_bb && x == BB_END (curr_bb))
2986 curr_bb = NULL;
/* Every block must have contributed exactly one bb note.  */
2989 if (num_bb_notes != n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS)
2990 internal_error
2991 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
2992 num_bb_notes, n_basic_blocks_for_fn (cfun));
2994 return err;
2997 /* Verify the CFG and RTL consistency common for both underlying RTL and
2998 cfglayout RTL, plus consistency checks specific to linearized RTL mode.
3000 Currently it does following checks:
3001 - all checks of rtl_verify_flow_info_1
3002 - test head/end pointers
3003 - check that blocks are laid out in consecutive order
3004 - check that all insns are in the basic blocks
3005 (except the switch handling code, barriers and notes)
3006 - check that all returns are followed by barriers
3007 - check that all fallthru edge points to the adjacent blocks
3008 - verify that there is a single hot/cold partition boundary after bbro */
3010 static int
3011 rtl_verify_flow_info (void)
3013 int err = 0;
3015 err |= rtl_verify_flow_info_1 ();
3017 err |= rtl_verify_bb_insn_chain ();
3019 err |= rtl_verify_fallthru ();
3021 err |= rtl_verify_bb_layout ();
3023 err |= verify_hot_cold_block_grouping ();
3025 return err;
3028 /* Assume that the preceding pass has possibly eliminated jump instructions
3029 or converted the unconditional jumps. Eliminate the edges from CFG.
3030 Return true if any edges are eliminated. */
3032 bool
3033 purge_dead_edges (basic_block bb)
3035 edge e;
3036 rtx_insn *insn = BB_END (bb);
3037 rtx note;
3038 bool purged = false;
3039 bool found;
3040 edge_iterator ei;
/* Skip over trailing debug insns and notes so INSN is the real
   control-relevant insn ending the block.  */
3042 if (DEBUG_INSN_P (insn) && insn != BB_HEAD (bb))
3044 insn = PREV_INSN (insn);
3045 while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));
3047 /* If this instruction cannot trap, remove REG_EH_REGION notes. */
3048 if (NONJUMP_INSN_P (insn)
3049 && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
3051 rtx eqnote;
3053 if (! may_trap_p (PATTERN (insn))
3054 || ((eqnote = find_reg_equal_equiv_note (insn))
3055 && ! may_trap_p (XEXP (eqnote, 0))))
3056 remove_note (insn, note);
3059 /* Cleanup abnormal edges caused by exceptions or non-local gotos. */
3060 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3062 bool remove = false;
3064 /* There are three types of edges we need to handle correctly here: EH
3065 edges, abnormal call EH edges, and abnormal call non-EH edges. The
3066 latter can appear when nonlocal gotos are used. */
3067 if (e->flags & EDGE_ABNORMAL_CALL)
3069 if (!CALL_P (insn))
3070 remove = true;
3071 else if (can_nonlocal_goto (insn))
3073 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
3075 else if (flag_tm && find_reg_note (insn, REG_TM, NULL))
3077 else
3078 remove = true;
3080 else if (e->flags & EDGE_EH)
3081 remove = !can_throw_internal (insn);
3083 if (remove)
3085 remove_edge (e);
3086 df_set_bb_dirty (bb);
3087 purged = true;
3089 else
3090 ei_next (&ei);
/* Blocks ending in a jump: decide edge-by-edge which successors the
   (possibly simplified) jump still justifies.  */
3093 if (JUMP_P (insn))
3095 rtx note;
3096 edge b,f;
3097 edge_iterator ei;
3099 /* We do care only about conditional jumps and simplejumps. */
3100 if (!any_condjump_p (insn)
3101 && !returnjump_p (insn)
3102 && !simplejump_p (insn))
3103 return purged;
3105 /* Branch probability/prediction notes are defined only for
3106 condjumps. We've possibly turned condjump into simplejump. */
3107 if (simplejump_p (insn))
3109 note = find_reg_note (insn, REG_BR_PROB, NULL);
3110 if (note)
3111 remove_note (insn, note);
3112 while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
3113 remove_note (insn, note);
3116 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3118 /* Avoid abnormal flags to leak from computed jumps turned
3119 into simplejumps. */
3121 e->flags &= ~EDGE_ABNORMAL;
3123 /* See if this edge is one we should keep. */
3124 if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
3125 /* A conditional jump can fall through into the next
3126 block, so we should keep the edge. */
3128 ei_next (&ei);
3129 continue;
3131 else if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
3132 && BB_HEAD (e->dest) == JUMP_LABEL (insn))
3133 /* If the destination block is the target of the jump,
3134 keep the edge. */
3136 ei_next (&ei);
3137 continue;
3139 else if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
3140 && returnjump_p (insn))
3141 /* If the destination block is the exit block, and this
3142 instruction is a return, then keep the edge. */
3144 ei_next (&ei);
3145 continue;
3147 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
3148 /* Keep the edges that correspond to exceptions thrown by
3149 this instruction and rematerialize the EDGE_ABNORMAL
3150 flag we just cleared above. */
3152 e->flags |= EDGE_ABNORMAL;
3153 ei_next (&ei);
3154 continue;
3157 /* We do not need this edge. */
3158 df_set_bb_dirty (bb);
3159 purged = true;
3160 remove_edge (e);
3163 if (EDGE_COUNT (bb->succs) == 0 || !purged)
3164 return purged;
3166 if (dump_file)
3167 fprintf (dump_file, "Purged edges from bb %i\n", bb->index);
3169 if (!optimize)
3170 return purged;
3172 /* Redistribute probabilities. */
3173 if (single_succ_p (bb))
3175 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
3176 single_succ_edge (bb)->count = bb->count;
3178 else
3180 note = find_reg_note (insn, REG_BR_PROB, NULL);
3181 if (!note)
3182 return purged;
3184 b = BRANCH_EDGE (bb);
3185 f = FALLTHRU_EDGE (bb);
3186 b->probability = XINT (note, 0);
3187 f->probability = REG_BR_PROB_BASE - b->probability;
3188 /* Update these to use GCOV_COMPUTE_SCALE. */
3189 b->count = bb->count * b->probability / REG_BR_PROB_BASE;
3190 f->count = bb->count * f->probability / REG_BR_PROB_BASE;
3193 return purged;
3195 else if (CALL_P (insn) && SIBLING_CALL_P (insn))
3197 /* First, there should not be any EH or ABCALL edges resulting
3198 from non-local gotos and the like. If there were, we shouldn't
3199 have created the sibcall in the first place. Second, there
3200 should of course never have been a fallthru edge. */
3201 gcc_assert (single_succ_p (bb));
3202 gcc_assert (single_succ_edge (bb)->flags
3203 == (EDGE_SIBCALL | EDGE_ABNORMAL));
3205 return 0;
3208 /* If we don't see a jump insn, we don't know exactly why the block would
3209 have been broken at this point. Look for a simple, non-fallthru edge,
3210 as these are only created by conditional branches. If we find such an
3211 edge we know that there used to be a jump here and can then safely
3212 remove all non-fallthru edges. */
3213 found = false;
3214 FOR_EACH_EDGE (e, ei, bb->succs)
3215 if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
3217 found = true;
3218 break;
3221 if (!found)
3222 return purged;
3224 /* Remove all but the fake and fallthru edges. The fake edge may be
3225 the only successor for this block in the case of noreturn
3226 calls. */
3227 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3229 if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
3231 df_set_bb_dirty (bb);
3232 remove_edge (e);
3233 purged = true;
3235 else
3236 ei_next (&ei);
3239 gcc_assert (single_succ_p (bb));
3241 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
3242 single_succ_edge (bb)->count = bb->count;
3244 if (dump_file)
3245 fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
3246 bb->index);
3247 return purged;
3250 /* Search all basic blocks for potentially dead edges and purge them. Return
3251 true if some edge has been eliminated. */
3253 bool
3254 purge_all_dead_edges (void)
3256 int purged = false;
3257 basic_block bb;
3259 FOR_EACH_BB_FN (bb, cfun)
3261 bool purged_here = purge_dead_edges (bb);
3263 purged |= purged_here;
3266 return purged;
3269 /* This is used by a few passes that emit some instructions after abnormal
3270 calls, moving the basic block's end, while they in fact do want to emit
3271 them on the fallthru edge. Look for abnormal call edges, find backward
3272 the call in the block and insert the instructions on the edge instead.
3274 Similarly, handle instructions throwing exceptions internally.
3276 Return true when instructions have been found and inserted on edges. */
3278 bool
3279 fixup_abnormal_edges (void)
3281 bool inserted = false;
3282 basic_block bb;
3284 FOR_EACH_BB_FN (bb, cfun)
3286 edge e;
3287 edge_iterator ei;
3289 /* Look for cases we are interested in - calls or instructions causing
3290 exceptions. */
3291 FOR_EACH_EDGE (e, ei, bb->succs)
3292 if ((e->flags & EDGE_ABNORMAL_CALL)
3293 || ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
3294 == (EDGE_ABNORMAL | EDGE_EH)))
3295 break;
/* Only act when the block has such an edge yet its last insn is not
   itself the call/trapping insn — i.e. insns were emitted after it.  */
3297 if (e && !CALL_P (BB_END (bb)) && !can_throw_internal (BB_END (bb)))
3299 rtx_insn *insn;
3301 /* Get past the new insns generated. Allow notes, as the insns
3302 may be already deleted. */
3303 insn = BB_END (bb);
3304 while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
3305 && !can_throw_internal (insn)
3306 && insn != BB_HEAD (bb))
3307 insn = PREV_INSN (insn);
3309 if (CALL_P (insn) || can_throw_internal (insn))
3311 rtx_insn *stop, *next;
3313 e = find_fallthru_edge (bb->succs);
/* Truncate the block at the call/trapping insn and move each
   following real insn onto the fallthru edge.  */
3315 stop = NEXT_INSN (BB_END (bb));
3316 BB_END (bb) = insn;
3318 for (insn = NEXT_INSN (insn); insn != stop; insn = next)
3320 next = NEXT_INSN (insn);
3321 if (INSN_P (insn))
3323 delete_insn (insn);
3325 /* Sometimes there's still the return value USE.
3326 If it's placed after a trapping call (i.e. that
3327 call is the last insn anyway), we have no fallthru
3328 edge. Simply delete this use and don't try to insert
3329 on the non-existent edge. */
3330 if (GET_CODE (PATTERN (insn)) != USE)
3332 /* We're not deleting it, we're moving it. */
3333 insn->set_undeleted ();
3334 SET_PREV_INSN (insn) = NULL_RTX;
3335 SET_NEXT_INSN (insn) = NULL_RTX;
3337 insert_insn_on_edge (insn, e);
3338 inserted = true;
3341 else if (!BARRIER_P (insn))
3342 set_block_for_insn (insn, NULL);
3346 /* It may be that we don't find any trapping insn. In this
3347 case we discovered quite late that the insn that had been
3348 marked as can_throw_internal in fact couldn't trap at all.
3349 So we should in fact delete the EH edges out of the block. */
3350 else
3351 purge_dead_edges (bb);
3355 return inserted;
3358 /* Cut the insns from FIRST to LAST out of the insns stream. */
3360 rtx_insn *
3361 unlink_insn_chain (rtx_insn *first, rtx_insn *last)
3363 rtx_insn *prevfirst = PREV_INSN (first);
3364 rtx_insn *nextlast = NEXT_INSN (last);
3366 SET_PREV_INSN (first) = NULL;
3367 SET_NEXT_INSN (last) = NULL;
3368 if (prevfirst)
3369 SET_NEXT_INSN (prevfirst) = nextlast;
3370 if (nextlast)
3371 SET_PREV_INSN (nextlast) = prevfirst;
3372 else
3373 set_last_insn (prevfirst);
3374 if (!prevfirst)
3375 set_first_insn (nextlast);
3376 return first;
3379 /* Skip over inter-block insns occurring after BB which are typically
3380 associated with BB (e.g., barriers). If there are any such insns,
3381 we return the last one. Otherwise, we return the end of BB. */
3383 static rtx_insn *
3384 skip_insns_after_block (basic_block bb)
3386 rtx_insn *insn, *last_insn, *next_head, *prev;
3388 next_head = NULL;
3389 if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3390 next_head = BB_HEAD (bb->next_bb);
/* Scan forward from BB's end up to (but not including) the next block's
   head, remembering the last insn that belongs with BB.  */
3392 for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
3394 if (insn == next_head)
3395 break;
3397 switch (GET_CODE (insn))
3399 case BARRIER:
3400 last_insn = insn;
3401 continue;
3403 case NOTE:
3404 switch (NOTE_KIND (insn))
3406 case NOTE_INSN_BLOCK_END:
3407 gcc_unreachable ();
3408 continue;
3409 default:
3410 continue;
3411 break;
3413 break;
3415 case CODE_LABEL:
/* A label followed by its jump table stays with BB (tablejump).  */
3416 if (NEXT_INSN (insn)
3417 && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
3419 insn = NEXT_INSN (insn);
3420 last_insn = insn;
3421 continue;
3423 break;
3425 default:
3426 break;
3429 break;
3432 /* It is possible to hit contradictory sequence. For instance:
3434 jump_insn
3435 NOTE_INSN_BLOCK_BEG
3436 barrier
3438 Where barrier belongs to jump_insn, but the note does not. This can be
3439 created by removing the basic block originally following
3440 NOTE_INSN_BLOCK_BEG. In such case reorder the notes. */
3442 for (insn = last_insn; insn != BB_END (bb); insn = prev)
3444 prev = PREV_INSN (insn);
3445 if (NOTE_P (insn))
3446 switch (NOTE_KIND (insn))
3448 case NOTE_INSN_BLOCK_END:
3449 gcc_unreachable ();
3450 break;
3451 case NOTE_INSN_DELETED:
3452 case NOTE_INSN_DELETED_LABEL:
3453 case NOTE_INSN_DELETED_DEBUG_LABEL:
3454 continue;
3455 default:
3456 reorder_insns (insn, insn, last_insn);
3460 return last_insn;
3463 /* Locate or create a label for a given basic block. */
3465 static rtx
3466 label_for_bb (basic_block bb)
3468 rtx label = BB_HEAD (bb);
3470 if (!LABEL_P (label))
3472 if (dump_file)
3473 fprintf (dump_file, "Emitting label for block %d\n", bb->index);
3475 label = block_label (bb);
3478 return label;
3481 /* Locate the effective beginning and end of the insn chain for each
3482 block, as defined by skip_insns_after_block above. */
3484 static void
3485 record_effective_endpoints (void)
3487 rtx_insn *next_insn;
3488 basic_block bb;
3489 rtx_insn *insn;
3491 for (insn = get_insns ();
3492 insn
3493 && NOTE_P (insn)
3494 && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
3495 insn = NEXT_INSN (insn))
3496 continue;
3497 /* No basic blocks at all? */
3498 gcc_assert (insn);
3500 if (PREV_INSN (insn))
3501 cfg_layout_function_header =
3502 unlink_insn_chain (get_insns (), PREV_INSN (insn));
3503 else
3504 cfg_layout_function_header = NULL;
3506 next_insn = get_insns ();
3507 FOR_EACH_BB_FN (bb, cfun)
3509 rtx_insn *end;
3511 if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
3512 BB_HEADER (bb) = unlink_insn_chain (next_insn,
3513 PREV_INSN (BB_HEAD (bb)));
3514 end = skip_insns_after_block (bb);
3515 if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
3516 BB_FOOTER (bb) = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
3517 next_insn = NEXT_INSN (BB_END (bb));
3520 cfg_layout_function_footer = next_insn;
3521 if (cfg_layout_function_footer)
3522 cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
3525 namespace {
3527 const pass_data pass_data_into_cfg_layout_mode =
3529 RTL_PASS, /* type */
3530 "into_cfglayout", /* name */
3531 OPTGROUP_NONE, /* optinfo_flags */
3532 TV_CFG, /* tv_id */
3533 0, /* properties_required */
3534 PROP_cfglayout, /* properties_provided */
3535 0, /* properties_destroyed */
3536 0, /* todo_flags_start */
3537 0, /* todo_flags_finish */
3540 class pass_into_cfg_layout_mode : public rtl_opt_pass
3542 public:
3543 pass_into_cfg_layout_mode (gcc::context *ctxt)
3544 : rtl_opt_pass (pass_data_into_cfg_layout_mode, ctxt)
3547 /* opt_pass methods: */
3548 virtual unsigned int execute (function *)
3550 cfg_layout_initialize (0);
3551 return 0;
3554 }; // class pass_into_cfg_layout_mode
3556 } // anon namespace
3558 rtl_opt_pass *
3559 make_pass_into_cfg_layout_mode (gcc::context *ctxt)
3561 return new pass_into_cfg_layout_mode (ctxt);
3564 namespace {
3566 const pass_data pass_data_outof_cfg_layout_mode =
3568 RTL_PASS, /* type */
3569 "outof_cfglayout", /* name */
3570 OPTGROUP_NONE, /* optinfo_flags */
3571 TV_CFG, /* tv_id */
3572 0, /* properties_required */
3573 0, /* properties_provided */
3574 PROP_cfglayout, /* properties_destroyed */
3575 0, /* todo_flags_start */
3576 0, /* todo_flags_finish */
3579 class pass_outof_cfg_layout_mode : public rtl_opt_pass
3581 public:
3582 pass_outof_cfg_layout_mode (gcc::context *ctxt)
3583 : rtl_opt_pass (pass_data_outof_cfg_layout_mode, ctxt)
3586 /* opt_pass methods: */
3587 virtual unsigned int execute (function *);
3589 }; // class pass_outof_cfg_layout_mode
3591 unsigned int
3592 pass_outof_cfg_layout_mode::execute (function *fun)
3594 basic_block bb;
3596 FOR_EACH_BB_FN (bb, fun)
3597 if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (fun))
3598 bb->aux = bb->next_bb;
3600 cfg_layout_finalize ();
3602 return 0;
3605 } // anon namespace
3607 rtl_opt_pass *
3608 make_pass_outof_cfg_layout_mode (gcc::context *ctxt)
3610 return new pass_outof_cfg_layout_mode (ctxt);
3614 /* Link the basic blocks in the correct order, compacting the basic
3615 block queue while at it. If STAY_IN_CFGLAYOUT_MODE is false, this
3616 function also clears the basic block header and footer fields.
3618 This function is usually called after a pass (e.g. tracer) finishes
3619 some transformations while in cfglayout mode. The required sequence
3620 of the basic blocks is in a linked list along the bb->aux field.
3621 This functions re-links the basic block prev_bb and next_bb pointers
3622 accordingly, and it compacts and renumbers the blocks.
3624 FIXME: This currently works only for RTL, but the only RTL-specific
3625 bits are the STAY_IN_CFGLAYOUT_MODE bits. The tracer pass was moved
3626 to GIMPLE a long time ago, but it doesn't relink the basic block
3627 chain. It could do that (to give better initial RTL) if this function
3628 is made IR-agnostic (and moved to cfganal.c or cfg.c while at it). */
3630 void
3631 relink_block_chain (bool stay_in_cfglayout_mode)
3633 basic_block bb, prev_bb;
3634 int index;
3636 /* Maybe dump the re-ordered sequence. */
3637 if (dump_file)
3639 fprintf (dump_file, "Reordered sequence:\n");
3640 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, index =
3641 NUM_FIXED_BLOCKS;
3643 bb = (basic_block) bb->aux, index++)
3645 fprintf (dump_file, " %i ", index);
3646 if (get_bb_original (bb))
3647 fprintf (dump_file, "duplicate of %i ",
3648 get_bb_original (bb)->index);
3649 else if (forwarder_block_p (bb)
3650 && !LABEL_P (BB_HEAD (bb)))
3651 fprintf (dump_file, "compensation ");
3652 else
3653 fprintf (dump_file, "bb %i ", bb->index);
3654 fprintf (dump_file, " [%i]\n", bb->frequency);
3658 /* Now reorder the blocks. */
3659 prev_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
3660 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
3661 for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
3663 bb->prev_bb = prev_bb;
3664 prev_bb->next_bb = bb;
3666 prev_bb->next_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
3667 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb = prev_bb;
3669 /* Then, clean up the aux fields. */
3670 FOR_ALL_BB_FN (bb, cfun)
3672 bb->aux = NULL;
3673 if (!stay_in_cfglayout_mode)
3674 BB_HEADER (bb) = BB_FOOTER (bb) = NULL;
3677 /* Maybe reset the original copy tables, they are not valid anymore
3678 when we renumber the basic blocks in compact_blocks. If we are
3679 are going out of cfglayout mode, don't re-allocate the tables. */
3680 free_original_copy_tables ();
3681 if (stay_in_cfglayout_mode)
3682 initialize_original_copy_tables ();
3684 /* Finally, put basic_block_info in the new order. */
3685 compact_blocks ();
3689 /* Given a reorder chain, rearrange the code to match. */
3691 static void
3692 fixup_reorder_chain (void)
3694 basic_block bb;
3695 rtx_insn *insn = NULL;
3697 if (cfg_layout_function_header)
3699 set_first_insn (cfg_layout_function_header);
3700 insn = cfg_layout_function_header;
3701 while (NEXT_INSN (insn))
3702 insn = NEXT_INSN (insn);
3705 /* First do the bulk reordering -- rechain the blocks without regard to
3706 the needed changes to jumps and labels. */
3708 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = (basic_block)
3709 bb->aux)
3711 if (BB_HEADER (bb))
3713 if (insn)
3714 SET_NEXT_INSN (insn) = BB_HEADER (bb);
3715 else
3716 set_first_insn (BB_HEADER (bb));
3717 SET_PREV_INSN (BB_HEADER (bb)) = insn;
3718 insn = BB_HEADER (bb);
3719 while (NEXT_INSN (insn))
3720 insn = NEXT_INSN (insn);
3722 if (insn)
3723 SET_NEXT_INSN (insn) = BB_HEAD (bb);
3724 else
3725 set_first_insn (BB_HEAD (bb));
3726 SET_PREV_INSN (BB_HEAD (bb)) = insn;
3727 insn = BB_END (bb);
3728 if (BB_FOOTER (bb))
3730 SET_NEXT_INSN (insn) = BB_FOOTER (bb);
3731 SET_PREV_INSN (BB_FOOTER (bb)) = insn;
3732 while (NEXT_INSN (insn))
3733 insn = NEXT_INSN (insn);
3737 SET_NEXT_INSN (insn) = cfg_layout_function_footer;
3738 if (cfg_layout_function_footer)
3739 SET_PREV_INSN (cfg_layout_function_footer) = insn;
3741 while (NEXT_INSN (insn))
3742 insn = NEXT_INSN (insn);
3744 set_last_insn (insn);
3745 #ifdef ENABLE_CHECKING
3746 verify_insn_chain ();
3747 #endif
3749 /* Now add jumps and labels as needed to match the blocks new
3750 outgoing edges. */
3752 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb ; bb = (basic_block)
3753 bb->aux)
3755 edge e_fall, e_taken, e;
3756 rtx_insn *bb_end_insn;
3757 rtx ret_label = NULL_RTX;
3758 basic_block nb;
3759 edge_iterator ei;
3761 if (EDGE_COUNT (bb->succs) == 0)
3762 continue;
3764 /* Find the old fallthru edge, and another non-EH edge for
3765 a taken jump. */
3766 e_taken = e_fall = NULL;
3768 FOR_EACH_EDGE (e, ei, bb->succs)
3769 if (e->flags & EDGE_FALLTHRU)
3770 e_fall = e;
3771 else if (! (e->flags & EDGE_EH))
3772 e_taken = e;
3774 bb_end_insn = BB_END (bb);
3775 if (JUMP_P (bb_end_insn))
3777 ret_label = JUMP_LABEL (bb_end_insn);
3778 if (any_condjump_p (bb_end_insn))
3780 /* This might happen if the conditional jump has side
3781 effects and could therefore not be optimized away.
3782 Make the basic block to end with a barrier in order
3783 to prevent rtl_verify_flow_info from complaining. */
3784 if (!e_fall)
3786 gcc_assert (!onlyjump_p (bb_end_insn)
3787 || returnjump_p (bb_end_insn)
3788 || (e_taken->flags & EDGE_CROSSING));
3789 emit_barrier_after (bb_end_insn);
3790 continue;
3793 /* If the old fallthru is still next, nothing to do. */
3794 if (bb->aux == e_fall->dest
3795 || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3796 continue;
3798 /* The degenerated case of conditional jump jumping to the next
3799 instruction can happen for jumps with side effects. We need
3800 to construct a forwarder block and this will be done just
3801 fine by force_nonfallthru below. */
3802 if (!e_taken)
3805 /* There is another special case: if *neither* block is next,
3806 such as happens at the very end of a function, then we'll
3807 need to add a new unconditional jump. Choose the taken
3808 edge based on known or assumed probability. */
3809 else if (bb->aux != e_taken->dest)
3811 rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
3813 if (note
3814 && XINT (note, 0) < REG_BR_PROB_BASE / 2
3815 && invert_jump (bb_end_insn,
3816 (e_fall->dest
3817 == EXIT_BLOCK_PTR_FOR_FN (cfun)
3818 ? NULL_RTX
3819 : label_for_bb (e_fall->dest)), 0))
3821 e_fall->flags &= ~EDGE_FALLTHRU;
3822 gcc_checking_assert (could_fall_through
3823 (e_taken->src, e_taken->dest));
3824 e_taken->flags |= EDGE_FALLTHRU;
3825 update_br_prob_note (bb);
3826 e = e_fall, e_fall = e_taken, e_taken = e;
3830 /* If the "jumping" edge is a crossing edge, and the fall
3831 through edge is non-crossing, leave things as they are. */
3832 else if ((e_taken->flags & EDGE_CROSSING)
3833 && !(e_fall->flags & EDGE_CROSSING))
3834 continue;
3836 /* Otherwise we can try to invert the jump. This will
3837 basically never fail, however, keep up the pretense. */
3838 else if (invert_jump (bb_end_insn,
3839 (e_fall->dest
3840 == EXIT_BLOCK_PTR_FOR_FN (cfun)
3841 ? NULL_RTX
3842 : label_for_bb (e_fall->dest)), 0))
3844 e_fall->flags &= ~EDGE_FALLTHRU;
3845 gcc_checking_assert (could_fall_through
3846 (e_taken->src, e_taken->dest));
3847 e_taken->flags |= EDGE_FALLTHRU;
3848 update_br_prob_note (bb);
3849 if (LABEL_NUSES (ret_label) == 0
3850 && single_pred_p (e_taken->dest))
3851 delete_insn (ret_label);
3852 continue;
3855 else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
3857 /* If the old fallthru is still next or if
3858 asm goto doesn't have a fallthru (e.g. when followed by
3859 __builtin_unreachable ()), nothing to do. */
3860 if (! e_fall
3861 || bb->aux == e_fall->dest
3862 || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3863 continue;
3865 /* Otherwise we'll have to use the fallthru fixup below. */
3867 else
3869 /* Otherwise we have some return, switch or computed
3870 jump. In the 99% case, there should not have been a
3871 fallthru edge. */
3872 gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
3873 continue;
3876 else
3878 /* No fallthru implies a noreturn function with EH edges, or
3879 something similarly bizarre. In any case, we don't need to
3880 do anything. */
3881 if (! e_fall)
3882 continue;
3884 /* If the fallthru block is still next, nothing to do. */
3885 if (bb->aux == e_fall->dest)
3886 continue;
3888 /* A fallthru to exit block. */
3889 if (e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3890 continue;
3893 /* We got here if we need to add a new jump insn.
3894 Note force_nonfallthru can delete E_FALL and thus we have to
3895 save E_FALL->src prior to the call to force_nonfallthru. */
3896 nb = force_nonfallthru_and_redirect (e_fall, e_fall->dest, ret_label);
3897 if (nb)
3899 nb->aux = bb->aux;
3900 bb->aux = nb;
3901 /* Don't process this new block. */
3902 bb = nb;
3906 relink_block_chain (/*stay_in_cfglayout_mode=*/false);
3908 /* Annoying special case - jump around dead jumptables left in the code. */
3909 FOR_EACH_BB_FN (bb, cfun)
3911 edge e = find_fallthru_edge (bb->succs);
3913 if (e && !can_fallthru (e->src, e->dest))
3914 force_nonfallthru (e);
3917 /* Ensure goto_locus from edges has some instructions with that locus
3918 in RTL. */
3919 if (!optimize)
3920 FOR_EACH_BB_FN (bb, cfun)
3922 edge e;
3923 edge_iterator ei;
3925 FOR_EACH_EDGE (e, ei, bb->succs)
3926 if (LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
3927 && !(e->flags & EDGE_ABNORMAL))
3929 edge e2;
3930 edge_iterator ei2;
3931 basic_block dest, nb;
3932 rtx_insn *end;
3934 insn = BB_END (e->src);
3935 end = PREV_INSN (BB_HEAD (e->src));
3936 while (insn != end
3937 && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
3938 insn = PREV_INSN (insn);
3939 if (insn != end
3940 && INSN_LOCATION (insn) == e->goto_locus)
3941 continue;
3942 if (simplejump_p (BB_END (e->src))
3943 && !INSN_HAS_LOCATION (BB_END (e->src)))
3945 INSN_LOCATION (BB_END (e->src)) = e->goto_locus;
3946 continue;
3948 dest = e->dest;
3949 if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3951 /* Non-fallthru edges to the exit block cannot be split. */
3952 if (!(e->flags & EDGE_FALLTHRU))
3953 continue;
3955 else
3957 insn = BB_HEAD (dest);
3958 end = NEXT_INSN (BB_END (dest));
3959 while (insn != end && !NONDEBUG_INSN_P (insn))
3960 insn = NEXT_INSN (insn);
3961 if (insn != end && INSN_HAS_LOCATION (insn)
3962 && INSN_LOCATION (insn) == e->goto_locus)
3963 continue;
3965 nb = split_edge (e);
3966 if (!INSN_P (BB_END (nb)))
3967 BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
3968 nb);
3969 INSN_LOCATION (BB_END (nb)) = e->goto_locus;
3971 /* If there are other incoming edges to the destination block
3972 with the same goto locus, redirect them to the new block as
3973 well, this can prevent other such blocks from being created
3974 in subsequent iterations of the loop. */
3975 for (ei2 = ei_start (dest->preds); (e2 = ei_safe_edge (ei2)); )
3976 if (LOCATION_LOCUS (e2->goto_locus) != UNKNOWN_LOCATION
3977 && !(e2->flags & (EDGE_ABNORMAL | EDGE_FALLTHRU))
3978 && e->goto_locus == e2->goto_locus)
3979 redirect_edge_and_branch (e2, nb);
3980 else
3981 ei_next (&ei2);
3986 /* Perform sanity checks on the insn chain.
3987 1. Check that next/prev pointers are consistent in both the forward and
3988 reverse direction.
3989 2. Count insns in chain, going both directions, and check if equal.
3990 3. Check that get_last_insn () returns the actual end of chain. */
3992 DEBUG_FUNCTION void
3993 verify_insn_chain (void)
3995 rtx_insn *x, *prevx, *nextx;
3996 int insn_cnt1, insn_cnt2;
3998 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
3999 x != 0;
4000 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
4001 gcc_assert (PREV_INSN (x) == prevx);
4003 gcc_assert (prevx == get_last_insn ());
4005 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
4006 x != 0;
4007 nextx = x, insn_cnt2++, x = PREV_INSN (x))
4008 gcc_assert (NEXT_INSN (x) == nextx);
4010 gcc_assert (insn_cnt1 == insn_cnt2);
4013 /* If we have assembler epilogues, the block falling through to exit must
4014 be the last one in the reordered chain when we reach final. Ensure
4015 that this condition is met. */
4016 static void
4017 fixup_fallthru_exit_predecessor (void)
4019 edge e;
4020 basic_block bb = NULL;
4022 /* This transformation is not valid before reload, because we might
4023 separate a call from the instruction that copies the return
4024 value. */
4025 gcc_assert (reload_completed);
4027 e = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
4028 if (e)
4029 bb = e->src;
4031 if (bb && bb->aux)
4033 basic_block c = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
4035 /* If the very first block is the one with the fall-through exit
4036 edge, we have to split that block. */
4037 if (c == bb)
4039 bb = split_block (bb, NULL)->dest;
4040 bb->aux = c->aux;
4041 c->aux = bb;
4042 BB_FOOTER (bb) = BB_FOOTER (c);
4043 BB_FOOTER (c) = NULL;
4046 while (c->aux != bb)
4047 c = (basic_block) c->aux;
4049 c->aux = bb->aux;
4050 while (c->aux)
4051 c = (basic_block) c->aux;
4053 c->aux = bb;
4054 bb->aux = NULL;
4058 /* In case there are more than one fallthru predecessors of exit, force that
4059 there is only one. */
4061 static void
4062 force_one_exit_fallthru (void)
4064 edge e, predecessor = NULL;
4065 bool more = false;
4066 edge_iterator ei;
4067 basic_block forwarder, bb;
4069 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
4070 if (e->flags & EDGE_FALLTHRU)
4072 if (predecessor == NULL)
4073 predecessor = e;
4074 else
4076 more = true;
4077 break;
4081 if (!more)
4082 return;
4084 /* Exit has several fallthru predecessors. Create a forwarder block for
4085 them. */
4086 forwarder = split_edge (predecessor);
4087 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
4088 (e = ei_safe_edge (ei)); )
4090 if (e->src == forwarder
4091 || !(e->flags & EDGE_FALLTHRU))
4092 ei_next (&ei);
4093 else
4094 redirect_edge_and_branch_force (e, forwarder);
4097 /* Fix up the chain of blocks -- make FORWARDER immediately precede the
4098 exit block. */
4099 FOR_EACH_BB_FN (bb, cfun)
4101 if (bb->aux == NULL && bb != forwarder)
4103 bb->aux = forwarder;
4104 break;
4109 /* Return true in case it is possible to duplicate the basic block BB. */
4111 static bool
4112 cfg_layout_can_duplicate_bb_p (const_basic_block bb)
4114 /* Do not attempt to duplicate tablejumps, as we need to unshare
4115 the dispatch table. This is difficult to do, as the instructions
4116 computing jump destination may be hoisted outside the basic block. */
4117 if (tablejump_p (BB_END (bb), NULL, NULL))
4118 return false;
4120 /* Do not duplicate blocks containing insns that can't be copied. */
4121 if (targetm.cannot_copy_insn_p)
4123 rtx_insn *insn = BB_HEAD (bb);
4124 while (1)
4126 if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
4127 return false;
4128 if (insn == BB_END (bb))
4129 break;
4130 insn = NEXT_INSN (insn);
4134 return true;
4137 rtx_insn *
4138 duplicate_insn_chain (rtx_insn *from, rtx_insn *to)
4140 rtx_insn *insn, *next, *copy;
4141 rtx_note *last;
4143 /* Avoid updating of boundaries of previous basic block. The
4144 note will get removed from insn stream in fixup. */
4145 last = emit_note (NOTE_INSN_DELETED);
4147 /* Create copy at the end of INSN chain. The chain will
4148 be reordered later. */
4149 for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
4151 switch (GET_CODE (insn))
4153 case DEBUG_INSN:
4154 /* Don't duplicate label debug insns. */
4155 if (TREE_CODE (INSN_VAR_LOCATION_DECL (insn)) == LABEL_DECL)
4156 break;
4157 /* FALLTHRU */
4158 case INSN:
4159 case CALL_INSN:
4160 case JUMP_INSN:
4161 copy = emit_copy_of_insn_after (insn, get_last_insn ());
4162 if (JUMP_P (insn) && JUMP_LABEL (insn) != NULL_RTX
4163 && ANY_RETURN_P (JUMP_LABEL (insn)))
4164 JUMP_LABEL (copy) = JUMP_LABEL (insn);
4165 maybe_copy_prologue_epilogue_insn (insn, copy);
4166 break;
4168 case JUMP_TABLE_DATA:
4169 /* Avoid copying of dispatch tables. We never duplicate
4170 tablejumps, so this can hit only in case the table got
4171 moved far from original jump.
4172 Avoid copying following barrier as well if any
4173 (and debug insns in between). */
4174 for (next = NEXT_INSN (insn);
4175 next != NEXT_INSN (to);
4176 next = NEXT_INSN (next))
4177 if (!DEBUG_INSN_P (next))
4178 break;
4179 if (next != NEXT_INSN (to) && BARRIER_P (next))
4180 insn = next;
4181 break;
4183 case CODE_LABEL:
4184 break;
4186 case BARRIER:
4187 emit_barrier ();
4188 break;
4190 case NOTE:
4191 switch (NOTE_KIND (insn))
4193 /* In case prologue is empty and function contain label
4194 in first BB, we may want to copy the block. */
4195 case NOTE_INSN_PROLOGUE_END:
4197 case NOTE_INSN_DELETED:
4198 case NOTE_INSN_DELETED_LABEL:
4199 case NOTE_INSN_DELETED_DEBUG_LABEL:
4200 /* No problem to strip these. */
4201 case NOTE_INSN_FUNCTION_BEG:
4202 /* There is always just single entry to function. */
4203 case NOTE_INSN_BASIC_BLOCK:
4204 /* We should only switch text sections once. */
4205 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4206 break;
4208 case NOTE_INSN_EPILOGUE_BEG:
4209 emit_note_copy (as_a <rtx_note *> (insn));
4210 break;
4212 default:
4213 /* All other notes should have already been eliminated. */
4214 gcc_unreachable ();
4216 break;
4217 default:
4218 gcc_unreachable ();
4221 insn = NEXT_INSN (last);
4222 delete_insn (last);
4223 return insn;
4226 /* Create a duplicate of the basic block BB. */
4228 static basic_block
4229 cfg_layout_duplicate_bb (basic_block bb)
4231 rtx_insn *insn;
4232 basic_block new_bb;
4234 insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
4235 new_bb = create_basic_block (insn,
4236 insn ? get_last_insn () : NULL,
4237 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
4239 BB_COPY_PARTITION (new_bb, bb);
4240 if (BB_HEADER (bb))
4242 insn = BB_HEADER (bb);
4243 while (NEXT_INSN (insn))
4244 insn = NEXT_INSN (insn);
4245 insn = duplicate_insn_chain (BB_HEADER (bb), insn);
4246 if (insn)
4247 BB_HEADER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
4250 if (BB_FOOTER (bb))
4252 insn = BB_FOOTER (bb);
4253 while (NEXT_INSN (insn))
4254 insn = NEXT_INSN (insn);
4255 insn = duplicate_insn_chain (BB_FOOTER (bb), insn);
4256 if (insn)
4257 BB_FOOTER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
4260 return new_bb;
4264 /* Main entry point to this module - initialize the datastructures for
4265 CFG layout changes. It keeps LOOPS up-to-date if not null.
4267 FLAGS is a set of additional flags to pass to cleanup_cfg(). */
4269 void
4270 cfg_layout_initialize (unsigned int flags)
4272 rtx_insn_list *x;
4273 basic_block bb;
4275 /* Once bb partitioning is complete, cfg layout mode should not be
4276 re-entered. Entering cfg layout mode may require fixups. As an
4277 example, if edge forwarding performed when optimizing the cfg
4278 layout required moving a block from the hot to the cold
4279 section. This would create an illegal partitioning unless some
4280 manual fixup was performed. */
4281 gcc_assert (!(crtl->bb_reorder_complete
4282 && flag_reorder_blocks_and_partition));
4284 initialize_original_copy_tables ();
4286 cfg_layout_rtl_register_cfg_hooks ();
4288 record_effective_endpoints ();
4290 /* Make sure that the targets of non local gotos are marked. */
4291 for (x = nonlocal_goto_handler_labels; x; x = x->next ())
4293 bb = BLOCK_FOR_INSN (x->insn ());
4294 bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
4297 cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
4300 /* Splits superblocks. */
4301 void
4302 break_superblocks (void)
4304 sbitmap superblocks;
4305 bool need = false;
4306 basic_block bb;
4308 superblocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
4309 bitmap_clear (superblocks);
4311 FOR_EACH_BB_FN (bb, cfun)
4312 if (bb->flags & BB_SUPERBLOCK)
4314 bb->flags &= ~BB_SUPERBLOCK;
4315 bitmap_set_bit (superblocks, bb->index);
4316 need = true;
4319 if (need)
4321 rebuild_jump_labels (get_insns ());
4322 find_many_sub_basic_blocks (superblocks);
4325 free (superblocks);
4328 /* Finalize the changes: reorder insn list according to the sequence specified
4329 by aux pointers, enter compensation code, rebuild scope forest. */
4331 void
4332 cfg_layout_finalize (void)
4334 #ifdef ENABLE_CHECKING
4335 verify_flow_info ();
4336 #endif
4337 force_one_exit_fallthru ();
4338 rtl_register_cfg_hooks ();
4339 if (reload_completed
4340 #ifdef HAVE_epilogue
4341 && !HAVE_epilogue
4342 #endif
4344 fixup_fallthru_exit_predecessor ();
4345 fixup_reorder_chain ();
4347 rebuild_jump_labels (get_insns ());
4348 delete_dead_jumptables ();
4350 #ifdef ENABLE_CHECKING
4351 verify_insn_chain ();
4352 verify_flow_info ();
4353 #endif
4357 /* Same as split_block but update cfg_layout structures. */
4359 static basic_block
4360 cfg_layout_split_block (basic_block bb, void *insnp)
4362 rtx insn = (rtx) insnp;
4363 basic_block new_bb = rtl_split_block (bb, insn);
4365 BB_FOOTER (new_bb) = BB_FOOTER (bb);
4366 BB_FOOTER (bb) = NULL;
4368 return new_bb;
4371 /* Redirect Edge to DEST. */
4372 static edge
4373 cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
4375 basic_block src = e->src;
4376 edge ret;
4378 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4379 return NULL;
4381 if (e->dest == dest)
4382 return e;
4384 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4385 && (ret = try_redirect_by_replacing_jump (e, dest, true)))
4387 df_set_bb_dirty (src);
4388 return ret;
4391 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
4392 && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
4394 if (dump_file)
4395 fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
4396 e->src->index, dest->index);
4398 df_set_bb_dirty (e->src);
4399 redirect_edge_succ (e, dest);
4400 return e;
4403 /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
4404 in the case the basic block appears to be in sequence. Avoid this
4405 transformation. */
4407 if (e->flags & EDGE_FALLTHRU)
4409 /* Redirect any branch edges unified with the fallthru one. */
4410 if (JUMP_P (BB_END (src))
4411 && label_is_jump_target_p (BB_HEAD (e->dest),
4412 BB_END (src)))
4414 edge redirected;
4416 if (dump_file)
4417 fprintf (dump_file, "Fallthru edge unified with branch "
4418 "%i->%i redirected to %i\n",
4419 e->src->index, e->dest->index, dest->index);
4420 e->flags &= ~EDGE_FALLTHRU;
4421 redirected = redirect_branch_edge (e, dest);
4422 gcc_assert (redirected);
4423 redirected->flags |= EDGE_FALLTHRU;
4424 df_set_bb_dirty (redirected->src);
4425 return redirected;
4427 /* In case we are redirecting fallthru edge to the branch edge
4428 of conditional jump, remove it. */
4429 if (EDGE_COUNT (src->succs) == 2)
4431 /* Find the edge that is different from E. */
4432 edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);
4434 if (s->dest == dest
4435 && any_condjump_p (BB_END (src))
4436 && onlyjump_p (BB_END (src)))
4437 delete_insn (BB_END (src));
4439 if (dump_file)
4440 fprintf (dump_file, "Redirecting fallthru edge %i->%i to %i\n",
4441 e->src->index, e->dest->index, dest->index);
4442 ret = redirect_edge_succ_nodup (e, dest);
4444 else
4445 ret = redirect_branch_edge (e, dest);
4447 /* We don't want simplejumps in the insn stream during cfglayout. */
4448 gcc_assert (!simplejump_p (BB_END (src)));
4450 df_set_bb_dirty (src);
4451 return ret;
4454 /* Simple wrapper as we always can redirect fallthru edges. */
4455 static basic_block
4456 cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
4458 edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
4460 gcc_assert (redirected);
4461 return NULL;
4464 /* Same as delete_basic_block but update cfg_layout structures. */
4466 static void
4467 cfg_layout_delete_block (basic_block bb)
4469 rtx_insn *insn, *next, *prev = PREV_INSN (BB_HEAD (bb)), *remaints;
4470 rtx_insn **to;
4472 if (BB_HEADER (bb))
4474 next = BB_HEAD (bb);
4475 if (prev)
4476 SET_NEXT_INSN (prev) = BB_HEADER (bb);
4477 else
4478 set_first_insn (BB_HEADER (bb));
4479 SET_PREV_INSN (BB_HEADER (bb)) = prev;
4480 insn = BB_HEADER (bb);
4481 while (NEXT_INSN (insn))
4482 insn = NEXT_INSN (insn);
4483 SET_NEXT_INSN (insn) = next;
4484 SET_PREV_INSN (next) = insn;
4486 next = NEXT_INSN (BB_END (bb));
4487 if (BB_FOOTER (bb))
4489 insn = BB_FOOTER (bb);
4490 while (insn)
4492 if (BARRIER_P (insn))
4494 if (PREV_INSN (insn))
4495 SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
4496 else
4497 BB_FOOTER (bb) = NEXT_INSN (insn);
4498 if (NEXT_INSN (insn))
4499 SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
4501 if (LABEL_P (insn))
4502 break;
4503 insn = NEXT_INSN (insn);
4505 if (BB_FOOTER (bb))
4507 insn = BB_END (bb);
4508 SET_NEXT_INSN (insn) = BB_FOOTER (bb);
4509 SET_PREV_INSN (BB_FOOTER (bb)) = insn;
4510 while (NEXT_INSN (insn))
4511 insn = NEXT_INSN (insn);
4512 SET_NEXT_INSN (insn) = next;
4513 if (next)
4514 SET_PREV_INSN (next) = insn;
4515 else
4516 set_last_insn (insn);
4519 if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
4520 to = &BB_HEADER (bb->next_bb);
4521 else
4522 to = &cfg_layout_function_footer;
4524 rtl_delete_block (bb);
4526 if (prev)
4527 prev = NEXT_INSN (prev);
4528 else
4529 prev = get_insns ();
4530 if (next)
4531 next = PREV_INSN (next);
4532 else
4533 next = get_last_insn ();
4535 if (next && NEXT_INSN (next) != prev)
4537 remaints = unlink_insn_chain (prev, next);
4538 insn = remaints;
4539 while (NEXT_INSN (insn))
4540 insn = NEXT_INSN (insn);
4541 SET_NEXT_INSN (insn) = *to;
4542 if (*to)
4543 SET_PREV_INSN (*to) = insn;
4544 *to = remaints;
4548 /* Return true when blocks A and B can be safely merged. */
4550 static bool
4551 cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
4553 /* If we are partitioning hot/cold basic blocks, we don't want to
4554 mess up unconditional or indirect jumps that cross between hot
4555 and cold sections.
4557 Basic block partitioning may result in some jumps that appear to
4558 be optimizable (or blocks that appear to be mergeable), but which really
4559 must be left untouched (they are required to make it safely across
4560 partition boundaries). See the comments at the top of
4561 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
4563 if (BB_PARTITION (a) != BB_PARTITION (b))
4564 return false;
4566 /* Protect the loop latches. */
4567 if (current_loops && b->loop_father->latch == b)
4568 return false;
4570 /* If we would end up moving B's instructions, make sure it doesn't fall
4571 through into the exit block, since we cannot recover from a fallthrough
4572 edge into the exit block occurring in the middle of a function. */
4573 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4575 edge e = find_fallthru_edge (b->succs);
4576 if (e && e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4577 return false;
4580 /* There must be exactly one edge in between the blocks. */
4581 return (single_succ_p (a)
4582 && single_succ (a) == b
4583 && single_pred_p (b) == 1
4584 && a != b
4585 /* Must be simple edge. */
4586 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
4587 && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4588 && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
4589 /* If the jump insn has side effects, we can't kill the edge.
4590 When not optimizing, try_redirect_by_replacing_jump will
4591 not allow us to redirect an edge by replacing a table jump. */
4592 && (!JUMP_P (BB_END (a))
4593 || ((!optimize || reload_completed)
4594 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
/* Merge block A and B.  The blocks must be mergeable.  */

static void
cfg_layout_merge_blocks (basic_block a, basic_block b)
{
  /* Remember whether B merely forwarded control; used at the end to
     propagate its goto_locus onto the surviving edge.  */
  bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
  rtx_insn *insn;

  gcc_checking_assert (cfg_layout_can_merge_blocks_p (a, b));

  if (dump_file)
    fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
	     a->index);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (BB_HEAD (b)))
    delete_insn (BB_HEAD (b));

  /* We should have fallthru edge in a, or we can do dummy redirection to get
     it cleaned up.  */
  if (JUMP_P (BB_END (a)))
    try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
  gcc_assert (!JUMP_P (BB_END (a)));

  /* When not optimizing and the edge is the only place in RTL which holds
     some unique locus, emit a nop with that locus in between.  */
  if (!optimize)
    emit_nop_for_unique_locus_between (a, b);

  /* Move things from b->footer after a->footer.  */
  if (BB_FOOTER (b))
    {
      if (!BB_FOOTER (a))
	BB_FOOTER (a) = BB_FOOTER (b);
      else
	{
	  /* Append B's footer chain after the last insn of A's footer.  */
	  rtx_insn *last = BB_FOOTER (a);

	  while (NEXT_INSN (last))
	    last = NEXT_INSN (last);
	  SET_NEXT_INSN (last) = BB_FOOTER (b);
	  SET_PREV_INSN (BB_FOOTER (b)) = last;
	}
      BB_FOOTER (b) = NULL;
    }

  /* Move things from b->header before a->footer.
     Note that this may include dead tablejump data, but we don't clean
     those up until we go out of cfglayout mode.  */
  if (BB_HEADER (b))
    {
      if (! BB_FOOTER (a))
	BB_FOOTER (a) = BB_HEADER (b);
      else
	{
	  /* Splice B's header chain in front of A's footer.  */
	  rtx_insn *last = BB_HEADER (b);

	  while (NEXT_INSN (last))
	    last = NEXT_INSN (last);
	  SET_NEXT_INSN (last) = BB_FOOTER (a);
	  SET_PREV_INSN (BB_FOOTER (a)) = last;
	  BB_FOOTER (a) = BB_HEADER (b);
	}
      BB_HEADER (b) = NULL;
    }

  /* In the case basic blocks are not adjacent, move them around.  */
  if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
    {
      insn = unlink_insn_chain (BB_HEAD (b), BB_END (b));

      emit_insn_after_noloc (insn, BB_END (a), a);
    }
  /* Otherwise just re-associate the instructions.  */
  else
    {
      insn = BB_HEAD (b);
      BB_END (a) = BB_END (b);
    }

  /* emit_insn_after_noloc doesn't call df_insn_change_bb.
     We need to explicitly call it here.  */
  update_bb_for_insn_chain (insn, BB_END (b), a);

  /* Skip possible DELETED_LABEL insn.  */
  if (!NOTE_INSN_BASIC_BLOCK_P (insn))
    insn = NEXT_INSN (insn);
  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
  BB_HEAD (b) = BB_END (b) = NULL;
  delete_insn (insn);

  df_bb_delete (b->index);

  /* If B was a forwarder block, propagate the locus on the edge.
     NOTE(review): EDGE_SUCC (b, 0) is read after B's insns are gone;
     presumably B's edge vector is still live here and is fixed up by the
     generic merge_blocks wrapper -- confirm against cfghooks.c.  */
  if (forwarder_p
      && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION)
    EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
}
4701 /* Split edge E. */
4703 static basic_block
4704 cfg_layout_split_edge (edge e)
4706 basic_block new_bb =
4707 create_basic_block (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4708 ? NEXT_INSN (BB_END (e->src)) : get_insns (),
4709 NULL_RTX, e->src);
4711 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4712 BB_COPY_PARTITION (new_bb, e->src);
4713 else
4714 BB_COPY_PARTITION (new_bb, e->dest);
4715 make_edge (new_bb, e->dest, EDGE_FALLTHRU);
4716 redirect_edge_and_branch_force (e, new_bb);
4718 return new_bb;
/* Do postprocessing after making a forwarder block joined by edge FALLTHRU.
   Nothing needs to be done for the RTL representation; the hook exists so
   CFG representations that do need fixups (e.g. trees with PHIs) can
   provide one.  */

static void
rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
{
}
4728 /* Return true if BB contains only labels or non-executable
4729 instructions. */
4731 static bool
4732 rtl_block_empty_p (basic_block bb)
4734 rtx_insn *insn;
4736 if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
4737 || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
4738 return true;
4740 FOR_BB_INSNS (bb, insn)
4741 if (NONDEBUG_INSN_P (insn) && !any_uncondjump_p (insn))
4742 return false;
4744 return true;
4747 /* Split a basic block if it ends with a conditional branch and if
4748 the other part of the block is not empty. */
4750 static basic_block
4751 rtl_split_block_before_cond_jump (basic_block bb)
4753 rtx_insn *insn;
4754 rtx_insn *split_point = NULL;
4755 rtx_insn *last = NULL;
4756 bool found_code = false;
4758 FOR_BB_INSNS (bb, insn)
4760 if (any_condjump_p (insn))
4761 split_point = last;
4762 else if (NONDEBUG_INSN_P (insn))
4763 found_code = true;
4764 last = insn;
4767 /* Did not find everything. */
4768 if (found_code && split_point)
4769 return split_block (bb, split_point)->dest;
4770 else
4771 return NULL;
4774 /* Return 1 if BB ends with a call, possibly followed by some
4775 instructions that must stay with the call, 0 otherwise. */
4777 static bool
4778 rtl_block_ends_with_call_p (basic_block bb)
4780 rtx_insn *insn = BB_END (bb);
4782 while (!CALL_P (insn)
4783 && insn != BB_HEAD (bb)
4784 && (keep_with_call_p (insn)
4785 || NOTE_P (insn)
4786 || DEBUG_INSN_P (insn)))
4787 insn = PREV_INSN (insn);
4788 return (CALL_P (insn));
4791 /* Return 1 if BB ends with a conditional branch, 0 otherwise. */
4793 static bool
4794 rtl_block_ends_with_condjump_p (const_basic_block bb)
4796 return any_condjump_p (BB_END (bb));
4799 /* Return true if we need to add fake edge to exit.
4800 Helper function for rtl_flow_call_edges_add. */
4802 static bool
4803 need_fake_edge_p (const rtx_insn *insn)
4805 if (!INSN_P (insn))
4806 return false;
4808 if ((CALL_P (insn)
4809 && !SIBLING_CALL_P (insn)
4810 && !find_reg_note (insn, REG_NORETURN, NULL)
4811 && !(RTL_CONST_OR_PURE_CALL_P (insn))))
4812 return true;
4814 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
4815 && MEM_VOLATILE_P (PATTERN (insn)))
4816 || (GET_CODE (PATTERN (insn)) == PARALLEL
4817 && asm_noperands (insn) != -1
4818 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
4819 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
/* Add fake edges to the function exit for any non constant and non noreturn
   calls, volatile inline assembly in the bitmap of blocks specified by
   BLOCKS or to the whole CFG if BLOCKS is zero.  Return the number of blocks
   that were split.

   The goal is to expose cases in which entering a basic block does not imply
   that all subsequent instructions must be executed.  */

static int
rtl_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block_for_fn (cfun);
  bool check_last_block = false;

  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    return 0;

  /* With no block set given, the last real block needs the special
     treatment below; otherwise only if it is in the set.  */
  if (! blocks)
    check_last_block = true;
  else
    check_last_block = bitmap_bit_p (blocks,
				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
      rtx_insn *insn = BB_END (bb);

      /* Back up past insns that must be kept in the same block as a call.  */
      while (insn != BB_HEAD (bb)
	     && keep_with_call_p (insn))
	insn = PREV_INSN (insn);

      if (need_fake_edge_p (insn))
	{
	  edge e;

	  /* Insert a (use (const_int 0)) on the fallthru edge so the call
	     is no longer the last insn of its block.  */
	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
	  if (e)
	    {
	      insert_insn_on_edge (gen_use (const0_rtx), e);
	      commit_edge_insertions ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */

  for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      rtx_insn *insn;
      rtx_insn *prev_insn;

      if (!bb)
	continue;

      if (blocks && !bitmap_bit_p (blocks, i))
	continue;

      /* Walk the block backwards; PREV_INSN is captured before any split
	 since splitting may renumber/rechain the insns.  */
      for (insn = BB_END (bb); ; insn = prev_insn)
	{
	  prev_insn = PREV_INSN (insn);
	  if (need_fake_edge_p (insn))
	    {
	      edge e;
	      rtx_insn *split_at_insn = insn;

	      /* Don't split the block between a call and an insn that should
		 remain in the same block as the call.  */
	      if (CALL_P (insn))
		while (split_at_insn != BB_END (bb)
		       && keep_with_call_p (NEXT_INSN (split_at_insn)))
		  split_at_insn = NEXT_INSN (split_at_insn);

	      /* The handling above of the final block before the epilogue
		 should be enough to verify that there is no edge to the exit
		 block in CFG already.  Calling make_edge in such case would
		 cause us to mark that edge as fake and remove it later.  */

#ifdef ENABLE_CHECKING
	      if (split_at_insn == BB_END (bb))
		{
		  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
		  gcc_assert (e == NULL);
		}
#endif

	      /* Note that the following may create a new basic block
		 and renumber the existing basic blocks.  */
	      if (split_at_insn != BB_END (bb))
		{
		  e = split_block (bb, split_at_insn);
		  if (e)
		    blocks_split++;
		}

	      make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
	    }

	  if (insn == BB_HEAD (bb))
	    break;
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
/* Add COMP_RTX as a condition at the end of COND_BB, jumping to FIRST_HEAD
   when it holds.  SECOND_HEAD should be the fall-thru block; there is no
   need to handle it here because the loop versioning code handles that
   edge.  The parameter exists only so this hook has the same signature as
   its tree-level counterpart.  COMP_RTX is a comparison rtx; its code and
   two operands describe the condition.  */
static void
rtl_lv_add_condition_to_bb (basic_block first_head ,
			    basic_block second_head ATTRIBUTE_UNUSED,
			    basic_block cond_bb, void *comp_rtx)
{
  rtx label;
  rtx_insn *seq, *jump;
  rtx op0 = XEXP ((rtx)comp_rtx, 0);
  rtx op1 = XEXP ((rtx)comp_rtx, 1);
  enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
  machine_mode mode;


  label = block_label (first_head);
  mode = GET_MODE (op0);
  /* If OP0 carries no mode (e.g. a constant), take it from OP1.  */
  if (mode == VOIDmode)
    mode = GET_MODE (op1);

  /* Build the compare-and-jump in a detached sequence, then splice it in.  */
  start_sequence ();
  op0 = force_operand (op0, NULL_RTX);
  op1 = force_operand (op1, NULL_RTX);
  do_compare_rtx_and_jump (op0, op1, comp, 0,
			   mode, NULL_RTX, NULL_RTX, label, -1);
  jump = get_last_insn ();
  JUMP_LABEL (jump) = label;
  LABEL_NUSES (label)++;
  seq = get_insns ();
  end_sequence ();

  /* Add the new cond, in the new head.  */
  emit_insn_after (seq, BB_END (cond_bb));
}
4988 /* Given a block B with unconditional branch at its end, get the
4989 store the return the branch edge and the fall-thru edge in
4990 BRANCH_EDGE and FALLTHRU_EDGE respectively. */
4991 static void
4992 rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
4993 edge *fallthru_edge)
4995 edge e = EDGE_SUCC (b, 0);
4997 if (e->flags & EDGE_FALLTHRU)
4999 *fallthru_edge = e;
5000 *branch_edge = EDGE_SUCC (b, 1);
5002 else
5004 *branch_edge = e;
5005 *fallthru_edge = EDGE_SUCC (b, 1);
5009 void
5010 init_rtl_bb_info (basic_block bb)
5012 gcc_assert (!bb->il.x.rtl);
5013 bb->il.x.head_ = NULL;
5014 bb->il.x.rtl = ggc_cleared_alloc<rtl_bb_info> ();
5017 /* Returns true if it is possible to remove edge E by redirecting
5018 it to the destination of the other edge from E->src. */
5020 static bool
5021 rtl_can_remove_branch_p (const_edge e)
5023 const_basic_block src = e->src;
5024 const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
5025 const rtx_insn *insn = BB_END (src);
5026 rtx set;
5028 /* The conditions are taken from try_redirect_by_replacing_jump. */
5029 if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
5030 return false;
5032 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
5033 return false;
5035 if (BB_PARTITION (src) != BB_PARTITION (target))
5036 return false;
5038 if (!onlyjump_p (insn)
5039 || tablejump_p (insn, NULL, NULL))
5040 return false;
5042 set = single_set (insn);
5043 if (!set || side_effects_p (set))
5044 return false;
5046 return true;
5049 static basic_block
5050 rtl_duplicate_bb (basic_block bb)
5052 bb = cfg_layout_duplicate_bb (bb);
5053 bb->aux = NULL;
5054 return bb;
5057 /* Do book-keeping of basic block BB for the profile consistency checker.
5058 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
5059 then do post-pass accounting. Store the counting in RECORD. */
5060 static void
5061 rtl_account_profile_record (basic_block bb, int after_pass,
5062 struct profile_record *record)
5064 rtx_insn *insn;
5065 FOR_BB_INSNS (bb, insn)
5066 if (INSN_P (insn))
5068 record->size[after_pass]
5069 += insn_rtx_cost (PATTERN (insn), false);
5070 if (profile_status_for_fn (cfun) == PROFILE_READ)
5071 record->time[after_pass]
5072 += insn_rtx_cost (PATTERN (insn), true) * bb->count;
5073 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
5074 record->time[after_pass]
5075 += insn_rtx_cost (PATTERN (insn), true) * bb->frequency;
/* Implementation of CFG manipulation for linearized RTL.  */
struct cfg_hooks rtl_cfg_hooks = {
  "rtl",
  rtl_verify_flow_info,			/* verify_flow_info */
  rtl_dump_bb,				/* dump_bb */
  rtl_dump_bb_for_graph,		/* dump_bb_for_graph */
  rtl_create_basic_block,		/* create_basic_block */
  rtl_redirect_edge_and_branch,		/* redirect_edge_and_branch */
  rtl_redirect_edge_and_branch_force,	/* redirect_edge_and_branch_force */
  rtl_can_remove_branch_p,		/* can_remove_branch_p */
  rtl_delete_block,			/* delete_basic_block */
  rtl_split_block,			/* split_block */
  rtl_move_block_after,			/* move_block_after */
  rtl_can_merge_blocks, /* can_merge_blocks_p */
  rtl_merge_blocks,			/* merge_blocks */
  rtl_predict_edge,			/* predict_edge */
  rtl_predicted_by_p,			/* predicted_by_p */
  cfg_layout_can_duplicate_bb_p,	/* can_duplicate_block_p */
  rtl_duplicate_bb,			/* duplicate_block */
  rtl_split_edge,			/* split_edge */
  rtl_make_forwarder_block,		/* make_forwarder_block */
  rtl_tidy_fallthru_edge,		/* tidy_fallthru_edge */
  rtl_force_nonfallthru,		/* force_nonfallthru */
  rtl_block_ends_with_call_p,		/* block_ends_with_call_p */
  rtl_block_ends_with_condjump_p,	/* block_ends_with_condjump_p */
  rtl_flow_call_edges_add,		/* flow_call_edges_add */
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  NULL, /* duplicate loop for trees */
  NULL, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi*/
  NULL, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record,		/* account_profile_record */
};
/* Implementation of CFG manipulation for cfg layout RTL, where
   basic block connected via fallthru edges does not have to be adjacent.
   This representation will hopefully become the default one in future
   version of the compiler.  */

struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
  "cfglayout mode",
  rtl_verify_flow_info_1,		/* verify_flow_info */
  rtl_dump_bb,				/* dump_bb */
  rtl_dump_bb_for_graph,		/* dump_bb_for_graph */
  cfg_layout_create_basic_block,	/* create_basic_block */
  cfg_layout_redirect_edge_and_branch,	/* redirect_edge_and_branch */
  cfg_layout_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
  rtl_can_remove_branch_p,		/* can_remove_branch_p */
  cfg_layout_delete_block,		/* delete_basic_block */
  cfg_layout_split_block,		/* split_block */
  rtl_move_block_after,			/* move_block_after */
  cfg_layout_can_merge_blocks_p,	/* can_merge_blocks_p */
  cfg_layout_merge_blocks,		/* merge_blocks */
  rtl_predict_edge,			/* predict_edge */
  rtl_predicted_by_p,			/* predicted_by_p */
  cfg_layout_can_duplicate_bb_p,	/* can_duplicate_block_p */
  cfg_layout_duplicate_bb,		/* duplicate_block */
  cfg_layout_split_edge,		/* split_edge */
  rtl_make_forwarder_block,		/* make_forwarder_block */
  NULL, /* tidy_fallthru_edge */
  rtl_force_nonfallthru,		/* force_nonfallthru */
  rtl_block_ends_with_call_p,		/* block_ends_with_call_p */
  rtl_block_ends_with_condjump_p,	/* block_ends_with_condjump_p */
  rtl_flow_call_edges_add,		/* flow_call_edges_add */
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  duplicate_loop_to_header_edge, /* duplicate loop for trees */
  rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi*/
  rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record,		/* account_profile_record */
};
5159 #include "gt-cfgrtl.h"