c-family/
[official-gcc.git] / gcc / cfgrtl.c
blobb58562fcc5f5851a080c2ec8a7bfaab0028c12ad
/* Control flow graph manipulation code for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
   2011, 2012 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file contains low level functions to manipulate the CFG and analyze it
   that are aware of the RTL intermediate language.

   Available functionality:
     - Basic CFG/RTL manipulation API documented in cfghooks.h
     - CFG-aware instruction chain manipulation
	 delete_insn, delete_insn_chain
     - Edge splitting and committing to edges
	 insert_insn_on_edge, commit_edge_insertions
     - CFG updating after insn simplification
	 purge_dead_edges, purge_all_dead_edges
     - CFG fixing after coarse manipulation
	fixup_abnormal_edges

   Functions not supposed for generic use:
     - Infrastructure to determine quickly basic block for insn
	 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
     - Edge redirection with updating and optimizing of insn chain
	 block_label, tidy_fallthru_edge, force_nonfallthru  */
42 #include "config.h"
43 #include "system.h"
44 #include "coretypes.h"
45 #include "tm.h"
46 #include "tree.h"
47 #include "hard-reg-set.h"
48 #include "basic-block.h"
49 #include "regs.h"
50 #include "flags.h"
51 #include "function.h"
52 #include "except.h"
53 #include "rtl-error.h"
54 #include "tm_p.h"
55 #include "obstack.h"
56 #include "insn-attr.h"
57 #include "insn-config.h"
58 #include "expr.h"
59 #include "target.h"
60 #include "common/common-target.h"
61 #include "cfgloop.h"
62 #include "ggc.h"
63 #include "tree-pass.h"
64 #include "df.h"
66 /* Holds the interesting leading and trailing notes for the function.
67 Only applicable if the CFG is in cfglayout mode. */
68 static GTY(()) rtx cfg_layout_function_footer;
69 static GTY(()) rtx cfg_layout_function_header;
71 static rtx skip_insns_after_block (basic_block);
72 static void record_effective_endpoints (void);
73 static rtx label_for_bb (basic_block);
74 static void fixup_reorder_chain (void);
76 void verify_insn_chain (void);
77 static void fixup_fallthru_exit_predecessor (void);
78 static int can_delete_note_p (const_rtx);
79 static int can_delete_label_p (const_rtx);
80 static basic_block rtl_split_edge (edge);
81 static bool rtl_move_block_after (basic_block, basic_block);
82 static int rtl_verify_flow_info (void);
83 static basic_block cfg_layout_split_block (basic_block, void *);
84 static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
85 static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
86 static void cfg_layout_delete_block (basic_block);
87 static void rtl_delete_block (basic_block);
88 static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
89 static edge rtl_redirect_edge_and_branch (edge, basic_block);
90 static basic_block rtl_split_block (basic_block, void *);
91 static void rtl_dump_bb (FILE *, basic_block, int, int);
92 static int rtl_verify_flow_info_1 (void);
93 static void rtl_make_forwarder_block (edge);
95 /* Return true if NOTE is not one of the ones that must be kept paired,
96 so that we may simply delete it. */
98 static int
99 can_delete_note_p (const_rtx note)
101 switch (NOTE_KIND (note))
103 case NOTE_INSN_DELETED:
104 case NOTE_INSN_BASIC_BLOCK:
105 case NOTE_INSN_EPILOGUE_BEG:
106 return true;
108 default:
109 return false;
113 /* True if a given label can be deleted. */
115 static int
116 can_delete_label_p (const_rtx label)
118 return (!LABEL_PRESERVE_P (label)
119 /* User declared labels must be preserved. */
120 && LABEL_NAME (label) == 0
121 && !in_expr_list_p (forced_labels, label));
124 /* Delete INSN by patching it out. */
126 void
127 delete_insn (rtx insn)
129 rtx note;
130 bool really_delete = true;
132 if (LABEL_P (insn))
134 /* Some labels can't be directly removed from the INSN chain, as they
135 might be references via variables, constant pool etc.
136 Convert them to the special NOTE_INSN_DELETED_LABEL note. */
137 if (! can_delete_label_p (insn))
139 const char *name = LABEL_NAME (insn);
140 basic_block bb = BLOCK_FOR_INSN (insn);
141 rtx bb_note = NEXT_INSN (insn);
143 really_delete = false;
144 PUT_CODE (insn, NOTE);
145 NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
146 NOTE_DELETED_LABEL_NAME (insn) = name;
148 if (bb_note != NULL_RTX && NOTE_INSN_BASIC_BLOCK_P (bb_note)
149 && BLOCK_FOR_INSN (bb_note) == bb)
151 reorder_insns_nobb (insn, insn, bb_note);
152 BB_HEAD (bb) = bb_note;
153 if (BB_END (bb) == bb_note)
154 BB_END (bb) = insn;
158 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
161 if (really_delete)
163 /* If this insn has already been deleted, something is very wrong. */
164 gcc_assert (!INSN_DELETED_P (insn));
165 remove_insn (insn);
166 INSN_DELETED_P (insn) = 1;
169 /* If deleting a jump, decrement the use count of the label. Deleting
170 the label itself should happen in the normal course of block merging. */
171 if (JUMP_P (insn))
173 if (JUMP_LABEL (insn)
174 && LABEL_P (JUMP_LABEL (insn)))
175 LABEL_NUSES (JUMP_LABEL (insn))--;
177 /* If there are more targets, remove them too. */
178 while ((note
179 = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
180 && LABEL_P (XEXP (note, 0)))
182 LABEL_NUSES (XEXP (note, 0))--;
183 remove_note (insn, note);
187 /* Also if deleting any insn that references a label as an operand. */
188 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
189 && LABEL_P (XEXP (note, 0)))
191 LABEL_NUSES (XEXP (note, 0))--;
192 remove_note (insn, note);
195 if (JUMP_TABLE_DATA_P (insn))
197 rtx pat = PATTERN (insn);
198 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
199 int len = XVECLEN (pat, diff_vec_p);
200 int i;
202 for (i = 0; i < len; i++)
204 rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);
206 /* When deleting code in bulk (e.g. removing many unreachable
207 blocks) we can delete a label that's a target of the vector
208 before deleting the vector itself. */
209 if (!NOTE_P (label))
210 LABEL_NUSES (label)--;
215 /* Like delete_insn but also purge dead edges from BB. */
217 void
218 delete_insn_and_edges (rtx insn)
220 bool purge = false;
222 if (INSN_P (insn)
223 && BLOCK_FOR_INSN (insn)
224 && BB_END (BLOCK_FOR_INSN (insn)) == insn)
225 purge = true;
226 delete_insn (insn);
227 if (purge)
228 purge_dead_edges (BLOCK_FOR_INSN (insn));
231 /* Unlink a chain of insns between START and FINISH, leaving notes
232 that must be paired. If CLEAR_BB is true, we set bb field for
233 insns that cannot be removed to NULL. */
235 void
236 delete_insn_chain (rtx start, rtx finish, bool clear_bb)
238 rtx prev, current;
240 /* Unchain the insns one by one. It would be quicker to delete all of these
241 with a single unchaining, rather than one at a time, but we need to keep
242 the NOTE's. */
243 current = finish;
244 while (1)
246 prev = PREV_INSN (current);
247 if (NOTE_P (current) && !can_delete_note_p (current))
249 else
250 delete_insn (current);
252 if (clear_bb && !INSN_DELETED_P (current))
253 set_block_for_insn (current, NULL);
255 if (current == start)
256 break;
257 current = prev;
261 /* Create a new basic block consisting of the instructions between HEAD and END
262 inclusive. This function is designed to allow fast BB construction - reuses
263 the note and basic block struct in BB_NOTE, if any and do not grow
264 BASIC_BLOCK chain and should be used directly only by CFG construction code.
265 END can be NULL in to create new empty basic block before HEAD. Both END
266 and HEAD can be NULL to create basic block at the end of INSN chain.
267 AFTER is the basic block we should be put after. */
269 basic_block
270 create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
272 basic_block bb;
274 if (bb_note
275 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
276 && bb->aux == NULL)
278 /* If we found an existing note, thread it back onto the chain. */
280 rtx after;
282 if (LABEL_P (head))
283 after = head;
284 else
286 after = PREV_INSN (head);
287 head = bb_note;
290 if (after != bb_note && NEXT_INSN (after) != bb_note)
291 reorder_insns_nobb (bb_note, bb_note, after);
293 else
295 /* Otherwise we must create a note and a basic block structure. */
297 bb = alloc_block ();
299 init_rtl_bb_info (bb);
300 if (!head && !end)
301 head = end = bb_note
302 = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
303 else if (LABEL_P (head) && end)
305 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
306 if (head == end)
307 end = bb_note;
309 else
311 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
312 head = bb_note;
313 if (!end)
314 end = head;
317 NOTE_BASIC_BLOCK (bb_note) = bb;
320 /* Always include the bb note in the block. */
321 if (NEXT_INSN (end) == bb_note)
322 end = bb_note;
324 BB_HEAD (bb) = head;
325 BB_END (bb) = end;
326 bb->index = last_basic_block++;
327 bb->flags = BB_NEW | BB_RTL;
328 link_block (bb, after);
329 SET_BASIC_BLOCK (bb->index, bb);
330 df_bb_refs_record (bb->index, false);
331 update_bb_for_insn (bb);
332 BB_SET_PARTITION (bb, BB_UNPARTITIONED);
334 /* Tag the block so that we know it has been used when considering
335 other basic block notes. */
336 bb->aux = bb;
338 return bb;
341 /* Create new basic block consisting of instructions in between HEAD and END
342 and place it to the BB chain after block AFTER. END can be NULL to
343 create a new empty basic block before HEAD. Both END and HEAD can be
344 NULL to create basic block at the end of INSN chain. */
346 static basic_block
347 rtl_create_basic_block (void *headp, void *endp, basic_block after)
349 rtx head = (rtx) headp, end = (rtx) endp;
350 basic_block bb;
352 /* Grow the basic block array if needed. */
353 if ((size_t) last_basic_block >= VEC_length (basic_block, basic_block_info))
355 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
356 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
359 n_basic_blocks++;
361 bb = create_basic_block_structure (head, end, NULL, after);
362 bb->aux = NULL;
363 return bb;
366 static basic_block
367 cfg_layout_create_basic_block (void *head, void *end, basic_block after)
369 basic_block newbb = rtl_create_basic_block (head, end, after);
371 return newbb;
374 /* Delete the insns in a (non-live) block. We physically delete every
375 non-deleted-note insn, and update the flow graph appropriately.
377 Return nonzero if we deleted an exception handler. */
379 /* ??? Preserving all such notes strikes me as wrong. It would be nice
380 to post-process the stream to remove empty blocks, loops, ranges, etc. */
382 static void
383 rtl_delete_block (basic_block b)
385 rtx insn, end;
387 /* If the head of this block is a CODE_LABEL, then it might be the
388 label for an exception handler which can't be reached. We need
389 to remove the label from the exception_handler_label list. */
390 insn = BB_HEAD (b);
392 end = get_last_bb_insn (b);
394 /* Selectively delete the entire chain. */
395 BB_HEAD (b) = NULL;
396 delete_insn_chain (insn, end, true);
399 if (dump_file)
400 fprintf (dump_file, "deleting block %d\n", b->index);
401 df_bb_delete (b->index);
404 /* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
406 void
407 compute_bb_for_insn (void)
409 basic_block bb;
411 FOR_EACH_BB (bb)
413 rtx end = BB_END (bb);
414 rtx insn;
416 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
418 BLOCK_FOR_INSN (insn) = bb;
419 if (insn == end)
420 break;
425 /* Release the basic_block_for_insn array. */
427 unsigned int
428 free_bb_for_insn (void)
430 rtx insn;
431 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
432 if (!BARRIER_P (insn))
433 BLOCK_FOR_INSN (insn) = NULL;
434 return 0;
/* Pass execute function: run the df note problem if delayed-branch
   scheduling will need it, then free the insn-to-bb mapping.  */

static unsigned int
rest_of_pass_free_cfg (void)
{
#ifdef DELAY_SLOTS
  /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
     valid at that point so it would be too late to call df_analyze.  */
  if (optimize > 0 && flag_delayed_branch)
    {
      df_note_add_problem ();
      df_analyze ();
    }
#endif

  free_bb_for_insn ();
  return 0;
}
454 struct rtl_opt_pass pass_free_cfg =
457 RTL_PASS,
458 "*free_cfg", /* name */
459 NULL, /* gate */
460 rest_of_pass_free_cfg, /* execute */
461 NULL, /* sub */
462 NULL, /* next */
463 0, /* static_pass_number */
464 TV_NONE, /* tv_id */
465 0, /* properties_required */
466 0, /* properties_provided */
467 PROP_cfg, /* properties_destroyed */
468 0, /* todo_flags_start */
469 0, /* todo_flags_finish */
473 /* Return RTX to emit after when we want to emit code on the entry of function. */
475 entry_of_function (void)
477 return (n_basic_blocks > NUM_FIXED_BLOCKS ?
478 BB_HEAD (ENTRY_BLOCK_PTR->next_bb) : get_insns ());
481 /* Emit INSN at the entry point of the function, ensuring that it is only
482 executed once per function. */
483 void
484 emit_insn_at_entry (rtx insn)
486 edge_iterator ei = ei_start (ENTRY_BLOCK_PTR->succs);
487 edge e = ei_safe_edge (ei);
488 gcc_assert (e->flags & EDGE_FALLTHRU);
490 insert_insn_on_edge (insn, e);
491 commit_edge_insertions ();
494 /* Update BLOCK_FOR_INSN of insns between BEGIN and END
495 (or BARRIER if found) and notify df of the bb change.
496 The insn chain range is inclusive
497 (i.e. both BEGIN and END will be updated. */
499 static void
500 update_bb_for_insn_chain (rtx begin, rtx end, basic_block bb)
502 rtx insn;
504 end = NEXT_INSN (end);
505 for (insn = begin; insn != end; insn = NEXT_INSN (insn))
506 if (!BARRIER_P (insn))
507 df_insn_change_bb (insn, bb);
510 /* Update BLOCK_FOR_INSN of insns in BB to BB,
511 and notify df of the change. */
513 void
514 update_bb_for_insn (basic_block bb)
516 update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
520 /* Like active_insn_p, except keep the return value clobber around
521 even after reload. */
523 static bool
524 flow_active_insn_p (const_rtx insn)
526 if (active_insn_p (insn))
527 return true;
529 /* A clobber of the function return value exists for buggy
530 programs that fail to return a value. Its effect is to
531 keep the return value from being live across the entire
532 function. If we allow it to be skipped, we introduce the
533 possibility for register lifetime confusion. */
534 if (GET_CODE (PATTERN (insn)) == CLOBBER
535 && REG_P (XEXP (PATTERN (insn), 0))
536 && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
537 return true;
539 return false;
542 /* Return true if the block has no effect and only forwards control flow to
543 its single destination. */
545 bool
546 contains_no_active_insn_p (const_basic_block bb)
548 rtx insn;
550 if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
551 || !single_succ_p (bb))
552 return false;
554 for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
555 if (INSN_P (insn) && flow_active_insn_p (insn))
556 return false;
558 return (!INSN_P (insn)
559 || (JUMP_P (insn) && simplejump_p (insn))
560 || !flow_active_insn_p (insn));
563 /* Likewise, but protect loop latches, headers and preheaders. */
564 /* FIXME: Make this a cfg hook. */
566 bool
567 forwarder_block_p (const_basic_block bb)
569 if (!contains_no_active_insn_p (bb))
570 return false;
572 /* Protect loop latches, headers and preheaders. */
573 if (current_loops)
575 basic_block dest;
576 if (bb->loop_father->header == bb)
577 return false;
578 dest = EDGE_SUCC (bb, 0)->dest;
579 if (dest->loop_father->header == dest)
580 return false;
583 return true;
586 /* Return nonzero if we can reach target from src by falling through. */
587 /* FIXME: Make this a cfg hook. */
589 bool
590 can_fallthru (basic_block src, basic_block target)
592 rtx insn = BB_END (src);
593 rtx insn2;
594 edge e;
595 edge_iterator ei;
597 if (target == EXIT_BLOCK_PTR)
598 return true;
599 if (src->next_bb != target)
600 return 0;
601 FOR_EACH_EDGE (e, ei, src->succs)
602 if (e->dest == EXIT_BLOCK_PTR
603 && e->flags & EDGE_FALLTHRU)
604 return 0;
606 insn2 = BB_HEAD (target);
607 if (insn2 && !active_insn_p (insn2))
608 insn2 = next_active_insn (insn2);
610 /* ??? Later we may add code to move jump tables offline. */
611 return next_active_insn (insn) == insn2;
614 /* Return nonzero if we could reach target from src by falling through,
615 if the target was made adjacent. If we already have a fall-through
616 edge to the exit block, we can't do that. */
617 static bool
618 could_fall_through (basic_block src, basic_block target)
620 edge e;
621 edge_iterator ei;
623 if (target == EXIT_BLOCK_PTR)
624 return true;
625 FOR_EACH_EDGE (e, ei, src->succs)
626 if (e->dest == EXIT_BLOCK_PTR
627 && e->flags & EDGE_FALLTHRU)
628 return 0;
629 return true;
632 /* Return the NOTE_INSN_BASIC_BLOCK of BB. */
634 bb_note (basic_block bb)
636 rtx note;
638 note = BB_HEAD (bb);
639 if (LABEL_P (note))
640 note = NEXT_INSN (note);
642 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note));
643 return note;
646 /* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
647 note associated with the BLOCK. */
649 static rtx
650 first_insn_after_basic_block_note (basic_block block)
652 rtx insn;
654 /* Get the first instruction in the block. */
655 insn = BB_HEAD (block);
657 if (insn == NULL_RTX)
658 return NULL_RTX;
659 if (LABEL_P (insn))
660 insn = NEXT_INSN (insn);
661 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
663 return NEXT_INSN (insn);
666 /* Creates a new basic block just after basic block B by splitting
667 everything after specified instruction I. */
669 static basic_block
670 rtl_split_block (basic_block bb, void *insnp)
672 basic_block new_bb;
673 rtx insn = (rtx) insnp;
674 edge e;
675 edge_iterator ei;
677 if (!insn)
679 insn = first_insn_after_basic_block_note (bb);
681 if (insn)
683 rtx next = insn;
685 insn = PREV_INSN (insn);
687 /* If the block contains only debug insns, insn would have
688 been NULL in a non-debug compilation, and then we'd end
689 up emitting a DELETED note. For -fcompare-debug
690 stability, emit the note too. */
691 if (insn != BB_END (bb)
692 && DEBUG_INSN_P (next)
693 && DEBUG_INSN_P (BB_END (bb)))
695 while (next != BB_END (bb) && DEBUG_INSN_P (next))
696 next = NEXT_INSN (next);
698 if (next == BB_END (bb))
699 emit_note_after (NOTE_INSN_DELETED, next);
702 else
703 insn = get_last_insn ();
706 /* We probably should check type of the insn so that we do not create
707 inconsistent cfg. It is checked in verify_flow_info anyway, so do not
708 bother. */
709 if (insn == BB_END (bb))
710 emit_note_after (NOTE_INSN_DELETED, insn);
712 /* Create the new basic block. */
713 new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
714 BB_COPY_PARTITION (new_bb, bb);
715 BB_END (bb) = insn;
717 /* Redirect the outgoing edges. */
718 new_bb->succs = bb->succs;
719 bb->succs = NULL;
720 FOR_EACH_EDGE (e, ei, new_bb->succs)
721 e->src = new_bb;
723 /* The new block starts off being dirty. */
724 df_set_bb_dirty (bb);
725 return new_bb;
728 /* Return true if the single edge between blocks A and B is the only place
729 in RTL which holds some unique locus. */
731 static bool
732 unique_locus_on_edge_between_p (basic_block a, basic_block b)
734 const location_t goto_locus = EDGE_SUCC (a, 0)->goto_locus;
735 rtx insn, end;
737 if (LOCATION_LOCUS (goto_locus) == UNKNOWN_LOCATION)
738 return false;
740 /* First scan block A backward. */
741 insn = BB_END (a);
742 end = PREV_INSN (BB_HEAD (a));
743 while (insn != end && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
744 insn = PREV_INSN (insn);
746 if (insn != end && INSN_LOCATION (insn) == goto_locus)
747 return false;
749 /* Then scan block B forward. */
750 insn = BB_HEAD (b);
751 if (insn)
753 end = NEXT_INSN (BB_END (b));
754 while (insn != end && !NONDEBUG_INSN_P (insn))
755 insn = NEXT_INSN (insn);
757 if (insn != end && INSN_HAS_LOCATION (insn)
758 && INSN_LOCATION (insn) == goto_locus)
759 return false;
762 return true;
765 /* If the single edge between blocks A and B is the only place in RTL which
766 holds some unique locus, emit a nop with that locus between the blocks. */
768 static void
769 emit_nop_for_unique_locus_between (basic_block a, basic_block b)
771 if (!unique_locus_on_edge_between_p (a, b))
772 return;
774 BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
775 INSN_LOCATION (BB_END (a)) = EDGE_SUCC (a, 0)->goto_locus;
778 /* Blocks A and B are to be merged into a single block A. The insns
779 are already contiguous. */
781 static void
782 rtl_merge_blocks (basic_block a, basic_block b)
784 rtx b_head = BB_HEAD (b), b_end = BB_END (b), a_end = BB_END (a);
785 rtx del_first = NULL_RTX, del_last = NULL_RTX;
786 rtx b_debug_start = b_end, b_debug_end = b_end;
787 bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
788 int b_empty = 0;
790 if (dump_file)
791 fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
792 a->index);
794 while (DEBUG_INSN_P (b_end))
795 b_end = PREV_INSN (b_debug_start = b_end);
797 /* If there was a CODE_LABEL beginning B, delete it. */
798 if (LABEL_P (b_head))
800 /* Detect basic blocks with nothing but a label. This can happen
801 in particular at the end of a function. */
802 if (b_head == b_end)
803 b_empty = 1;
805 del_first = del_last = b_head;
806 b_head = NEXT_INSN (b_head);
809 /* Delete the basic block note and handle blocks containing just that
810 note. */
811 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
813 if (b_head == b_end)
814 b_empty = 1;
815 if (! del_last)
816 del_first = b_head;
818 del_last = b_head;
819 b_head = NEXT_INSN (b_head);
822 /* If there was a jump out of A, delete it. */
823 if (JUMP_P (a_end))
825 rtx prev;
827 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
828 if (!NOTE_P (prev)
829 || NOTE_INSN_BASIC_BLOCK_P (prev)
830 || prev == BB_HEAD (a))
831 break;
833 del_first = a_end;
835 #ifdef HAVE_cc0
836 /* If this was a conditional jump, we need to also delete
837 the insn that set cc0. */
838 if (only_sets_cc0_p (prev))
840 rtx tmp = prev;
842 prev = prev_nonnote_insn (prev);
843 if (!prev)
844 prev = BB_HEAD (a);
845 del_first = tmp;
847 #endif
849 a_end = PREV_INSN (del_first);
851 else if (BARRIER_P (NEXT_INSN (a_end)))
852 del_first = NEXT_INSN (a_end);
854 /* Delete everything marked above as well as crap that might be
855 hanging out between the two blocks. */
856 BB_END (a) = a_end;
857 BB_HEAD (b) = b_empty ? NULL_RTX : b_head;
858 delete_insn_chain (del_first, del_last, true);
860 /* When not optimizing CFG and the edge is the only place in RTL which holds
861 some unique locus, emit a nop with that locus in between. */
862 if (!optimize)
864 emit_nop_for_unique_locus_between (a, b);
865 a_end = BB_END (a);
868 /* Reassociate the insns of B with A. */
869 if (!b_empty)
871 update_bb_for_insn_chain (a_end, b_debug_end, a);
873 BB_END (a) = b_debug_end;
874 BB_HEAD (b) = NULL_RTX;
876 else if (b_end != b_debug_end)
878 /* Move any deleted labels and other notes between the end of A
879 and the debug insns that make up B after the debug insns,
880 bringing the debug insns into A while keeping the notes after
881 the end of A. */
882 if (NEXT_INSN (a_end) != b_debug_start)
883 reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
884 b_debug_end);
885 update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
886 BB_END (a) = b_debug_end;
889 df_bb_delete (b->index);
891 /* If B was a forwarder block, propagate the locus on the edge. */
892 if (forwarder_p && !EDGE_SUCC (b, 0)->goto_locus)
893 EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;
895 if (dump_file)
896 fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
900 /* Return true when block A and B can be merged. */
902 static bool
903 rtl_can_merge_blocks (basic_block a, basic_block b)
905 /* If we are partitioning hot/cold basic blocks, we don't want to
906 mess up unconditional or indirect jumps that cross between hot
907 and cold sections.
909 Basic block partitioning may result in some jumps that appear to
910 be optimizable (or blocks that appear to be mergeable), but which really
911 must be left untouched (they are required to make it safely across
912 partition boundaries). See the comments at the top of
913 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
915 if (BB_PARTITION (a) != BB_PARTITION (b))
916 return false;
918 /* Protect the loop latches. */
919 if (current_loops && b->loop_father->latch == b)
920 return false;
922 /* There must be exactly one edge in between the blocks. */
923 return (single_succ_p (a)
924 && single_succ (a) == b
925 && single_pred_p (b)
926 && a != b
927 /* Must be simple edge. */
928 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
929 && a->next_bb == b
930 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
931 /* If the jump insn has side effects,
932 we can't kill the edge. */
933 && (!JUMP_P (BB_END (a))
934 || (reload_completed
935 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
938 /* Return the label in the head of basic block BLOCK. Create one if it doesn't
939 exist. */
942 block_label (basic_block block)
944 if (block == EXIT_BLOCK_PTR)
945 return NULL_RTX;
947 if (!LABEL_P (BB_HEAD (block)))
949 BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
952 return BB_HEAD (block);
955 /* Attempt to perform edge redirection by replacing possibly complex jump
956 instruction by unconditional jump or removing jump completely. This can
957 apply only if all edges now point to the same block. The parameters and
958 return values are equivalent to redirect_edge_and_branch. */
960 edge
961 try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
963 basic_block src = e->src;
964 rtx insn = BB_END (src), kill_from;
965 rtx set;
966 int fallthru = 0;
968 /* If we are partitioning hot/cold basic blocks, we don't want to
969 mess up unconditional or indirect jumps that cross between hot
970 and cold sections.
972 Basic block partitioning may result in some jumps that appear to
973 be optimizable (or blocks that appear to be mergeable), but which really
974 must be left untouched (they are required to make it safely across
975 partition boundaries). See the comments at the top of
976 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
978 if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
979 || BB_PARTITION (src) != BB_PARTITION (target))
980 return NULL;
982 /* We can replace or remove a complex jump only when we have exactly
983 two edges. Also, if we have exactly one outgoing edge, we can
984 redirect that. */
985 if (EDGE_COUNT (src->succs) >= 3
986 /* Verify that all targets will be TARGET. Specifically, the
987 edge that is not E must also go to TARGET. */
988 || (EDGE_COUNT (src->succs) == 2
989 && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
990 return NULL;
992 if (!onlyjump_p (insn))
993 return NULL;
994 if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
995 return NULL;
997 /* Avoid removing branch with side effects. */
998 set = single_set (insn);
999 if (!set || side_effects_p (set))
1000 return NULL;
1002 /* In case we zap a conditional jump, we'll need to kill
1003 the cc0 setter too. */
1004 kill_from = insn;
1005 #ifdef HAVE_cc0
1006 if (reg_mentioned_p (cc0_rtx, PATTERN (insn))
1007 && only_sets_cc0_p (PREV_INSN (insn)))
1008 kill_from = PREV_INSN (insn);
1009 #endif
1011 /* See if we can create the fallthru edge. */
1012 if (in_cfglayout || can_fallthru (src, target))
1014 if (dump_file)
1015 fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
1016 fallthru = 1;
1018 /* Selectively unlink whole insn chain. */
1019 if (in_cfglayout)
1021 rtx insn = BB_FOOTER (src);
1023 delete_insn_chain (kill_from, BB_END (src), false);
1025 /* Remove barriers but keep jumptables. */
1026 while (insn)
1028 if (BARRIER_P (insn))
1030 if (PREV_INSN (insn))
1031 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
1032 else
1033 BB_FOOTER (src) = NEXT_INSN (insn);
1034 if (NEXT_INSN (insn))
1035 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
1037 if (LABEL_P (insn))
1038 break;
1039 insn = NEXT_INSN (insn);
1042 else
1043 delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
1044 false);
1047 /* If this already is simplejump, redirect it. */
1048 else if (simplejump_p (insn))
1050 if (e->dest == target)
1051 return NULL;
1052 if (dump_file)
1053 fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
1054 INSN_UID (insn), e->dest->index, target->index);
1055 if (!redirect_jump (insn, block_label (target), 0))
1057 gcc_assert (target == EXIT_BLOCK_PTR);
1058 return NULL;
1062 /* Cannot do anything for target exit block. */
1063 else if (target == EXIT_BLOCK_PTR)
1064 return NULL;
1066 /* Or replace possibly complicated jump insn by simple jump insn. */
1067 else
1069 rtx target_label = block_label (target);
1070 rtx barrier, label, table;
1072 emit_jump_insn_after_noloc (gen_jump (target_label), insn);
1073 JUMP_LABEL (BB_END (src)) = target_label;
1074 LABEL_NUSES (target_label)++;
1075 if (dump_file)
1076 fprintf (dump_file, "Replacing insn %i by jump %i\n",
1077 INSN_UID (insn), INSN_UID (BB_END (src)));
1080 delete_insn_chain (kill_from, insn, false);
1082 /* Recognize a tablejump that we are converting to a
1083 simple jump and remove its associated CODE_LABEL
1084 and ADDR_VEC or ADDR_DIFF_VEC. */
1085 if (tablejump_p (insn, &label, &table))
1086 delete_insn_chain (label, table, false);
1088 barrier = next_nonnote_insn (BB_END (src));
1089 if (!barrier || !BARRIER_P (barrier))
1090 emit_barrier_after (BB_END (src));
1091 else
1093 if (barrier != NEXT_INSN (BB_END (src)))
1095 /* Move the jump before barrier so that the notes
1096 which originally were or were created before jump table are
1097 inside the basic block. */
1098 rtx new_insn = BB_END (src);
1100 update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
1101 PREV_INSN (barrier), src);
1103 NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
1104 PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);
1106 NEXT_INSN (new_insn) = barrier;
1107 NEXT_INSN (PREV_INSN (barrier)) = new_insn;
1109 PREV_INSN (new_insn) = PREV_INSN (barrier);
1110 PREV_INSN (barrier) = new_insn;
1115 /* Keep only one edge out and set proper flags. */
1116 if (!single_succ_p (src))
1117 remove_edge (e);
1118 gcc_assert (single_succ_p (src));
1120 e = single_succ_edge (src);
1121 if (fallthru)
1122 e->flags = EDGE_FALLTHRU;
1123 else
1124 e->flags = 0;
1126 e->probability = REG_BR_PROB_BASE;
1127 e->count = src->count;
1129 if (e->dest != target)
1130 redirect_edge_succ (e, target);
1131 return e;
1134 /* Subroutine of redirect_branch_edge that tries to patch the jump
1135 instruction INSN so that it reaches block NEW. Do this
1136 only when it originally reached block OLD. Return true if this
1137 worked or the original target wasn't OLD, return false if redirection
1138 doesn't work. */
/* NOTE(review): brace-only lines appear to have been dropped from this
   listing, so indentation no longer shows block structure; compare against
   upstream cfgrtl.c before editing.  Three jump forms are handled below:
   tablejumps (including casesi dispatch), asm gotos, and ordinary
   (un)conditional jumps.  */
1140 static bool
1141 patch_jump_insn (rtx insn, rtx old_label, basic_block new_bb)
1143 rtx tmp;
1144 /* Recognize a tablejump and adjust all matching cases. */
1145 if (tablejump_p (insn, NULL, &tmp))
1147 rtvec vec;
1148 int j;
1149 rtx new_label = block_label (new_bb);
/* The exit block has no label, so a tablejump cannot be retargeted at it.  */
1151 if (new_bb == EXIT_BLOCK_PTR)
1152 return false;
1153 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1154 vec = XVEC (PATTERN (tmp), 0);
1155 else
1156 vec = XVEC (PATTERN (tmp), 1);
/* Rewrite every table entry referencing OLD_LABEL, keeping the
   LABEL_NUSES counts of both labels consistent.  */
1158 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1159 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1161 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
1162 --LABEL_NUSES (old_label);
1163 ++LABEL_NUSES (new_label);
1166 /* Handle casesi dispatch insns. */
1167 if ((tmp = single_set (insn)) != NULL
1168 && SET_DEST (tmp) == pc_rtx
1169 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1170 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1171 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1173 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
1174 new_label);
1175 --LABEL_NUSES (old_label);
1176 ++LABEL_NUSES (new_label);
/* asm goto: retarget ASM_OPERANDS_LABEL entries pointing at OLD_LABEL,
   then bring JUMP_LABEL and the REG_LABEL_TARGET / REG_LABEL_OPERAND
   notes back in sync with the new target.  */
1179 else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
1181 int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
1182 rtx new_label, note;
1184 if (new_bb == EXIT_BLOCK_PTR)
1185 return false;
1186 new_label = block_label (new_bb);
1188 for (i = 0; i < n; ++i)
1190 rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
1191 gcc_assert (GET_CODE (old_ref) == LABEL_REF);
1192 if (XEXP (old_ref, 0) == old_label)
1194 ASM_OPERANDS_LABEL (tmp, i)
1195 = gen_rtx_LABEL_REF (Pmode, new_label);
1196 --LABEL_NUSES (old_label);
1197 ++LABEL_NUSES (new_label);
1201 if (JUMP_LABEL (insn) == old_label)
1203 JUMP_LABEL (insn) = new_label;
1204 note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
1205 if (note)
1206 remove_note (insn, note);
1208 else
1210 note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
1211 if (note)
1212 remove_note (insn, note);
1213 if (JUMP_LABEL (insn) != new_label
1214 && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
1215 add_reg_note (insn, REG_LABEL_TARGET, new_label);
/* Remaining REG_LABEL_OPERAND notes naming OLD_LABEL are updated
   in place rather than removed.  */
1217 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
1218 != NULL_RTX)
1219 XEXP (note, 0) = new_label;
1221 else
1223 /* ?? We may play the games with moving the named labels from
1224 one basic block to the other in case only one computed_jump is
1225 available. */
1226 if (computed_jump_p (insn)
1227 /* A return instruction can't be redirected. */
1228 || returnjump_p (insn))
1229 return false;
1231 if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
1233 /* If the insn doesn't go where we think, we're confused. */
1234 gcc_assert (JUMP_LABEL (insn) == old_label);
1236 /* If the substitution doesn't succeed, die. This can happen
1237 if the back end emitted unrecognizable instructions or if
1238 target is exit block on some arches. */
1239 if (!redirect_jump (insn, block_label (new_bb), 0))
1241 gcc_assert (new_bb == EXIT_BLOCK_PTR);
1242 return false;
1246 return true;
1250 /* Redirect edge representing branch of (un)conditional jump or tablejump,
1251 NULL on failure */
1252 static edge
1253 redirect_branch_edge (edge e, basic_block target)
1255 rtx old_label = BB_HEAD (e->dest);
1256 basic_block src = e->src;
1257 rtx insn = BB_END (src);
1259 /* We can only redirect non-fallthru edges of jump insn. */
1260 if (e->flags & EDGE_FALLTHRU)
1261 return NULL;
1262 else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
1263 return NULL;
1265 if (!currently_expanding_to_rtl)
1267 if (!patch_jump_insn (insn, old_label, target))
1268 return NULL;
1270 else
1271 /* When expanding this BB might actually contain multiple
1272 jumps (i.e. not yet split by find_many_sub_basic_blocks).
1273 Redirect all of those that match our label. */
1274 FOR_BB_INSNS (src, insn)
1275 if (JUMP_P (insn) && !patch_jump_insn (insn, old_label, target))
1276 return NULL;
1278 if (dump_file)
1279 fprintf (dump_file, "Edge %i->%i redirected to %i\n",
1280 e->src->index, e->dest->index, target->index);
1282 if (e->dest != target)
1283 e = redirect_edge_succ_nodup (e, target);
1285 return e;
1288 /* Attempt to change code to redirect edge E to TARGET. Don't do that on
1289 expense of adding new instructions or reordering basic blocks.
1291 Function can be also called with edge destination equivalent to the TARGET.
1292 Then it should try the simplifications and do nothing if none is possible.
1294 Return edge representing the branch if transformation succeeded. Return NULL
1295 on failure.
1296 We still return NULL in case E already destinated TARGET and we didn't
1297 managed to simplify instruction stream. */
1299 static edge
1300 rtl_redirect_edge_and_branch (edge e, basic_block target)
1302 edge ret;
1303 basic_block src = e->src;
1305 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
1306 return NULL;
1308 if (e->dest == target)
1309 return e;
1311 if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
1313 df_set_bb_dirty (src);
1314 return ret;
1317 ret = redirect_branch_edge (e, target);
1318 if (!ret)
1319 return NULL;
1321 df_set_bb_dirty (src);
1322 return ret;
1325 /* Like force_nonfallthru below, but additionally performs redirection
1326 Used by redirect_edge_and_branch_force. JUMP_LABEL is used only
1327 when redirecting to the EXIT_BLOCK, it is either ret_rtx or
1328 simple_return_rtx, indicating which kind of returnjump to create.
1329 It should be NULL otherwise. */
1331 basic_block
1332 force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
1334 basic_block jump_block, new_bb = NULL, src = e->src;
1335 rtx note;
1336 edge new_edge;
1337 int abnormal_edge_flags = 0;
1338 bool asm_goto_edge = false;
1339 int loc;
1341 /* In the case the last instruction is conditional jump to the next
1342 instruction, first redirect the jump itself and then continue
1343 by creating a basic block afterwards to redirect fallthru edge. */
1344 if (e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR
1345 && any_condjump_p (BB_END (e->src))
1346 && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
1348 rtx note;
1349 edge b = unchecked_make_edge (e->src, target, 0);
1350 bool redirected;
1352 redirected = redirect_jump (BB_END (e->src), block_label (target), 0);
1353 gcc_assert (redirected);
1355 note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
1356 if (note)
1358 int prob = INTVAL (XEXP (note, 0));
1360 b->probability = prob;
1361 b->count = e->count * prob / REG_BR_PROB_BASE;
/* NOTE(review): 'e->probability -= e->probability' always zeroes the
   probability (and makes the '< 0' clamp below dead); '-= prob' looks
   like the intent — confirm against upstream before changing.  */
1362 e->probability -= e->probability;
1363 e->count -= b->count;
1364 if (e->probability < 0)
1365 e->probability = 0;
1366 if (e->count < 0)
1367 e->count = 0;
1371 if (e->flags & EDGE_ABNORMAL)
1373 /* Irritating special case - fallthru edge to the same block as abnormal
1374 edge.
1375 We can't redirect abnormal edge, but we still can split the fallthru
1376 one and create separate abnormal edge to original destination.
1377 This allows bb-reorder to make such edge non-fallthru. */
1378 gcc_assert (e->dest == target);
1379 abnormal_edge_flags = e->flags & ~EDGE_FALLTHRU;
1380 e->flags &= EDGE_FALLTHRU;
1382 else
1384 gcc_assert (e->flags & EDGE_FALLTHRU);
1385 if (e->src == ENTRY_BLOCK_PTR)
1387 /* We can't redirect the entry block. Create an empty block
1388 at the start of the function which we use to add the new
1389 jump. */
1390 edge tmp;
1391 edge_iterator ei;
1392 bool found = false;
1394 basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL, ENTRY_BLOCK_PTR);
1396 /* Change the existing edge's source to be the new block, and add
1397 a new edge from the entry block to the new block. */
1398 e->src = bb;
1399 for (ei = ei_start (ENTRY_BLOCK_PTR->succs); (tmp = ei_safe_edge (ei)); )
1401 if (tmp == e)
1403 VEC_unordered_remove (edge, ENTRY_BLOCK_PTR->succs, ei.index);
1404 found = true;
1405 break;
1407 else
1408 ei_next (&ei);
1411 gcc_assert (found);
1413 VEC_safe_push (edge, gc, bb->succs, e);
1414 make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
1418 /* If e->src ends with asm goto, see if any of the ASM_OPERANDS_LABELs
1419 don't point to the target or fallthru label. */
1420 if (JUMP_P (BB_END (e->src))
1421 && target != EXIT_BLOCK_PTR
1422 && (e->flags & EDGE_FALLTHRU)
1423 && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
1425 int i, n = ASM_OPERANDS_LABEL_LENGTH (note);
1427 for (i = 0; i < n; ++i)
1429 if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (e->dest))
1430 XEXP (ASM_OPERANDS_LABEL (note, i), 0) = block_label (target);
1431 if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (target))
1432 asm_goto_edge = true;
/* A separate jump block is required when SRC has other successors, when
   the edge carried abnormal flags, or when an asm goto also references
   TARGET's label.  */
1436 if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
1438 gcov_type count = e->count;
1439 int probability = e->probability;
1440 /* Create the new structures. */
1442 /* If the old block ended with a tablejump, skip its table
1443 by searching forward from there. Otherwise start searching
1444 forward from the last instruction of the old block. */
1445 if (!tablejump_p (BB_END (e->src), NULL, &note))
1446 note = BB_END (e->src);
1447 note = NEXT_INSN (note);
1449 jump_block = create_basic_block (note, NULL, e->src);
1450 jump_block->count = count;
1451 jump_block->frequency = EDGE_FREQUENCY (e);
1453 /* Make sure new block ends up in correct hot/cold section. */
1455 BB_COPY_PARTITION (jump_block, e->src);
1456 if (flag_reorder_blocks_and_partition
1457 && targetm_common.have_named_sections
1458 && JUMP_P (BB_END (jump_block))
1459 && !any_condjump_p (BB_END (jump_block))
1460 && (EDGE_SUCC (jump_block, 0)->flags & EDGE_CROSSING))
1461 add_reg_note (BB_END (jump_block), REG_CROSSING_JUMP, NULL_RTX);
1463 /* Wire edge in. */
1464 new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
1465 new_edge->probability = probability;
1466 new_edge->count = count;
1468 /* Redirect old edge. */
1469 redirect_edge_pred (e, jump_block);
1470 e->probability = REG_BR_PROB_BASE;
1472 /* If asm goto has any label refs to target's label,
1473 add also edge from asm goto bb to target. */
1474 if (asm_goto_edge)
1476 new_edge->probability /= 2;
1477 new_edge->count /= 2;
1478 jump_block->count /= 2;
1479 jump_block->frequency /= 2;
1480 new_edge = make_edge (new_edge->src, target,
1481 e->flags & ~EDGE_FALLTHRU);
1482 new_edge->probability = probability - probability / 2;
1483 new_edge->count = count - count / 2;
1486 new_bb = jump_block;
1488 else
1489 jump_block = e->src;
/* Emit the actual jump: a (simple_)return when heading to the exit
   block, otherwise an ordinary jump to TARGET's label, at the fallthru
   edge's source location.  */
1491 loc = e->goto_locus;
1492 e->flags &= ~EDGE_FALLTHRU;
1493 if (target == EXIT_BLOCK_PTR)
1495 if (jump_label == ret_rtx)
1497 #ifdef HAVE_return
1498 emit_jump_insn_after_setloc (gen_return (), BB_END (jump_block), loc);
1499 #else
1500 gcc_unreachable ();
1501 #endif
1503 else
1505 gcc_assert (jump_label == simple_return_rtx);
1506 #ifdef HAVE_simple_return
1507 emit_jump_insn_after_setloc (gen_simple_return (),
1508 BB_END (jump_block), loc);
1509 #else
1510 gcc_unreachable ();
1511 #endif
1513 set_return_jump_label (BB_END (jump_block));
1515 else
1517 rtx label = block_label (target);
1518 emit_jump_insn_after_setloc (gen_jump (label), BB_END (jump_block), loc);
1519 JUMP_LABEL (BB_END (jump_block)) = label;
1520 LABEL_NUSES (label)++;
1523 emit_barrier_after (BB_END (jump_block));
1524 redirect_edge_succ_nodup (e, target);
1526 if (abnormal_edge_flags)
1527 make_edge (src, target, abnormal_edge_flags);
1529 df_mark_solutions_dirty ();
1530 return new_bb;
1533 /* Edge E is assumed to be fallthru edge. Emit needed jump instruction
1534 (and possibly create new basic block) to make edge non-fallthru.
1535 Return newly created BB or NULL if none. */
1537 static basic_block
1538 rtl_force_nonfallthru (edge e)
1540 return force_nonfallthru_and_redirect (e, e->dest, NULL_RTX);
1543 /* Redirect edge even at the expense of creating new jump insn or
1544 basic block. Return new basic block if created, NULL otherwise.
1545 Conversion must be possible. */
1547 static basic_block
1548 rtl_redirect_edge_and_branch_force (edge e, basic_block target)
1550 if (redirect_edge_and_branch (e, target)
1551 || e->dest == target)
1552 return NULL;
1554 /* In case the edge redirection failed, try to force it to be non-fallthru
1555 and redirect newly created simplejump. */
1556 df_set_bb_dirty (e->src);
1557 return force_nonfallthru_and_redirect (e, target, NULL_RTX);
1560 /* The given edge should potentially be a fallthru edge. If that is in
1561 fact true, delete the jump and barriers that are in the way. */
1563 static void
1564 rtl_tidy_fallthru_edge (edge e)
1566 rtx q;
1567 basic_block b = e->src, c = b->next_bb;
1569 /* ??? In a late-running flow pass, other folks may have deleted basic
1570 blocks by nopping out blocks, leaving multiple BARRIERs between here
1571 and the target label. They ought to be chastised and fixed.
1573 We can also wind up with a sequence of undeletable labels between
1574 one block and the next.
1576 So search through a sequence of barriers, labels, and notes for
1577 the head of block C and assert that we really do fall through. */
/* Any real insn between B and C means this is not actually a fallthru;
   bail out without touching anything.  */
1579 for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
1580 if (INSN_P (q))
1581 return;
1583 /* Remove what will soon cease being the jump insn from the source block.
1584 If block B consisted only of this single jump, turn it into a deleted
1585 note. */
1586 q = BB_END (b);
1587 if (JUMP_P (q)
1588 && onlyjump_p (q)
1589 && (any_uncondjump_p (q)
1590 || single_succ_p (b)))
1592 #ifdef HAVE_cc0
1593 /* If this was a conditional jump, we need to also delete
1594 the insn that set cc0. */
1595 if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
1596 q = PREV_INSN (q);
1597 #endif
1599 q = PREV_INSN (q);
/* Q now points at the last insn to keep; everything between it and the
   head of C (the jump and any barriers/labels/notes) is deleted.  */
1602 /* Selectively unlink the sequence. */
1603 if (q != PREV_INSN (BB_HEAD (c)))
1604 delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);
1606 e->flags |= EDGE_FALLTHRU;
1609 /* Should move basic block BB after basic block AFTER. NIY. */
1611 static bool
1612 rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
1613 basic_block after ATTRIBUTE_UNUSED)
1615 return false;
1618 /* Split a (typically critical) edge. Return the new block.
1619 The edge must not be abnormal.
1621 ??? The code generally expects to be called on critical edges.
1622 The case of a block ending in an unconditional jump to a
1623 block with multiple predecessors is not handled optimally. */
1625 static basic_block
1626 rtl_split_edge (edge edge_in)
1628 basic_block bb;
1629 rtx before;
1631 /* Abnormal edges cannot be split. */
1632 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
1634 /* We are going to place the new block in front of edge destination.
1635 Avoid existence of fallthru predecessors. */
1636 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1638 edge e = find_fallthru_edge (edge_in->dest->preds);
1640 if (e)
1641 force_nonfallthru (e);
1644 /* Create the basic block note. */
1645 if (edge_in->dest != EXIT_BLOCK_PTR)
1646 before = BB_HEAD (edge_in->dest);
1647 else
1648 before = NULL_RTX;
1650 /* If this is a fall through edge to the exit block, the blocks might be
1651 not adjacent, and the right place is after the source. */
1652 if ((edge_in->flags & EDGE_FALLTHRU) && edge_in->dest == EXIT_BLOCK_PTR)
1654 before = NEXT_INSN (BB_END (edge_in->src));
1655 bb = create_basic_block (before, NULL, edge_in->src);
1656 BB_COPY_PARTITION (bb, edge_in->src);
1658 else
1660 bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
1661 /* ??? Why not edge_in->dest->prev_bb here? */
1662 BB_COPY_PARTITION (bb, edge_in->dest);
/* The new block falls through to the old destination unconditionally.  */
1665 make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
1667 /* For non-fallthru edges, we must adjust the predecessor's
1668 jump instruction to target our new block. */
1669 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1671 edge redirected = redirect_edge_and_branch (edge_in, bb);
1672 gcc_assert (redirected);
1674 else
1676 if (edge_in->src != ENTRY_BLOCK_PTR)
1678 /* For asm goto even splitting of fallthru edge might
1679 need insn patching, as other labels might point to the
1680 old label. */
/* BEFORE here is the old destination's label (dest != EXIT on this
   path); asm goto references to it must be repointed at BB.  */
1681 rtx last = BB_END (edge_in->src);
1682 if (last
1683 && JUMP_P (last)
1684 && edge_in->dest != EXIT_BLOCK_PTR
1685 && extract_asm_operands (PATTERN (last)) != NULL_RTX
1686 && patch_jump_insn (last, before, bb))
1687 df_set_bb_dirty (edge_in->src);
1689 redirect_edge_succ (edge_in, bb);
1692 return bb;
1695 /* Queue instructions for insertion on an edge between two basic blocks.
1696 The new instructions and basic blocks (if any) will not appear in the
1697 CFG until commit_edge_insertions is called. */
1699 void
1700 insert_insn_on_edge (rtx pattern, edge e)
1702 /* We cannot insert instructions on an abnormal critical edge.
1703 It will be easier to find the culprit if we die now. */
1704 gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
1706 if (e->insns.r == NULL_RTX)
1707 start_sequence ();
1708 else
1709 push_to_sequence (e->insns.r);
1711 emit_insn (pattern);
1713 e->insns.r = get_insns ();
1714 end_sequence ();
1717 /* Update the CFG for the instructions queued on edge E. */
1719 void
1720 commit_one_edge_insertion (edge e)
1722 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
1723 basic_block bb;
1725 /* Pull the insns off the edge now since the edge might go away. */
1726 insns = e->insns.r;
1727 e->insns.r = NULL_RTX;
1729 /* Figure out where to put these insns. If the destination has
1730 one predecessor, insert there. Except for the exit block. */
1731 if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR)
1733 bb = e->dest;
1735 /* Get the location correct wrt a code label, and "nice" wrt
1736 a basic block note, and before everything else. */
1737 tmp = BB_HEAD (bb);
1738 if (LABEL_P (tmp))
1739 tmp = NEXT_INSN (tmp);
1740 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1741 tmp = NEXT_INSN (tmp);
1742 if (tmp == BB_HEAD (bb))
1743 before = tmp;
1744 else if (tmp)
1745 after = PREV_INSN (tmp);
1746 else
1747 after = get_last_insn ();
1750 /* If the source has one successor and the edge is not abnormal,
1751 insert there. Except for the entry block. */
1752 else if ((e->flags & EDGE_ABNORMAL) == 0
1753 && single_succ_p (e->src)
1754 && e->src != ENTRY_BLOCK_PTR)
1756 bb = e->src;
1758 /* It is possible to have a non-simple jump here. Consider a target
1759 where some forms of unconditional jumps clobber a register. This
1760 happens on the fr30 for example.
1762 We know this block has a single successor, so we can just emit
1763 the queued insns before the jump. */
1764 if (JUMP_P (BB_END (bb)))
1765 before = BB_END (bb);
1766 else
1768 /* We'd better be fallthru, or we've lost track of what's what. */
1769 gcc_assert (e->flags & EDGE_FALLTHRU);
1771 after = BB_END (bb);
1775 /* Otherwise we must split the edge. */
1776 else
1778 bb = split_edge (e);
1779 after = BB_END (bb);
/* If splitting created a jump, it may need a REG_CROSSING_JUMP note to
   keep hot/cold partitioning consistent.  */
1781 if (flag_reorder_blocks_and_partition
1782 && targetm_common.have_named_sections
1783 && e->src != ENTRY_BLOCK_PTR
1784 && BB_PARTITION (e->src) == BB_COLD_PARTITION
1785 && !(e->flags & EDGE_CROSSING)
1786 && JUMP_P (after)
1787 && !any_condjump_p (after)
1788 && (single_succ_edge (bb)->flags & EDGE_CROSSING))
1789 add_reg_note (after, REG_CROSSING_JUMP, NULL_RTX);
1792 /* Now that we've found the spot, do the insertion. */
1793 if (before)
1795 emit_insn_before_noloc (insns, before, bb);
1796 last = prev_nonnote_insn (before);
1798 else
1799 last = emit_insn_after_noloc (insns, after, bb);
/* Inserted return jumps terminate the block: drop the fallthru flag and
   end the block with a barrier.  */
1801 if (returnjump_p (last))
1803 /* ??? Remove all outgoing edges from BB and add one for EXIT.
1804 This is not currently a problem because this only happens
1805 for the (single) epilogue, which already has a fallthru edge
1806 to EXIT. */
1808 e = single_succ_edge (bb);
1809 gcc_assert (e->dest == EXIT_BLOCK_PTR
1810 && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));
1812 e->flags &= ~EDGE_FALLTHRU;
1813 emit_barrier_after (last);
1815 if (before)
1816 delete_insn (before);
1818 else
1819 gcc_assert (!JUMP_P (last));
1822 /* Update the CFG for all queued instructions. */
1824 void
1825 commit_edge_insertions (void)
1827 basic_block bb;
1829 #ifdef ENABLE_CHECKING
1830 verify_flow_info ();
1831 #endif
1833 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1835 edge e;
1836 edge_iterator ei;
1838 FOR_EACH_EDGE (e, ei, bb->succs)
1839 if (e->insns.r)
1840 commit_one_edge_insertion (e);
1845 /* Print out RTL-specific basic block information (live information
1846 at start and end with TDF_DETAILS). FLAGS are the TDF_* masks
1847 documented in dumpfile.h. */
1849 static void
1850 rtl_dump_bb (FILE *outf, basic_block bb, int indent, int flags)
1852 rtx insn;
1853 rtx last;
1854 char *s_indent;
/* NOTE(review): s_indent is built here but not referenced in the visible
   body — confirm against upstream whether it is still needed.  */
1856 s_indent = (char *) alloca ((size_t) indent + 1);
1857 memset (s_indent, ' ', (size_t) indent);
1858 s_indent[indent] = '\0';
1860 if (df && (flags & TDF_DETAILS))
1862 df_dump_top (bb, outf);
1863 putc ('\n', outf);
/* Dump every insn of the block (entry/exit pseudo-blocks have none),
   with optional dataflow detail around each insn.  */
1866 if (bb->index != ENTRY_BLOCK && bb->index != EXIT_BLOCK)
1867 for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
1868 insn = NEXT_INSN (insn))
1870 if (flags & TDF_DETAILS)
1871 df_dump_insn_top (insn, outf);
1872 if (! (flags & TDF_SLIM))
1873 print_rtl_single (outf, insn);
1874 else
1875 dump_insn_slim (outf, insn);
1876 if (flags & TDF_DETAILS)
1877 df_dump_insn_bottom (insn, outf);
1880 if (df && (flags & TDF_DETAILS))
1882 df_dump_bottom (bb, outf);
1883 putc ('\n', outf);
1888 /* Like dump_function_to_file, but for RTL. Print out dataflow information
1889 for the start of each basic block. FLAGS are the TDF_* masks documented
1890 in dumpfile.h. */
1892 void
1893 print_rtl_with_bb (FILE *outf, const_rtx rtx_first, int flags)
1895 const_rtx tmp_rtx;
1896 if (rtx_first == 0)
1897 fprintf (outf, "(nil)\n");
1898 else
1900 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
1901 int max_uid = get_max_uid ();
1902 basic_block *start = XCNEWVEC (basic_block, max_uid);
1903 basic_block *end = XCNEWVEC (basic_block, max_uid);
1904 enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
1905 basic_block bb;
1907 /* After freeing the CFG, we still have BLOCK_FOR_INSN set on most
1908 insns, but the CFG is not maintained so the basic block info
1909 is not reliable. Therefore it's omitted from the dumps. */
1910 if (! (cfun->curr_properties & PROP_cfg))
1911 flags &= ~TDF_BLOCKS;
1913 if (df)
1914 df_dump_start (outf);
/* Precompute, per insn UID, which block starts/ends there and whether
   the insn belongs to zero, one, or several blocks.  */
1916 if (flags & TDF_BLOCKS)
1918 FOR_EACH_BB_REVERSE (bb)
1920 rtx x;
1922 start[INSN_UID (BB_HEAD (bb))] = bb;
1923 end[INSN_UID (BB_END (bb))] = bb;
1924 for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
1926 enum bb_state state = IN_MULTIPLE_BB;
1928 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
1929 state = IN_ONE_BB;
1930 in_bb_p[INSN_UID (x)] = state;
1932 if (x == BB_END (bb))
1933 break;
/* Main pass: dump each insn, bracketed by block-begin/block-end info
   and optional dataflow detail.  */
1938 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
1940 if (flags & TDF_BLOCKS)
1942 bb = start[INSN_UID (tmp_rtx)];
1943 if (bb != NULL)
1945 dump_bb_info (outf, bb, 0, dump_flags | TDF_COMMENT, true, false);
1946 if (df && (flags & TDF_DETAILS))
1947 df_dump_top (bb, outf);
1950 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
1951 && !NOTE_P (tmp_rtx)
1952 && !BARRIER_P (tmp_rtx))
1953 fprintf (outf, ";; Insn is not within a basic block\n");
1954 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
1955 fprintf (outf, ";; Insn is in multiple basic blocks\n");
1958 if (flags & TDF_DETAILS)
1959 df_dump_insn_top (tmp_rtx, outf);
1960 if (! (flags & TDF_SLIM))
1961 print_rtl_single (outf, tmp_rtx);
1962 else
1963 dump_insn_slim (outf, tmp_rtx);
1964 if (flags & TDF_DETAILS)
1965 df_dump_insn_bottom (tmp_rtx, outf);
1967 if (flags & TDF_BLOCKS)
1969 bb = end[INSN_UID (tmp_rtx)];
1970 if (bb != NULL)
1972 dump_bb_info (outf, bb, 0, dump_flags | TDF_COMMENT, false, true);
1973 if (df && (flags & TDF_DETAILS))
1974 df_dump_bottom (bb, outf);
1975 putc ('\n', outf);
1980 free (start);
1981 free (end);
1982 free (in_bb_p);
1985 if (crtl->epilogue_delay_list != 0)
1987 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
1988 for (tmp_rtx = crtl->epilogue_delay_list; tmp_rtx != 0;
1989 tmp_rtx = XEXP (tmp_rtx, 1))
1990 print_rtl_single (outf, XEXP (tmp_rtx, 0));
1994 /* Update the branch probability of BB if a REG_BR_PROB is present. */
1996 void
1997 update_br_prob_note (basic_block bb)
1999 rtx note;
2000 if (!JUMP_P (BB_END (bb)))
2001 return;
2002 note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
2003 if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
2004 return;
2005 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
2008 /* Get the last insn associated with block BB (that includes barriers and
2009 tablejumps after BB). */
2011 get_last_bb_insn (basic_block bb)
2013 rtx tmp;
2014 rtx end = BB_END (bb);
2016 /* Include any jump table following the basic block. */
2017 if (tablejump_p (end, NULL, &tmp))
2018 end = tmp;
2020 /* Include any barriers that may follow the basic block. */
2021 tmp = next_nonnote_insn_bb (end);
2022 while (tmp && BARRIER_P (tmp))
2024 end = tmp;
2025 tmp = next_nonnote_insn_bb (end);
2028 return end;
2031 /* Verify the CFG and RTL consistency common for both underlying RTL and
2032 cfglayout RTL.
2034 Currently it does following checks:
2036 - overlapping of basic blocks
2037 - insns with wrong BLOCK_FOR_INSN pointers
2038 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
2039 - tails of basic blocks (ensure that boundary is necessary)
2040 - scans body of the basic block for JUMP_INSN, CODE_LABEL
2041 and NOTE_INSN_BASIC_BLOCK
2042 - verify that no fall_thru edge crosses hot/cold partition boundaries
2043 - verify that there are no pending RTL branch predictions
2045 In future it can be extended check a lot of other stuff as well
2046 (reachability of basic blocks, life information, etc. etc.). */
2048 static int
2049 rtl_verify_flow_info_1 (void)
2051 rtx x;
2052 int err = 0;
2053 basic_block bb;
2055 /* Check the general integrity of the basic blocks. */
2056 FOR_EACH_BB_REVERSE (bb)
2058 rtx insn;
2060 if (!(bb->flags & BB_RTL))
2062 error ("BB_RTL flag not set for block %d", bb->index);
2063 err = 1;
2066 FOR_BB_INSNS (bb, insn)
2067 if (BLOCK_FOR_INSN (insn) != bb)
2069 error ("insn %d basic block pointer is %d, should be %d",
2070 INSN_UID (insn),
2071 BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
2072 bb->index);
2073 err = 1;
/* Header/footer insns (cfglayout mode) must not claim membership in
   any basic block; barriers are exempt since they carry no bb field.  */
2076 for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
2077 if (!BARRIER_P (insn)
2078 && BLOCK_FOR_INSN (insn) != NULL)
2080 error ("insn %d in header of bb %d has non-NULL basic block",
2081 INSN_UID (insn), bb->index);
2082 err = 1;
2084 for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
2085 if (!BARRIER_P (insn)
2086 && BLOCK_FOR_INSN (insn) != NULL)
2088 error ("insn %d in footer of bb %d has non-NULL basic block",
2089 INSN_UID (insn), bb->index);
2090 err = 1;
2094 /* Now check the basic blocks (boundaries etc.) */
2095 FOR_EACH_BB_REVERSE (bb)
2097 int n_fallthru = 0, n_eh = 0, n_call = 0, n_abnormal = 0, n_branch = 0;
2098 edge e, fallthru = NULL;
2099 rtx note;
2100 edge_iterator ei;
2102 if (JUMP_P (BB_END (bb))
2103 && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
2104 && EDGE_COUNT (bb->succs) >= 2
2105 && any_condjump_p (BB_END (bb)))
2107 if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability
2108 && profile_status != PROFILE_ABSENT)
2110 error ("verify_flow_info: REG_BR_PROB does not match cfg %wi %i",
2111 INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
2112 err = 1;
/* Classify and sanity-check every outgoing edge, counting each kind so
   the per-block consistency checks below can use the totals.  */
2115 FOR_EACH_EDGE (e, ei, bb->succs)
2117 bool is_crossing;
2119 if (e->flags & EDGE_FALLTHRU)
2120 n_fallthru++, fallthru = e;
2122 is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
2123 && e->src != ENTRY_BLOCK_PTR
2124 && e->dest != EXIT_BLOCK_PTR);
2125 if (e->flags & EDGE_CROSSING)
2127 if (!is_crossing)
2129 error ("EDGE_CROSSING incorrectly set across same section");
2130 err = 1;
2132 if (e->flags & EDGE_FALLTHRU)
2134 error ("fallthru edge crosses section boundary (bb %i)",
2135 e->src->index);
2136 err = 1;
2138 if (e->flags & EDGE_EH)
2140 error ("EH edge crosses section boundary (bb %i)",
2141 e->src->index);
2142 err = 1;
2145 else if (is_crossing)
2147 error ("EDGE_CROSSING missing across section boundary");
2148 err = 1;
2151 if ((e->flags & ~(EDGE_DFS_BACK
2152 | EDGE_CAN_FALLTHRU
2153 | EDGE_IRREDUCIBLE_LOOP
2154 | EDGE_LOOP_EXIT
2155 | EDGE_CROSSING
2156 | EDGE_PRESERVE)) == 0
2157 n_branch++;
2159 if (e->flags & EDGE_ABNORMAL_CALL)
2160 n_call++;
2162 if (e->flags & EDGE_EH)
2163 n_eh++;
2164 else if (e->flags & EDGE_ABNORMAL)
2165 n_abnormal++;
2168 if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
2170 error ("missing REG_EH_REGION note in the end of bb %i", bb->index);
2171 err = 1;
2173 if (n_eh > 1)
2175 error ("too many eh edges %i", bb->index);
2176 err = 1;
2178 if (n_branch
2179 && (!JUMP_P (BB_END (bb))
2180 || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
2181 || any_condjump_p (BB_END (bb))))))
2183 error ("too many outgoing branch edges from bb %i", bb->index);
2184 err = 1;
2186 if (n_fallthru && any_uncondjump_p (BB_END (bb)))
2188 error ("fallthru edge after unconditional jump %i", bb->index);
2189 err = 1;
2191 if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
2193 error ("wrong number of branch edges after unconditional jump %i",
2194 bb->index);
2195 err = 1;
2197 if (n_branch != 1 && any_condjump_p (BB_END (bb))
2198 && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
2200 error ("wrong amount of branch edges after conditional jump %i",
2201 bb->index);
2202 err = 1;
2204 if (n_call && !CALL_P (BB_END (bb)))
2206 error ("call edges for non-call insn in bb %i", bb->index);
2207 err = 1;
2209 if (n_abnormal
2210 && (!CALL_P (BB_END (bb)) && n_call != n_abnormal)
2211 && (!JUMP_P (BB_END (bb))
2212 || any_condjump_p (BB_END (bb))
2213 || any_uncondjump_p (BB_END (bb))))
2215 error ("abnormal edges for no purpose in bb %i", bb->index);
2216 err = 1;
2219 for (x = BB_HEAD (bb); x != NEXT_INSN (BB_END (bb)); x = NEXT_INSN (x))
2220 /* We may have a barrier inside a basic block before dead code
2221 elimination. There is no BLOCK_FOR_INSN field in a barrier. */
2222 if (!BARRIER_P (x) && BLOCK_FOR_INSN (x) != bb)
2224 debug_rtx (x);
2225 if (! BLOCK_FOR_INSN (x))
2226 error
2227 ("insn %d inside basic block %d but block_for_insn is NULL",
2228 INSN_UID (x), bb->index);
2229 else
2230 error
2231 ("insn %d inside basic block %d but block_for_insn is %i",
2232 INSN_UID (x), bb->index, BLOCK_FOR_INSN (x)->index);
2234 err = 1;
2237 /* OK pointers are correct. Now check the header of basic
2238 block. It ought to contain optional CODE_LABEL followed
2239 by NOTE_BASIC_BLOCK. */
2240 x = BB_HEAD (bb);
2241 if (LABEL_P (x))
2243 if (BB_END (bb) == x)
2245 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2246 bb->index);
2247 err = 1;
2250 x = NEXT_INSN (x);
2253 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
2255 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2256 bb->index);
2257 err = 1;
2260 if (BB_END (bb) == x)
2261 /* Do checks for empty blocks here. */
2263 else
/* The body of the block must not contain another basic-block note or
   any control flow insn before its last insn.  */
2264 for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
2266 if (NOTE_INSN_BASIC_BLOCK_P (x))
2268 error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
2269 INSN_UID (x), bb->index);
2270 err = 1;
2273 if (x == BB_END (bb))
2274 break;
2276 if (control_flow_insn_p (x))
2278 error ("in basic block %d:", bb->index);
2279 fatal_insn ("flow control insn inside a basic block", x);
2284 /* Clean up. */
2285 return err;
2288 /* Verify the CFG and RTL consistency common for both underlying RTL and
2289 cfglayout RTL.
2291 Currently it does following checks:
2292 - all checks of rtl_verify_flow_info_1
2293 - test head/end pointers
2294 - check that all insns are in the basic blocks
2295 (except the switch handling code, barriers and notes)
2296 - check that all returns are followed by barriers
2297 - check that all fallthru edge points to the adjacent blocks. */
2299 static int
2300 rtl_verify_flow_info (void)
2302 basic_block bb;
2303 int err = rtl_verify_flow_info_1 ();
2304 rtx x;
2305 rtx last_head = get_last_insn ();
2306 basic_block *bb_info;
2307 int num_bb_notes;
2308 const rtx rtx_first = get_insns ();
2309 basic_block last_bb_seen = ENTRY_BLOCK_PTR, curr_bb = NULL;
2310 const int max_uid = get_max_uid ();
2312 bb_info = XCNEWVEC (basic_block, max_uid);
2314 FOR_EACH_BB_REVERSE (bb)
2316 edge e;
2317 rtx head = BB_HEAD (bb);
2318 rtx end = BB_END (bb);
2320 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2322 /* Verify the end of the basic block is in the INSN chain. */
2323 if (x == end)
2324 break;
2326 /* And that the code outside of basic blocks has NULL bb field. */
2327 if (!BARRIER_P (x)
2328 && BLOCK_FOR_INSN (x) != NULL)
2330 error ("insn %d outside of basic blocks has non-NULL bb field",
2331 INSN_UID (x));
2332 err = 1;
2336 if (!x)
2338 error ("end insn %d for block %d not found in the insn stream",
2339 INSN_UID (end), bb->index);
2340 err = 1;
2343 /* Work backwards from the end to the head of the basic block
2344 to verify the head is in the RTL chain. */
2345 for (; x != NULL_RTX; x = PREV_INSN (x))
2347 /* While walking over the insn chain, verify insns appear
2348 in only one basic block. */
2349 if (bb_info[INSN_UID (x)] != NULL)
2351 error ("insn %d is in multiple basic blocks (%d and %d)",
2352 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
2353 err = 1;
2356 bb_info[INSN_UID (x)] = bb;
2358 if (x == head)
2359 break;
2361 if (!x)
2363 error ("head insn %d for block %d not found in the insn stream",
2364 INSN_UID (head), bb->index);
2365 err = 1;
2368 last_head = PREV_INSN (x);
2370 e = find_fallthru_edge (bb->succs);
2371 if (!e)
2373 rtx insn;
2375 /* Ensure existence of barrier in BB with no fallthru edges. */
2376 for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
2378 if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
2380 error ("missing barrier after block %i", bb->index);
2381 err = 1;
2382 break;
2384 if (BARRIER_P (insn))
2385 break;
2388 else if (e->src != ENTRY_BLOCK_PTR
2389 && e->dest != EXIT_BLOCK_PTR)
2391 rtx insn;
2393 if (e->src->next_bb != e->dest)
2395 error
2396 ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
2397 e->src->index, e->dest->index);
2398 err = 1;
2400 else
2401 for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
2402 insn = NEXT_INSN (insn))
2403 if (BARRIER_P (insn) || INSN_P (insn))
2405 error ("verify_flow_info: Incorrect fallthru %i->%i",
2406 e->src->index, e->dest->index);
2407 fatal_insn ("wrong insn in the fallthru edge", insn);
2408 err = 1;
2413 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2415 /* Check that the code before the first basic block has NULL
2416 bb field. */
2417 if (!BARRIER_P (x)
2418 && BLOCK_FOR_INSN (x) != NULL)
2420 error ("insn %d outside of basic blocks has non-NULL bb field",
2421 INSN_UID (x));
2422 err = 1;
2425 free (bb_info);
2427 num_bb_notes = 0;
2428 last_bb_seen = ENTRY_BLOCK_PTR;
2430 for (x = rtx_first; x; x = NEXT_INSN (x))
2432 if (NOTE_INSN_BASIC_BLOCK_P (x))
2434 bb = NOTE_BASIC_BLOCK (x);
2436 num_bb_notes++;
2437 if (bb != last_bb_seen->next_bb)
2438 internal_error ("basic blocks not laid down consecutively");
2440 curr_bb = last_bb_seen = bb;
2443 if (!curr_bb)
2445 switch (GET_CODE (x))
2447 case BARRIER:
2448 case NOTE:
2449 break;
2451 case CODE_LABEL:
2452 /* An addr_vec is placed outside any basic block. */
2453 if (NEXT_INSN (x)
2454 && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
2455 x = NEXT_INSN (x);
2457 /* But in any case, non-deletable labels can appear anywhere. */
2458 break;
2460 default:
2461 fatal_insn ("insn outside basic block", x);
2465 if (JUMP_P (x)
2466 && returnjump_p (x) && ! condjump_p (x)
2467 && ! (next_nonnote_insn (x) && BARRIER_P (next_nonnote_insn (x))))
2468 fatal_insn ("return not followed by barrier", x);
2469 if (curr_bb && x == BB_END (curr_bb))
2470 curr_bb = NULL;
2473 if (num_bb_notes != n_basic_blocks - NUM_FIXED_BLOCKS)
2474 internal_error
2475 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
2476 num_bb_notes, n_basic_blocks);
2478 return err;
2481 /* Assume that the preceding pass has possibly eliminated jump instructions
2482 or converted the unconditional jumps. Eliminate the edges from CFG.
2483 Return true if any edges are eliminated. */
2485 bool
2486 purge_dead_edges (basic_block bb)
2488 edge e;
2489 rtx insn = BB_END (bb), note;
2490 bool purged = false;
2491 bool found;
2492 edge_iterator ei;
2494 if (DEBUG_INSN_P (insn) && insn != BB_HEAD (bb))
2496 insn = PREV_INSN (insn);
2497 while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));
2499 /* If this instruction cannot trap, remove REG_EH_REGION notes. */
2500 if (NONJUMP_INSN_P (insn)
2501 && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
2503 rtx eqnote;
2505 if (! may_trap_p (PATTERN (insn))
2506 || ((eqnote = find_reg_equal_equiv_note (insn))
2507 && ! may_trap_p (XEXP (eqnote, 0))))
2508 remove_note (insn, note);
2511 /* Cleanup abnormal edges caused by exceptions or non-local gotos. */
2512 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2514 bool remove = false;
2516 /* There are three types of edges we need to handle correctly here: EH
2517 edges, abnormal call EH edges, and abnormal call non-EH edges. The
2518 latter can appear when nonlocal gotos are used. */
2519 if (e->flags & EDGE_ABNORMAL_CALL)
2521 if (!CALL_P (insn))
2522 remove = true;
2523 else if (can_nonlocal_goto (insn))
2525 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
2527 else if (flag_tm && find_reg_note (insn, REG_TM, NULL))
2529 else
2530 remove = true;
2532 else if (e->flags & EDGE_EH)
2533 remove = !can_throw_internal (insn);
2535 if (remove)
2537 remove_edge (e);
2538 df_set_bb_dirty (bb);
2539 purged = true;
2541 else
2542 ei_next (&ei);
2545 if (JUMP_P (insn))
2547 rtx note;
2548 edge b,f;
2549 edge_iterator ei;
2551 /* We do care only about conditional jumps and simplejumps. */
2552 if (!any_condjump_p (insn)
2553 && !returnjump_p (insn)
2554 && !simplejump_p (insn))
2555 return purged;
2557 /* Branch probability/prediction notes are defined only for
2558 condjumps. We've possibly turned condjump into simplejump. */
2559 if (simplejump_p (insn))
2561 note = find_reg_note (insn, REG_BR_PROB, NULL);
2562 if (note)
2563 remove_note (insn, note);
2564 while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
2565 remove_note (insn, note);
2568 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2570 /* Avoid abnormal flags to leak from computed jumps turned
2571 into simplejumps. */
2573 e->flags &= ~EDGE_ABNORMAL;
2575 /* See if this edge is one we should keep. */
2576 if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
2577 /* A conditional jump can fall through into the next
2578 block, so we should keep the edge. */
2580 ei_next (&ei);
2581 continue;
2583 else if (e->dest != EXIT_BLOCK_PTR
2584 && BB_HEAD (e->dest) == JUMP_LABEL (insn))
2585 /* If the destination block is the target of the jump,
2586 keep the edge. */
2588 ei_next (&ei);
2589 continue;
2591 else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
2592 /* If the destination block is the exit block, and this
2593 instruction is a return, then keep the edge. */
2595 ei_next (&ei);
2596 continue;
2598 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
2599 /* Keep the edges that correspond to exceptions thrown by
2600 this instruction and rematerialize the EDGE_ABNORMAL
2601 flag we just cleared above. */
2603 e->flags |= EDGE_ABNORMAL;
2604 ei_next (&ei);
2605 continue;
2608 /* We do not need this edge. */
2609 df_set_bb_dirty (bb);
2610 purged = true;
2611 remove_edge (e);
2614 if (EDGE_COUNT (bb->succs) == 0 || !purged)
2615 return purged;
2617 if (dump_file)
2618 fprintf (dump_file, "Purged edges from bb %i\n", bb->index);
2620 if (!optimize)
2621 return purged;
2623 /* Redistribute probabilities. */
2624 if (single_succ_p (bb))
2626 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
2627 single_succ_edge (bb)->count = bb->count;
2629 else
2631 note = find_reg_note (insn, REG_BR_PROB, NULL);
2632 if (!note)
2633 return purged;
2635 b = BRANCH_EDGE (bb);
2636 f = FALLTHRU_EDGE (bb);
2637 b->probability = INTVAL (XEXP (note, 0));
2638 f->probability = REG_BR_PROB_BASE - b->probability;
2639 b->count = bb->count * b->probability / REG_BR_PROB_BASE;
2640 f->count = bb->count * f->probability / REG_BR_PROB_BASE;
2643 return purged;
2645 else if (CALL_P (insn) && SIBLING_CALL_P (insn))
2647 /* First, there should not be any EH or ABCALL edges resulting
2648 from non-local gotos and the like. If there were, we shouldn't
2649 have created the sibcall in the first place. Second, there
2650 should of course never have been a fallthru edge. */
2651 gcc_assert (single_succ_p (bb));
2652 gcc_assert (single_succ_edge (bb)->flags
2653 == (EDGE_SIBCALL | EDGE_ABNORMAL));
2655 return 0;
2658 /* If we don't see a jump insn, we don't know exactly why the block would
2659 have been broken at this point. Look for a simple, non-fallthru edge,
2660 as these are only created by conditional branches. If we find such an
2661 edge we know that there used to be a jump here and can then safely
2662 remove all non-fallthru edges. */
2663 found = false;
2664 FOR_EACH_EDGE (e, ei, bb->succs)
2665 if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
2667 found = true;
2668 break;
2671 if (!found)
2672 return purged;
2674 /* Remove all but the fake and fallthru edges. The fake edge may be
2675 the only successor for this block in the case of noreturn
2676 calls. */
2677 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2679 if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
2681 df_set_bb_dirty (bb);
2682 remove_edge (e);
2683 purged = true;
2685 else
2686 ei_next (&ei);
2689 gcc_assert (single_succ_p (bb));
2691 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
2692 single_succ_edge (bb)->count = bb->count;
2694 if (dump_file)
2695 fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
2696 bb->index);
2697 return purged;
2700 /* Search all basic blocks for potentially dead edges and purge them. Return
2701 true if some edge has been eliminated. */
2703 bool
2704 purge_all_dead_edges (void)
2706 int purged = false;
2707 basic_block bb;
2709 FOR_EACH_BB (bb)
2711 bool purged_here = purge_dead_edges (bb);
2713 purged |= purged_here;
2716 return purged;
2719 /* This is used by a few passes that emit some instructions after abnormal
2720 calls, moving the basic block's end, while they in fact do want to emit
2721 them on the fallthru edge. Look for abnormal call edges, find backward
2722 the call in the block and insert the instructions on the edge instead.
2724 Similarly, handle instructions throwing exceptions internally.
2726 Return true when instructions have been found and inserted on edges. */
2728 bool
2729 fixup_abnormal_edges (void)
2731 bool inserted = false;
2732 basic_block bb;
2734 FOR_EACH_BB (bb)
2736 edge e;
2737 edge_iterator ei;
2739 /* Look for cases we are interested in - calls or instructions causing
2740 exceptions. */
2741 FOR_EACH_EDGE (e, ei, bb->succs)
2742 if ((e->flags & EDGE_ABNORMAL_CALL)
2743 || ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
2744 == (EDGE_ABNORMAL | EDGE_EH)))
2745 break;
2747 if (e && !CALL_P (BB_END (bb)) && !can_throw_internal (BB_END (bb)))
2749 rtx insn;
2751 /* Get past the new insns generated. Allow notes, as the insns
2752 may be already deleted. */
2753 insn = BB_END (bb);
2754 while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
2755 && !can_throw_internal (insn)
2756 && insn != BB_HEAD (bb))
2757 insn = PREV_INSN (insn);
2759 if (CALL_P (insn) || can_throw_internal (insn))
2761 rtx stop, next;
2763 e = find_fallthru_edge (bb->succs);
2765 stop = NEXT_INSN (BB_END (bb));
2766 BB_END (bb) = insn;
2768 for (insn = NEXT_INSN (insn); insn != stop; insn = next)
2770 next = NEXT_INSN (insn);
2771 if (INSN_P (insn))
2773 delete_insn (insn);
2775 /* Sometimes there's still the return value USE.
2776 If it's placed after a trapping call (i.e. that
2777 call is the last insn anyway), we have no fallthru
2778 edge. Simply delete this use and don't try to insert
2779 on the non-existent edge. */
2780 if (GET_CODE (PATTERN (insn)) != USE)
2782 /* We're not deleting it, we're moving it. */
2783 INSN_DELETED_P (insn) = 0;
2784 PREV_INSN (insn) = NULL_RTX;
2785 NEXT_INSN (insn) = NULL_RTX;
2787 insert_insn_on_edge (insn, e);
2788 inserted = true;
2791 else if (!BARRIER_P (insn))
2792 set_block_for_insn (insn, NULL);
2796 /* It may be that we don't find any trapping insn. In this
2797 case we discovered quite late that the insn that had been
2798 marked as can_throw_internal in fact couldn't trap at all.
2799 So we should in fact delete the EH edges out of the block. */
2800 else
2801 purge_dead_edges (bb);
2805 return inserted;
2808 /* Cut the insns from FIRST to LAST out of the insns stream. */
2811 unlink_insn_chain (rtx first, rtx last)
2813 rtx prevfirst = PREV_INSN (first);
2814 rtx nextlast = NEXT_INSN (last);
2816 PREV_INSN (first) = NULL;
2817 NEXT_INSN (last) = NULL;
2818 if (prevfirst)
2819 NEXT_INSN (prevfirst) = nextlast;
2820 if (nextlast)
2821 PREV_INSN (nextlast) = prevfirst;
2822 else
2823 set_last_insn (prevfirst);
2824 if (!prevfirst)
2825 set_first_insn (nextlast);
2826 return first;
2829 /* Skip over inter-block insns occurring after BB which are typically
2830 associated with BB (e.g., barriers). If there are any such insns,
2831 we return the last one. Otherwise, we return the end of BB. */
2833 static rtx
2834 skip_insns_after_block (basic_block bb)
2836 rtx insn, last_insn, next_head, prev;
2838 next_head = NULL_RTX;
2839 if (bb->next_bb != EXIT_BLOCK_PTR)
2840 next_head = BB_HEAD (bb->next_bb);
2842 for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
2844 if (insn == next_head)
2845 break;
2847 switch (GET_CODE (insn))
2849 case BARRIER:
2850 last_insn = insn;
2851 continue;
2853 case NOTE:
2854 switch (NOTE_KIND (insn))
2856 case NOTE_INSN_BLOCK_END:
2857 gcc_unreachable ();
2858 continue;
2859 default:
2860 continue;
2861 break;
2863 break;
2865 case CODE_LABEL:
2866 if (NEXT_INSN (insn)
2867 && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
2869 insn = NEXT_INSN (insn);
2870 last_insn = insn;
2871 continue;
2873 break;
2875 default:
2876 break;
2879 break;
2882 /* It is possible to hit contradictory sequence. For instance:
2884 jump_insn
2885 NOTE_INSN_BLOCK_BEG
2886 barrier
2888 Where barrier belongs to jump_insn, but the note does not. This can be
2889 created by removing the basic block originally following
2890 NOTE_INSN_BLOCK_BEG. In such case reorder the notes. */
2892 for (insn = last_insn; insn != BB_END (bb); insn = prev)
2894 prev = PREV_INSN (insn);
2895 if (NOTE_P (insn))
2896 switch (NOTE_KIND (insn))
2898 case NOTE_INSN_BLOCK_END:
2899 gcc_unreachable ();
2900 break;
2901 case NOTE_INSN_DELETED:
2902 case NOTE_INSN_DELETED_LABEL:
2903 case NOTE_INSN_DELETED_DEBUG_LABEL:
2904 continue;
2905 default:
2906 reorder_insns (insn, insn, last_insn);
2910 return last_insn;
2913 /* Locate or create a label for a given basic block. */
2915 static rtx
2916 label_for_bb (basic_block bb)
2918 rtx label = BB_HEAD (bb);
2920 if (!LABEL_P (label))
2922 if (dump_file)
2923 fprintf (dump_file, "Emitting label for block %d\n", bb->index);
2925 label = block_label (bb);
2928 return label;
2931 /* Locate the effective beginning and end of the insn chain for each
2932 block, as defined by skip_insns_after_block above. */
2934 static void
2935 record_effective_endpoints (void)
2937 rtx next_insn;
2938 basic_block bb;
2939 rtx insn;
2941 for (insn = get_insns ();
2942 insn
2943 && NOTE_P (insn)
2944 && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
2945 insn = NEXT_INSN (insn))
2946 continue;
2947 /* No basic blocks at all? */
2948 gcc_assert (insn);
2950 if (PREV_INSN (insn))
2951 cfg_layout_function_header =
2952 unlink_insn_chain (get_insns (), PREV_INSN (insn));
2953 else
2954 cfg_layout_function_header = NULL_RTX;
2956 next_insn = get_insns ();
2957 FOR_EACH_BB (bb)
2959 rtx end;
2961 if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
2962 BB_HEADER (bb) = unlink_insn_chain (next_insn,
2963 PREV_INSN (BB_HEAD (bb)));
2964 end = skip_insns_after_block (bb);
2965 if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
2966 BB_FOOTER (bb) = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
2967 next_insn = NEXT_INSN (BB_END (bb));
2970 cfg_layout_function_footer = next_insn;
2971 if (cfg_layout_function_footer)
2972 cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
/* Pass driver: switch the function into cfglayout mode.  */

static unsigned int
into_cfg_layout_mode (void)
{
  cfg_layout_initialize (0);
  return 0;
}
2982 static unsigned int
2983 outof_cfg_layout_mode (void)
2985 basic_block bb;
2987 FOR_EACH_BB (bb)
2988 if (bb->next_bb != EXIT_BLOCK_PTR)
2989 bb->aux = bb->next_bb;
2991 cfg_layout_finalize ();
2993 return 0;
2996 struct rtl_opt_pass pass_into_cfg_layout_mode =
2999 RTL_PASS,
3000 "into_cfglayout", /* name */
3001 NULL, /* gate */
3002 into_cfg_layout_mode, /* execute */
3003 NULL, /* sub */
3004 NULL, /* next */
3005 0, /* static_pass_number */
3006 TV_CFG, /* tv_id */
3007 0, /* properties_required */
3008 PROP_cfglayout, /* properties_provided */
3009 0, /* properties_destroyed */
3010 0, /* todo_flags_start */
3011 0 /* todo_flags_finish */
3015 struct rtl_opt_pass pass_outof_cfg_layout_mode =
3018 RTL_PASS,
3019 "outof_cfglayout", /* name */
3020 NULL, /* gate */
3021 outof_cfg_layout_mode, /* execute */
3022 NULL, /* sub */
3023 NULL, /* next */
3024 0, /* static_pass_number */
3025 TV_CFG, /* tv_id */
3026 0, /* properties_required */
3027 0, /* properties_provided */
3028 PROP_cfglayout, /* properties_destroyed */
3029 0, /* todo_flags_start */
3030 0 /* todo_flags_finish */
3035 /* Link the basic blocks in the correct order, compacting the basic
3036 block queue while at it. If STAY_IN_CFGLAYOUT_MODE is false, this
3037 function also clears the basic block header and footer fields.
3039 This function is usually called after a pass (e.g. tracer) finishes
3040 some transformations while in cfglayout mode. The required sequence
3041 of the basic blocks is in a linked list along the bb->aux field.
3042 This functions re-links the basic block prev_bb and next_bb pointers
3043 accordingly, and it compacts and renumbers the blocks.
3045 FIXME: This currently works only for RTL, but the only RTL-specific
3046 bits are the STAY_IN_CFGLAYOUT_MODE bits. The tracer pass was moved
3047 to GIMPLE a long time ago, but it doesn't relink the basic block
3048 chain. It could do that (to give better initial RTL) if this function
3049 is made IR-agnostic (and moved to cfganal.c or cfg.c while at it). */
3051 void
3052 relink_block_chain (bool stay_in_cfglayout_mode)
3054 basic_block bb, prev_bb;
3055 int index;
3057 /* Maybe dump the re-ordered sequence. */
3058 if (dump_file)
3060 fprintf (dump_file, "Reordered sequence:\n");
3061 for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
3063 bb = (basic_block) bb->aux, index++)
3065 fprintf (dump_file, " %i ", index);
3066 if (get_bb_original (bb))
3067 fprintf (dump_file, "duplicate of %i ",
3068 get_bb_original (bb)->index);
3069 else if (forwarder_block_p (bb)
3070 && !LABEL_P (BB_HEAD (bb)))
3071 fprintf (dump_file, "compensation ");
3072 else
3073 fprintf (dump_file, "bb %i ", bb->index);
3074 fprintf (dump_file, " [%i]\n", bb->frequency);
3078 /* Now reorder the blocks. */
3079 prev_bb = ENTRY_BLOCK_PTR;
3080 bb = ENTRY_BLOCK_PTR->next_bb;
3081 for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
3083 bb->prev_bb = prev_bb;
3084 prev_bb->next_bb = bb;
3086 prev_bb->next_bb = EXIT_BLOCK_PTR;
3087 EXIT_BLOCK_PTR->prev_bb = prev_bb;
3089 /* Then, clean up the aux fields. */
3090 FOR_ALL_BB (bb)
3092 bb->aux = NULL;
3093 if (!stay_in_cfglayout_mode)
3094 BB_HEADER (bb) = BB_FOOTER (bb) = NULL;
3097 /* Maybe reset the original copy tables, they are not valid anymore
3098 when we renumber the basic blocks in compact_blocks. If we are
3099 are going out of cfglayout mode, don't re-allocate the tables. */
3100 free_original_copy_tables ();
3101 if (stay_in_cfglayout_mode)
3102 initialize_original_copy_tables ();
3104 /* Finally, put basic_block_info in the new order. */
3105 compact_blocks ();
3109 /* Given a reorder chain, rearrange the code to match. */
3111 static void
3112 fixup_reorder_chain (void)
3114 basic_block bb;
3115 rtx insn = NULL;
3117 if (cfg_layout_function_header)
3119 set_first_insn (cfg_layout_function_header);
3120 insn = cfg_layout_function_header;
3121 while (NEXT_INSN (insn))
3122 insn = NEXT_INSN (insn);
3125 /* First do the bulk reordering -- rechain the blocks without regard to
3126 the needed changes to jumps and labels. */
3128 for (bb = ENTRY_BLOCK_PTR->next_bb; bb; bb = (basic_block) bb->aux)
3130 if (BB_HEADER (bb))
3132 if (insn)
3133 NEXT_INSN (insn) = BB_HEADER (bb);
3134 else
3135 set_first_insn (BB_HEADER (bb));
3136 PREV_INSN (BB_HEADER (bb)) = insn;
3137 insn = BB_HEADER (bb);
3138 while (NEXT_INSN (insn))
3139 insn = NEXT_INSN (insn);
3141 if (insn)
3142 NEXT_INSN (insn) = BB_HEAD (bb);
3143 else
3144 set_first_insn (BB_HEAD (bb));
3145 PREV_INSN (BB_HEAD (bb)) = insn;
3146 insn = BB_END (bb);
3147 if (BB_FOOTER (bb))
3149 NEXT_INSN (insn) = BB_FOOTER (bb);
3150 PREV_INSN (BB_FOOTER (bb)) = insn;
3151 while (NEXT_INSN (insn))
3152 insn = NEXT_INSN (insn);
3156 NEXT_INSN (insn) = cfg_layout_function_footer;
3157 if (cfg_layout_function_footer)
3158 PREV_INSN (cfg_layout_function_footer) = insn;
3160 while (NEXT_INSN (insn))
3161 insn = NEXT_INSN (insn);
3163 set_last_insn (insn);
3164 #ifdef ENABLE_CHECKING
3165 verify_insn_chain ();
3166 #endif
3168 /* Now add jumps and labels as needed to match the blocks new
3169 outgoing edges. */
3171 for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = (basic_block) bb->aux)
3173 edge e_fall, e_taken, e;
3174 rtx bb_end_insn;
3175 rtx ret_label = NULL_RTX;
3176 basic_block nb, src_bb;
3177 edge_iterator ei;
3179 if (EDGE_COUNT (bb->succs) == 0)
3180 continue;
3182 /* Find the old fallthru edge, and another non-EH edge for
3183 a taken jump. */
3184 e_taken = e_fall = NULL;
3186 FOR_EACH_EDGE (e, ei, bb->succs)
3187 if (e->flags & EDGE_FALLTHRU)
3188 e_fall = e;
3189 else if (! (e->flags & EDGE_EH))
3190 e_taken = e;
3192 bb_end_insn = BB_END (bb);
3193 if (JUMP_P (bb_end_insn))
3195 ret_label = JUMP_LABEL (bb_end_insn);
3196 if (any_condjump_p (bb_end_insn))
3198 /* This might happen if the conditional jump has side
3199 effects and could therefore not be optimized away.
3200 Make the basic block to end with a barrier in order
3201 to prevent rtl_verify_flow_info from complaining. */
3202 if (!e_fall)
3204 gcc_assert (!onlyjump_p (bb_end_insn)
3205 || returnjump_p (bb_end_insn));
3206 BB_FOOTER (bb) = emit_barrier_after (bb_end_insn);
3207 continue;
3210 /* If the old fallthru is still next, nothing to do. */
3211 if (bb->aux == e_fall->dest
3212 || e_fall->dest == EXIT_BLOCK_PTR)
3213 continue;
3215 /* The degenerated case of conditional jump jumping to the next
3216 instruction can happen for jumps with side effects. We need
3217 to construct a forwarder block and this will be done just
3218 fine by force_nonfallthru below. */
3219 if (!e_taken)
3222 /* There is another special case: if *neither* block is next,
3223 such as happens at the very end of a function, then we'll
3224 need to add a new unconditional jump. Choose the taken
3225 edge based on known or assumed probability. */
3226 else if (bb->aux != e_taken->dest)
3228 rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
3230 if (note
3231 && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
3232 && invert_jump (bb_end_insn,
3233 (e_fall->dest == EXIT_BLOCK_PTR
3234 ? NULL_RTX
3235 : label_for_bb (e_fall->dest)), 0))
3237 e_fall->flags &= ~EDGE_FALLTHRU;
3238 gcc_checking_assert (could_fall_through
3239 (e_taken->src, e_taken->dest));
3240 e_taken->flags |= EDGE_FALLTHRU;
3241 update_br_prob_note (bb);
3242 e = e_fall, e_fall = e_taken, e_taken = e;
3246 /* If the "jumping" edge is a crossing edge, and the fall
3247 through edge is non-crossing, leave things as they are. */
3248 else if ((e_taken->flags & EDGE_CROSSING)
3249 && !(e_fall->flags & EDGE_CROSSING))
3250 continue;
3252 /* Otherwise we can try to invert the jump. This will
3253 basically never fail, however, keep up the pretense. */
3254 else if (invert_jump (bb_end_insn,
3255 (e_fall->dest == EXIT_BLOCK_PTR
3256 ? NULL_RTX
3257 : label_for_bb (e_fall->dest)), 0))
3259 e_fall->flags &= ~EDGE_FALLTHRU;
3260 gcc_checking_assert (could_fall_through
3261 (e_taken->src, e_taken->dest));
3262 e_taken->flags |= EDGE_FALLTHRU;
3263 update_br_prob_note (bb);
3264 if (LABEL_NUSES (ret_label) == 0
3265 && single_pred_p (e_taken->dest))
3266 delete_insn (ret_label);
3267 continue;
3270 else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
3272 /* If the old fallthru is still next or if
3273 asm goto doesn't have a fallthru (e.g. when followed by
3274 __builtin_unreachable ()), nothing to do. */
3275 if (! e_fall
3276 || bb->aux == e_fall->dest
3277 || e_fall->dest == EXIT_BLOCK_PTR)
3278 continue;
3280 /* Otherwise we'll have to use the fallthru fixup below. */
3282 else
3284 /* Otherwise we have some return, switch or computed
3285 jump. In the 99% case, there should not have been a
3286 fallthru edge. */
3287 gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
3288 continue;
3291 else
3293 /* No fallthru implies a noreturn function with EH edges, or
3294 something similarly bizarre. In any case, we don't need to
3295 do anything. */
3296 if (! e_fall)
3297 continue;
3299 /* If the fallthru block is still next, nothing to do. */
3300 if (bb->aux == e_fall->dest)
3301 continue;
3303 /* A fallthru to exit block. */
3304 if (e_fall->dest == EXIT_BLOCK_PTR)
3305 continue;
3308 /* We got here if we need to add a new jump insn.
3309 Note force_nonfallthru can delete E_FALL and thus we have to
3310 save E_FALL->src prior to the call to force_nonfallthru. */
3311 src_bb = e_fall->src;
3312 nb = force_nonfallthru_and_redirect (e_fall, e_fall->dest, ret_label);
3313 if (nb)
3315 nb->aux = bb->aux;
3316 bb->aux = nb;
3317 /* Don't process this new block. */
3318 bb = nb;
3320 /* Make sure new bb is tagged for correct section (same as
3321 fall-thru source, since you cannot fall-thru across
3322 section boundaries). */
3323 BB_COPY_PARTITION (src_bb, single_pred (bb));
3324 if (flag_reorder_blocks_and_partition
3325 && targetm_common.have_named_sections
3326 && JUMP_P (BB_END (bb))
3327 && !any_condjump_p (BB_END (bb))
3328 && (EDGE_SUCC (bb, 0)->flags & EDGE_CROSSING))
3329 add_reg_note (BB_END (bb), REG_CROSSING_JUMP, NULL_RTX);
3333 relink_block_chain (/*stay_in_cfglayout_mode=*/false);
3335 /* Annoying special case - jump around dead jumptables left in the code. */
3336 FOR_EACH_BB (bb)
3338 edge e = find_fallthru_edge (bb->succs);
3340 if (e && !can_fallthru (e->src, e->dest))
3341 force_nonfallthru (e);
3344 /* Ensure goto_locus from edges has some instructions with that locus
3345 in RTL. */
3346 if (!optimize)
3347 FOR_EACH_BB (bb)
3349 edge e;
3350 edge_iterator ei;
3352 FOR_EACH_EDGE (e, ei, bb->succs)
3353 if (LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
3354 && !(e->flags & EDGE_ABNORMAL))
3356 edge e2;
3357 edge_iterator ei2;
3358 basic_block dest, nb;
3359 rtx end;
3361 insn = BB_END (e->src);
3362 end = PREV_INSN (BB_HEAD (e->src));
3363 while (insn != end
3364 && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
3365 insn = PREV_INSN (insn);
3366 if (insn != end
3367 && INSN_LOCATION (insn) == e->goto_locus)
3368 continue;
3369 if (simplejump_p (BB_END (e->src))
3370 && !INSN_HAS_LOCATION (BB_END (e->src)))
3372 INSN_LOCATION (BB_END (e->src)) = e->goto_locus;
3373 continue;
3375 dest = e->dest;
3376 if (dest == EXIT_BLOCK_PTR)
3378 /* Non-fallthru edges to the exit block cannot be split. */
3379 if (!(e->flags & EDGE_FALLTHRU))
3380 continue;
3382 else
3384 insn = BB_HEAD (dest);
3385 end = NEXT_INSN (BB_END (dest));
3386 while (insn != end && !NONDEBUG_INSN_P (insn))
3387 insn = NEXT_INSN (insn);
3388 if (insn != end && INSN_HAS_LOCATION (insn)
3389 && INSN_LOCATION (insn) == e->goto_locus)
3390 continue;
3392 nb = split_edge (e);
3393 if (!INSN_P (BB_END (nb)))
3394 BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
3395 nb);
3396 INSN_LOCATION (BB_END (nb)) = e->goto_locus;
3398 /* If there are other incoming edges to the destination block
3399 with the same goto locus, redirect them to the new block as
3400 well, this can prevent other such blocks from being created
3401 in subsequent iterations of the loop. */
3402 for (ei2 = ei_start (dest->preds); (e2 = ei_safe_edge (ei2)); )
3403 if (LOCATION_LOCUS (e2->goto_locus) != UNKNOWN_LOCATION
3404 && !(e2->flags & (EDGE_ABNORMAL | EDGE_FALLTHRU))
3405 && e->goto_locus == e2->goto_locus)
3406 redirect_edge_and_branch (e2, nb);
3407 else
3408 ei_next (&ei2);
3413 /* Perform sanity checks on the insn chain.
3414 1. Check that next/prev pointers are consistent in both the forward and
3415 reverse direction.
3416 2. Count insns in chain, going both directions, and check if equal.
3417 3. Check that get_last_insn () returns the actual end of chain. */
3419 DEBUG_FUNCTION void
3420 verify_insn_chain (void)
3422 rtx x, prevx, nextx;
3423 int insn_cnt1, insn_cnt2;
3425 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
3426 x != 0;
3427 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
3428 gcc_assert (PREV_INSN (x) == prevx);
3430 gcc_assert (prevx == get_last_insn ());
3432 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
3433 x != 0;
3434 nextx = x, insn_cnt2++, x = PREV_INSN (x))
3435 gcc_assert (NEXT_INSN (x) == nextx);
3437 gcc_assert (insn_cnt1 == insn_cnt2);
3440 /* If we have assembler epilogues, the block falling through to exit must
3441 be the last one in the reordered chain when we reach final. Ensure
3442 that this condition is met. */
3443 static void
3444 fixup_fallthru_exit_predecessor (void)
3446 edge e;
3447 basic_block bb = NULL;
3449 /* This transformation is not valid before reload, because we might
3450 separate a call from the instruction that copies the return
3451 value. */
3452 gcc_assert (reload_completed);
3454 e = find_fallthru_edge (EXIT_BLOCK_PTR->preds);
3455 if (e)
3456 bb = e->src;
3458 if (bb && bb->aux)
3460 basic_block c = ENTRY_BLOCK_PTR->next_bb;
3462 /* If the very first block is the one with the fall-through exit
3463 edge, we have to split that block. */
3464 if (c == bb)
3466 bb = split_block (bb, NULL)->dest;
3467 bb->aux = c->aux;
3468 c->aux = bb;
3469 BB_FOOTER (bb) = BB_FOOTER (c);
3470 BB_FOOTER (c) = NULL;
3473 while (c->aux != bb)
3474 c = (basic_block) c->aux;
3476 c->aux = bb->aux;
3477 while (c->aux)
3478 c = (basic_block) c->aux;
3480 c->aux = bb;
3481 bb->aux = NULL;
3485 /* In case there are more than one fallthru predecessors of exit, force that
3486 there is only one. */
3488 static void
3489 force_one_exit_fallthru (void)
3491 edge e, predecessor = NULL;
3492 bool more = false;
3493 edge_iterator ei;
3494 basic_block forwarder, bb;
3496 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
3497 if (e->flags & EDGE_FALLTHRU)
3499 if (predecessor == NULL)
3500 predecessor = e;
3501 else
3503 more = true;
3504 break;
3508 if (!more)
3509 return;
3511 /* Exit has several fallthru predecessors. Create a forwarder block for
3512 them. */
3513 forwarder = split_edge (predecessor);
3514 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
3516 if (e->src == forwarder
3517 || !(e->flags & EDGE_FALLTHRU))
3518 ei_next (&ei);
3519 else
3520 redirect_edge_and_branch_force (e, forwarder);
3523 /* Fix up the chain of blocks -- make FORWARDER immediately precede the
3524 exit block. */
3525 FOR_EACH_BB (bb)
3527 if (bb->aux == NULL && bb != forwarder)
3529 bb->aux = forwarder;
3530 break;
3535 /* Return true in case it is possible to duplicate the basic block BB. */
3537 static bool
3538 cfg_layout_can_duplicate_bb_p (const_basic_block bb)
3540 /* Do not attempt to duplicate tablejumps, as we need to unshare
3541 the dispatch table. This is difficult to do, as the instructions
3542 computing jump destination may be hoisted outside the basic block. */
3543 if (tablejump_p (BB_END (bb), NULL, NULL))
3544 return false;
3546 /* Do not duplicate blocks containing insns that can't be copied. */
3547 if (targetm.cannot_copy_insn_p)
3549 rtx insn = BB_HEAD (bb);
3550 while (1)
3552 if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
3553 return false;
3554 if (insn == BB_END (bb))
3555 break;
3556 insn = NEXT_INSN (insn);
3560 return true;
/* Duplicate the insns in the chain FROM..TO (inclusive) at the end of the
   current insn stream and return the first insn of the copy.  The copied
   chain is emitted after a temporary NOTE_INSN_DELETED (removed before
   returning) so that the boundaries of the preceding basic block are not
   disturbed; the chain is reordered into place later.  */
rtx
duplicate_insn_chain (rtx from, rtx to)
{
  rtx insn, last, copy;

  /* Avoid updating of boundaries of previous basic block.  The
     note will get removed from insn stream in fixup.  */
  last = emit_note (NOTE_INSN_DELETED);

  /* Create copy at the end of INSN chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
        {
        case DEBUG_INSN:
          /* Don't duplicate label debug insns.  */
          if (TREE_CODE (INSN_VAR_LOCATION_DECL (insn)) == LABEL_DECL)
            break;
          /* FALLTHRU */
        case INSN:
        case CALL_INSN:
        case JUMP_INSN:
          /* Avoid copying of dispatch tables.  We never duplicate
             tablejumps, so this can hit only in case the table got
             moved far from original jump.  */
          if (GET_CODE (PATTERN (insn)) == ADDR_VEC
              || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
            {
              /* Avoid copying following barrier as well if any
                 (and debug insns in between).  */
              rtx next;

              for (next = NEXT_INSN (insn);
                   next != NEXT_INSN (to);
                   next = NEXT_INSN (next))
                if (!DEBUG_INSN_P (next))
                  break;
              if (next != NEXT_INSN (to) && BARRIER_P (next))
                insn = next;
              break;
            }
          copy = emit_copy_of_insn_after (insn, get_last_insn ());
          /* Return jumps have no label; preserve JUMP_LABEL so the copy
             is still recognized as a return jump.  */
          if (JUMP_P (insn) && JUMP_LABEL (insn) != NULL_RTX
              && ANY_RETURN_P (JUMP_LABEL (insn)))
            JUMP_LABEL (copy) = JUMP_LABEL (insn);
          maybe_copy_prologue_epilogue_insn (insn, copy);
          break;

        case CODE_LABEL:
          /* Labels are not copied; jumps into the copy are retargeted
             elsewhere.  */
          break;

        case BARRIER:
          emit_barrier ();
          break;

        case NOTE:
          switch (NOTE_KIND (insn))
            {
              /* In case prologue is empty and function contain label
                 in first BB, we may want to copy the block.  */
            case NOTE_INSN_PROLOGUE_END:

            case NOTE_INSN_DELETED:
            case NOTE_INSN_DELETED_LABEL:
            case NOTE_INSN_DELETED_DEBUG_LABEL:
              /* No problem to strip these.  */
            case NOTE_INSN_FUNCTION_BEG:
              /* There is always just single entry to function.  */
            case NOTE_INSN_BASIC_BLOCK:
              break;

            case NOTE_INSN_EPILOGUE_BEG:
            case NOTE_INSN_SWITCH_TEXT_SECTIONS:
              emit_note_copy (insn);
              break;

            default:
              /* All other notes should have already been eliminated.  */
              gcc_unreachable ();
            }
          break;
        default:
          gcc_unreachable ();
        }
    }
  /* The first real copied insn follows the placeholder note.  */
  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}
/* Create a duplicate of the basic block BB.  The new block is placed just
   before the exit block; its header and footer insn chains (cfglayout
   bookkeeping) are duplicated as well.  */

static basic_block
cfg_layout_duplicate_bb (basic_block bb)
{
  rtx insn;
  basic_block new_bb;

  insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
  new_bb = create_basic_block (insn,
                               insn ? get_last_insn () : NULL,
                               EXIT_BLOCK_PTR->prev_bb);

  BB_COPY_PARTITION (new_bb, bb);
  if (BB_HEADER (bb))
    {
      /* Walk to the last insn of the header, duplicate the whole chain
         and detach the copy into the new block's header.  */
      insn = BB_HEADER (bb);
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (BB_HEADER (bb), insn);
      if (insn)
        BB_HEADER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
    }

  if (BB_FOOTER (bb))
    {
      /* Likewise for the footer chain.  */
      insn = BB_FOOTER (bb);
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (BB_FOOTER (bb), insn);
      if (insn)
        BB_FOOTER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
    }

  return new_bb;
}
3692 /* Main entry point to this module - initialize the datastructures for
3693 CFG layout changes. It keeps LOOPS up-to-date if not null.
3695 FLAGS is a set of additional flags to pass to cleanup_cfg(). */
3697 void
3698 cfg_layout_initialize (unsigned int flags)
3700 rtx x;
3701 basic_block bb;
3703 initialize_original_copy_tables ();
3705 cfg_layout_rtl_register_cfg_hooks ();
3707 record_effective_endpoints ();
3709 /* Make sure that the targets of non local gotos are marked. */
3710 for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
3712 bb = BLOCK_FOR_INSN (XEXP (x, 0));
3713 bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
3716 cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
3719 /* Splits superblocks. */
3720 void
3721 break_superblocks (void)
3723 sbitmap superblocks;
3724 bool need = false;
3725 basic_block bb;
3727 superblocks = sbitmap_alloc (last_basic_block);
3728 sbitmap_zero (superblocks);
3730 FOR_EACH_BB (bb)
3731 if (bb->flags & BB_SUPERBLOCK)
3733 bb->flags &= ~BB_SUPERBLOCK;
3734 SET_BIT (superblocks, bb->index);
3735 need = true;
3738 if (need)
3740 rebuild_jump_labels (get_insns ());
3741 find_many_sub_basic_blocks (superblocks);
3744 free (superblocks);
/* Finalize the changes: reorder insn list according to the sequence specified
   by aux pointers, enter compensation code, rebuild scope forest.  */

void
cfg_layout_finalize (void)
{
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  force_one_exit_fallthru ();
  rtl_register_cfg_hooks ();
  /* After reload, and only when the target has no epilogue, the block
     falling through to exit must be the last one in the chain.  */
  if (reload_completed
#ifdef HAVE_epilogue
      && !HAVE_epilogue
#endif
      )
    fixup_fallthru_exit_predecessor ();
  fixup_reorder_chain ();

  rebuild_jump_labels (get_insns ());
  delete_dead_jumptables ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
  verify_flow_info ();
#endif
}
3776 /* Same as split_block but update cfg_layout structures. */
3778 static basic_block
3779 cfg_layout_split_block (basic_block bb, void *insnp)
3781 rtx insn = (rtx) insnp;
3782 basic_block new_bb = rtl_split_block (bb, insn);
3784 BB_FOOTER (new_bb) = BB_FOOTER (bb);
3785 BB_FOOTER (bb) = NULL;
3787 return new_bb;
/* Redirect edge E to DEST.  Returns the (possibly new) edge on success,
   NULL when the redirection is not possible (abnormal call / EH edges).  */
static edge
cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
{
  basic_block src = e->src;
  edge ret;

  /* Abnormal call and EH edges cannot be redirected.  */
  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return NULL;

  /* Already pointing at DEST: nothing to do.  */
  if (e->dest == dest)
    return e;

  if (e->src != ENTRY_BLOCK_PTR
      && (ret = try_redirect_by_replacing_jump (e, dest, true)))
    {
      df_set_bb_dirty (src);
      return ret;
    }

  /* The entry edge is a plain fallthru; just repoint its successor.  */
  if (e->src == ENTRY_BLOCK_PTR
      && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
    {
      if (dump_file)
        fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
                 e->src->index, dest->index);

      df_set_bb_dirty (e->src);
      redirect_edge_succ (e, dest);
      return e;
    }

  /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
     in the case the basic block appears to be in sequence.  Avoid this
     transformation.  */

  if (e->flags & EDGE_FALLTHRU)
    {
      /* Redirect any branch edges unified with the fallthru one.  */
      if (JUMP_P (BB_END (src))
          && label_is_jump_target_p (BB_HEAD (e->dest),
                                     BB_END (src)))
        {
          edge redirected;

          if (dump_file)
            fprintf (dump_file, "Fallthru edge unified with branch "
                     "%i->%i redirected to %i\n",
                     e->src->index, e->dest->index, dest->index);
          /* Temporarily drop the fallthru flag so redirect_branch_edge
             treats E as a branch edge; restore it on the result.  */
          e->flags &= ~EDGE_FALLTHRU;
          redirected = redirect_branch_edge (e, dest);
          gcc_assert (redirected);
          redirected->flags |= EDGE_FALLTHRU;
          df_set_bb_dirty (redirected->src);
          return redirected;
        }
      /* In case we are redirecting fallthru edge to the branch edge
         of conditional jump, remove it.  */
      if (EDGE_COUNT (src->succs) == 2)
        {
          /* Find the edge that is different from E.  */
          edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);

          if (s->dest == dest
              && any_condjump_p (BB_END (src))
              && onlyjump_p (BB_END (src)))
            delete_insn (BB_END (src));
        }
      if (dump_file)
        fprintf (dump_file, "Redirecting fallthru edge %i->%i to %i\n",
                 e->src->index, e->dest->index, dest->index);
      /* redirect_edge_succ_nodup merges E into a pre-existing edge to
         DEST if there is one.  */
      ret = redirect_edge_succ_nodup (e, dest);
    }
  else
    ret = redirect_branch_edge (e, dest);

  /* We don't want simplejumps in the insn stream during cfglayout.  */
  gcc_assert (!simplejump_p (BB_END (src)));

  df_set_bb_dirty (src);
  return ret;
}
3873 /* Simple wrapper as we always can redirect fallthru edges. */
3874 static basic_block
3875 cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
3877 edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
3879 gcc_assert (redirected);
3880 return NULL;
/* Same as delete_basic_block but update cfg_layout structures.
   The block's header/footer insns are first spliced back into the main
   insn chain so rtl_delete_block can dispose of everything; any insns
   that survive (labels, barriers) are reattached to the next block's
   header (or to the function footer for the last block).  */

static void
cfg_layout_delete_block (basic_block bb)
{
  rtx insn, next, prev = PREV_INSN (BB_HEAD (bb)), *to, remaints;

  if (BB_HEADER (bb))
    {
      /* Splice the header chain into the stream just before BB_HEAD.  */
      next = BB_HEAD (bb);
      if (prev)
        NEXT_INSN (prev) = BB_HEADER (bb);
      else
        set_first_insn (BB_HEADER (bb));
      PREV_INSN (BB_HEADER (bb)) = prev;
      insn = BB_HEADER (bb);
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      NEXT_INSN (insn) = next;
      PREV_INSN (next) = insn;
    }
  next = NEXT_INSN (BB_END (bb));
  if (BB_FOOTER (bb))
    {
      /* Drop barriers from the footer (stopping at the first label);
         they would otherwise end up in the middle of the stream.  */
      insn = BB_FOOTER (bb);
      while (insn)
        {
          if (BARRIER_P (insn))
            {
              if (PREV_INSN (insn))
                NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
              else
                BB_FOOTER (bb) = NEXT_INSN (insn);
              if (NEXT_INSN (insn))
                PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
            }
          if (LABEL_P (insn))
            break;
          insn = NEXT_INSN (insn);
        }
      if (BB_FOOTER (bb))
        {
          /* Splice the remaining footer after BB_END.  */
          insn = BB_END (bb);
          NEXT_INSN (insn) = BB_FOOTER (bb);
          PREV_INSN (BB_FOOTER (bb)) = insn;
          while (NEXT_INSN (insn))
            insn = NEXT_INSN (insn);
          NEXT_INSN (insn) = next;
          if (next)
            PREV_INSN (next) = insn;
          else
            set_last_insn (insn);
        }
    }
  /* Leftover insns will be reattached here.  */
  if (bb->next_bb != EXIT_BLOCK_PTR)
    to = &BB_HEADER (bb->next_bb);
  else
    to = &cfg_layout_function_footer;

  rtl_delete_block (bb);

  /* PREV/NEXT now bracket whatever rtl_delete_block left behind.  */
  if (prev)
    prev = NEXT_INSN (prev);
  else
    prev = get_insns ();
  if (next)
    next = PREV_INSN (next);
  else
    next = get_last_insn ();

  if (next && NEXT_INSN (next) != prev)
    {
      /* Move the surviving insns to *TO.  */
      remaints = unlink_insn_chain (prev, next);
      insn = remaints;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      NEXT_INSN (insn) = *to;
      if (*to)
        PREV_INSN (*to) = insn;
      *to = remaints;
    }
}
3966 /* Return true when blocks A and B can be safely merged. */
3968 static bool
3969 cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
3971 /* If we are partitioning hot/cold basic blocks, we don't want to
3972 mess up unconditional or indirect jumps that cross between hot
3973 and cold sections.
3975 Basic block partitioning may result in some jumps that appear to
3976 be optimizable (or blocks that appear to be mergeable), but which really
3977 must be left untouched (they are required to make it safely across
3978 partition boundaries). See the comments at the top of
3979 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
3981 if (BB_PARTITION (a) != BB_PARTITION (b))
3982 return false;
3984 /* Protect the loop latches. */
3985 if (current_loops && b->loop_father->latch == b)
3986 return false;
3988 /* If we would end up moving B's instructions, make sure it doesn't fall
3989 through into the exit block, since we cannot recover from a fallthrough
3990 edge into the exit block occurring in the middle of a function. */
3991 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
3993 edge e = find_fallthru_edge (b->succs);
3994 if (e && e->dest == EXIT_BLOCK_PTR)
3995 return false;
3998 /* There must be exactly one edge in between the blocks. */
3999 return (single_succ_p (a)
4000 && single_succ (a) == b
4001 && single_pred_p (b) == 1
4002 && a != b
4003 /* Must be simple edge. */
4004 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
4005 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
4006 /* If the jump insn has side effects, we can't kill the edge.
4007 When not optimizing, try_redirect_by_replacing_jump will
4008 not allow us to redirect an edge by replacing a table jump. */
4009 && (!JUMP_P (BB_END (a))
4010 || ((!optimize || reload_completed)
4011 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
4014 /* Merge block A and B. The blocks must be mergeable. */
4016 static void
4017 cfg_layout_merge_blocks (basic_block a, basic_block b)
4019 bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
4020 rtx insn;
4022 gcc_checking_assert (cfg_layout_can_merge_blocks_p (a, b));
4024 if (dump_file)
4025 fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
4026 a->index);
4028 /* If there was a CODE_LABEL beginning B, delete it. */
4029 if (LABEL_P (BB_HEAD (b)))
4031 delete_insn (BB_HEAD (b));
4034 /* We should have fallthru edge in a, or we can do dummy redirection to get
4035 it cleaned up. */
4036 if (JUMP_P (BB_END (a)))
4037 try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
4038 gcc_assert (!JUMP_P (BB_END (a)));
4040 /* When not optimizing CFG and the edge is the only place in RTL which holds
4041 some unique locus, emit a nop with that locus in between. */
4042 if (!optimize)
4043 emit_nop_for_unique_locus_between (a, b);
4045 /* Possible line number notes should appear in between. */
4046 if (BB_HEADER (b))
4048 rtx first = BB_END (a), last;
4050 last = emit_insn_after_noloc (BB_HEADER (b), BB_END (a), a);
4051 /* The above might add a BARRIER as BB_END, but as barriers
4052 aren't valid parts of a bb, remove_insn doesn't update
4053 BB_END if it is a barrier. So adjust BB_END here. */
4054 while (BB_END (a) != first && BARRIER_P (BB_END (a)))
4055 BB_END (a) = PREV_INSN (BB_END (a));
4056 delete_insn_chain (NEXT_INSN (first), last, false);
4057 BB_HEADER (b) = NULL;
4060 /* In the case basic blocks are not adjacent, move them around. */
4061 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4063 insn = unlink_insn_chain (BB_HEAD (b), BB_END (b));
4065 emit_insn_after_noloc (insn, BB_END (a), a);
4067 /* Otherwise just re-associate the instructions. */
4068 else
4070 insn = BB_HEAD (b);
4071 BB_END (a) = BB_END (b);
4074 /* emit_insn_after_noloc doesn't call df_insn_change_bb.
4075 We need to explicitly call. */
4076 update_bb_for_insn_chain (insn, BB_END (b), a);
4078 /* Skip possible DELETED_LABEL insn. */
4079 if (!NOTE_INSN_BASIC_BLOCK_P (insn))
4080 insn = NEXT_INSN (insn);
4081 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
4082 BB_HEAD (b) = NULL;
4083 delete_insn (insn);
4085 df_bb_delete (b->index);
4087 /* Possible tablejumps and barriers should appear after the block. */
4088 if (BB_FOOTER (b))
4090 if (!BB_FOOTER (a))
4091 BB_FOOTER (a) = BB_FOOTER (b);
4092 else
4094 rtx last = BB_FOOTER (a);
4096 while (NEXT_INSN (last))
4097 last = NEXT_INSN (last);
4098 NEXT_INSN (last) = BB_FOOTER (b);
4099 PREV_INSN (BB_FOOTER (b)) = last;
4101 BB_FOOTER (b) = NULL;
4104 /* If B was a forwarder block, propagate the locus on the edge. */
4105 if (forwarder_p
4106 && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) != UNKNOWN_LOCATION)
4107 EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;
4109 if (dump_file)
4110 fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
4113 /* Split edge E. */
4115 static basic_block
4116 cfg_layout_split_edge (edge e)
4118 basic_block new_bb =
4119 create_basic_block (e->src != ENTRY_BLOCK_PTR
4120 ? NEXT_INSN (BB_END (e->src)) : get_insns (),
4121 NULL_RTX, e->src);
4123 if (e->dest == EXIT_BLOCK_PTR)
4124 BB_COPY_PARTITION (new_bb, e->src);
4125 else
4126 BB_COPY_PARTITION (new_bb, e->dest);
4127 make_edge (new_bb, e->dest, EDGE_FALLTHRU);
4128 redirect_edge_and_branch_force (e, new_bb);
4130 return new_bb;
/* Do postprocessing after making a forwarder block joined by edge FALLTHRU.
   Nothing is needed for RTL; this hook exists for the benefit of other IRs.  */

static void
rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
{
}
4140 /* Return true if BB contains only labels or non-executable
4141 instructions. */
4143 static bool
4144 rtl_block_empty_p (basic_block bb)
4146 rtx insn;
4148 if (bb == ENTRY_BLOCK_PTR || bb == EXIT_BLOCK_PTR)
4149 return true;
4151 FOR_BB_INSNS (bb, insn)
4152 if (NONDEBUG_INSN_P (insn) && !any_uncondjump_p (insn))
4153 return false;
4155 return true;
4158 /* Split a basic block if it ends with a conditional branch and if
4159 the other part of the block is not empty. */
4161 static basic_block
4162 rtl_split_block_before_cond_jump (basic_block bb)
4164 rtx insn;
4165 rtx split_point = NULL;
4166 rtx last = NULL;
4167 bool found_code = false;
4169 FOR_BB_INSNS (bb, insn)
4171 if (any_condjump_p (insn))
4172 split_point = last;
4173 else if (NONDEBUG_INSN_P (insn))
4174 found_code = true;
4175 last = insn;
4178 /* Did not find everything. */
4179 if (found_code && split_point)
4180 return split_block (bb, split_point)->dest;
4181 else
4182 return NULL;
4185 /* Return 1 if BB ends with a call, possibly followed by some
4186 instructions that must stay with the call, 0 otherwise. */
4188 static bool
4189 rtl_block_ends_with_call_p (basic_block bb)
4191 rtx insn = BB_END (bb);
4193 while (!CALL_P (insn)
4194 && insn != BB_HEAD (bb)
4195 && (keep_with_call_p (insn)
4196 || NOTE_P (insn)
4197 || DEBUG_INSN_P (insn)))
4198 insn = PREV_INSN (insn);
4199 return (CALL_P (insn));
4202 /* Return 1 if BB ends with a conditional branch, 0 otherwise. */
4204 static bool
4205 rtl_block_ends_with_condjump_p (const_basic_block bb)
4207 return any_condjump_p (BB_END (bb));
4210 /* Return true if we need to add fake edge to exit.
4211 Helper function for rtl_flow_call_edges_add. */
4213 static bool
4214 need_fake_edge_p (const_rtx insn)
4216 if (!INSN_P (insn))
4217 return false;
4219 if ((CALL_P (insn)
4220 && !SIBLING_CALL_P (insn)
4221 && !find_reg_note (insn, REG_NORETURN, NULL)
4222 && !(RTL_CONST_OR_PURE_CALL_P (insn))))
4223 return true;
4225 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
4226 && MEM_VOLATILE_P (PATTERN (insn)))
4227 || (GET_CODE (PATTERN (insn)) == PARALLEL
4228 && asm_noperands (insn) != -1
4229 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
4230 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
/* Add fake edges to the function exit for any non constant and non noreturn
   calls, volatile inline assembly in the bitmap of blocks specified by
   BLOCKS or to the whole CFG if BLOCKS is zero.  Return the number of blocks
   that were split.

   The goal is to expose cases in which entering a basic block does not imply
   that all subsequent instructions must be executed.  */

static int
rtl_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block;
  bool check_last_block = false;

  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR->prev_bb;
      rtx insn = BB_END (bb);

      /* Back up past insns that must be kept in the same block as a call.  */
      while (insn != BB_HEAD (bb)
             && keep_with_call_p (insn))
        insn = PREV_INSN (insn);

      if (need_fake_edge_p (insn))
        {
          edge e;

          e = find_edge (bb, EXIT_BLOCK_PTR);
          if (e)
            {
              /* The dummy use forces a new last block when insertions
                 are committed.  */
              insert_insn_on_edge (gen_use (const0_rtx), e);
              commit_edge_insertions ();
            }
        }
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */

  for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn;
      rtx prev_insn;

      if (!bb)
        continue;

      if (blocks && !TEST_BIT (blocks, i))
        continue;

      /* Walk the block backwards; PREV_INSN is cached before any split,
         because a split invalidates BB's boundaries.  */
      for (insn = BB_END (bb); ; insn = prev_insn)
        {
          prev_insn = PREV_INSN (insn);
          if (need_fake_edge_p (insn))
            {
              edge e;
              rtx split_at_insn = insn;

              /* Don't split the block between a call and an insn that should
                 remain in the same block as the call.  */
              if (CALL_P (insn))
                while (split_at_insn != BB_END (bb)
                       && keep_with_call_p (NEXT_INSN (split_at_insn)))
                  split_at_insn = NEXT_INSN (split_at_insn);

              /* The handling above of the final block before the epilogue
                 should be enough to verify that there is no edge to the exit
                 block in CFG already.  Calling make_edge in such case would
                 cause us to mark that edge as fake and remove it later.  */

#ifdef ENABLE_CHECKING
              if (split_at_insn == BB_END (bb))
                {
                  e = find_edge (bb, EXIT_BLOCK_PTR);
                  gcc_assert (e == NULL);
                }
#endif

              /* Note that the following may create a new basic block
                 and renumber the existing basic blocks.  */
              if (split_at_insn != BB_END (bb))
                {
                  e = split_block (bb, split_at_insn);
                  if (e)
                    blocks_split++;
                }

              make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
            }

          if (insn == BB_HEAD (bb))
            break;
        }
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
/* Add COMP_RTX as a condition at end of COND_BB.  FIRST_HEAD is
   the conditional branch target, SECOND_HEAD should be the fall-thru;
   there is no need to handle this here, the loop versioning code handles
   this.  The reason for SECOND_HEAD is that it is needed for condition
   in trees, and this should be of the same type since it is a hook.  */
static void
rtl_lv_add_condition_to_bb (basic_block first_head ,
                            basic_block second_head ATTRIBUTE_UNUSED,
                            basic_block cond_bb, void *comp_rtx)
{
  rtx label, seq, jump;
  rtx op0 = XEXP ((rtx)comp_rtx, 0);
  rtx op1 = XEXP ((rtx)comp_rtx, 1);
  enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
  enum machine_mode mode;


  label = block_label (first_head);
  /* Pick the mode from whichever operand has one (constants may be
     VOIDmode).  */
  mode = GET_MODE (op0);
  if (mode == VOIDmode)
    mode = GET_MODE (op1);

  start_sequence ();
  op0 = force_operand (op0, NULL_RTX);
  op1 = force_operand (op1, NULL_RTX);
  do_compare_rtx_and_jump (op0, op1, comp, 0,
                           mode, NULL_RTX, NULL_RTX, label, -1);
  /* Wire up the jump's label by hand; the sequence was emitted without
     CFG bookkeeping.  */
  jump = get_last_insn ();
  JUMP_LABEL (jump) = label;
  LABEL_NUSES (label)++;
  seq = get_insns ();
  end_sequence ();

  /* Add the new cond , in the new head.  */
  emit_insn_after(seq, BB_END(cond_bb));
}
4397 /* Given a block B with unconditional branch at its end, get the
4398 store the return the branch edge and the fall-thru edge in
4399 BRANCH_EDGE and FALLTHRU_EDGE respectively. */
4400 static void
4401 rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
4402 edge *fallthru_edge)
4404 edge e = EDGE_SUCC (b, 0);
4406 if (e->flags & EDGE_FALLTHRU)
4408 *fallthru_edge = e;
4409 *branch_edge = EDGE_SUCC (b, 1);
4411 else
4413 *branch_edge = e;
4414 *fallthru_edge = EDGE_SUCC (b, 1);
4418 void
4419 init_rtl_bb_info (basic_block bb)
4421 gcc_assert (!bb->il.x.rtl);
4422 bb->il.x.head_ = NULL;
4423 bb->il.x.rtl = ggc_alloc_cleared_rtl_bb_info ();
/* Returns true if it is possible to remove edge E by redirecting
   it to the destination of the other edge from E->src.  */

static bool
rtl_can_remove_branch_p (const_edge e)
{
  const_basic_block src = e->src;
  /* The other successor of SRC -- the one E would be folded into.  */
  const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
  const_rtx insn = BB_END (src), set;

  /* The conditions are taken from try_redirect_by_replacing_jump.  */
  if (target == EXIT_BLOCK_PTR)
    return false;

  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return false;

  /* Do not touch jumps that cross hot/cold partitions.  */
  if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
      || BB_PARTITION (src) != BB_PARTITION (target))
    return false;

  if (!onlyjump_p (insn)
      || tablejump_p (insn, NULL, NULL))
    return false;

  /* The jump must be a plain side-effect-free SET.  */
  set = single_set (insn);
  if (!set || side_effects_p (set))
    return false;

  return true;
}
4458 static basic_block
4459 rtl_duplicate_bb (basic_block bb)
4461 bb = cfg_layout_duplicate_bb (bb);
4462 bb->aux = NULL;
4463 return bb;
4466 /* Do book-keeping of basic block BB for the profile consistency checker.
4467 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
4468 then do post-pass accounting. Store the counting in RECORD. */
4469 static void
4470 rtl_account_profile_record (basic_block bb, int after_pass,
4471 struct profile_record *record)
4473 rtx insn;
4474 FOR_BB_INSNS (bb, insn)
4475 if (INSN_P (insn))
4477 record->size[after_pass]
4478 += insn_rtx_cost (PATTERN (insn), false);
4479 if (profile_status == PROFILE_READ)
4480 record->time[after_pass]
4481 += insn_rtx_cost (PATTERN (insn), true) * bb->count;
4482 else if (profile_status == PROFILE_GUESSED)
4483 record->time[after_pass]
4484 += insn_rtx_cost (PATTERN (insn), true) * bb->frequency;
/* Implementation of CFG manipulation for linearized RTL.  */
struct cfg_hooks rtl_cfg_hooks = {
  "rtl",
  rtl_verify_flow_info,			/* verify_flow_info */
  rtl_dump_bb,				/* dump_bb */
  rtl_create_basic_block,		/* create_basic_block */
  rtl_redirect_edge_and_branch,		/* redirect_edge_and_branch */
  rtl_redirect_edge_and_branch_force,	/* redirect_edge_and_branch_force */
  rtl_can_remove_branch_p,		/* can_remove_branch_p */
  rtl_delete_block,			/* delete_basic_block */
  rtl_split_block,			/* split_block */
  rtl_move_block_after,			/* move_block_after */
  rtl_can_merge_blocks, /* can_merge_blocks_p */
  rtl_merge_blocks,			/* merge_blocks */
  rtl_predict_edge,			/* predict_edge */
  rtl_predicted_by_p,			/* predicted_by_p */
  cfg_layout_can_duplicate_bb_p,	/* can_duplicate_block_p */
  rtl_duplicate_bb,			/* duplicate_block */
  rtl_split_edge,			/* split_edge */
  rtl_make_forwarder_block,		/* make_forwarder_block */
  rtl_tidy_fallthru_edge,		/* tidy_fallthru_edge */
  rtl_force_nonfallthru,		/* force_nonfallthru */
  rtl_block_ends_with_call_p,		/* block_ends_with_call_p */
  rtl_block_ends_with_condjump_p,	/* block_ends_with_condjump_p */
  rtl_flow_call_edges_add,		/* flow_call_edges_add */
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  NULL, /* duplicate loop for trees */
  NULL, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi*/
  NULL, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record,		/* account_profile_record */
};
/* Implementation of CFG manipulation for cfg layout RTL, where
   basic block connected via fallthru edges does not have to be adjacent.
   This representation will hopefully become the default one in future
   version of the compiler.  */

struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
  "cfglayout mode",
  rtl_verify_flow_info_1,		/* verify_flow_info */
  rtl_dump_bb,				/* dump_bb */
  cfg_layout_create_basic_block,	/* create_basic_block */
  cfg_layout_redirect_edge_and_branch,	/* redirect_edge_and_branch */
  cfg_layout_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
  rtl_can_remove_branch_p,		/* can_remove_branch_p */
  cfg_layout_delete_block,		/* delete_basic_block */
  cfg_layout_split_block,		/* split_block */
  rtl_move_block_after,			/* move_block_after */
  cfg_layout_can_merge_blocks_p,	/* can_merge_blocks_p */
  cfg_layout_merge_blocks,		/* merge_blocks */
  rtl_predict_edge,			/* predict_edge */
  rtl_predicted_by_p,			/* predicted_by_p */
  cfg_layout_can_duplicate_bb_p,	/* can_duplicate_block_p */
  cfg_layout_duplicate_bb,		/* duplicate_block */
  cfg_layout_split_edge,		/* split_edge */
  rtl_make_forwarder_block,		/* make_forwarder_block */
  NULL, /* tidy_fallthru_edge */
  rtl_force_nonfallthru,		/* force_nonfallthru */
  rtl_block_ends_with_call_p,		/* block_ends_with_call_p */
  rtl_block_ends_with_condjump_p,	/* block_ends_with_condjump_p */
  rtl_flow_call_edges_add,		/* flow_call_edges_add */
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  duplicate_loop_to_header_edge, /* duplicate loop for trees */
  rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi*/
  rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record,		/* account_profile_record */
};
4566 #include "gt-cfgrtl.h"