/* Control flow graph manipulation code for GNU compiler.
   Copyright (C) 1987-2024 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file contains low level functions to manipulate the CFG and analyze it
   that are aware of the RTL intermediate language.

   Available functionality:
     - Basic CFG/RTL manipulation API documented in cfghooks.h
     - CFG-aware instruction chain manipulation
	 delete_insn, delete_insn_chain
     - Edge splitting and committing to edges
	 insert_insn_on_edge, commit_edge_insertions
     - CFG updating after insn simplification
	 purge_dead_edges, purge_all_dead_edges
     - CFG fixing after coarse manipulation
	 fixup_abnormal_edges

   Functions not intended for generic use:
     - Infrastructure to determine quickly the basic block for an insn
	 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
     - Edge redirection with updating and optimizing of insn chain
	 block_label, tidy_fallthru_edge, force_nonfallthru  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "cfghooks.h"
#include "df.h"
#include "insn-config.h"
#include "memmodel.h"
#include "emit-rtl.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "bb-reorder.h"
#include "rtl-error.h"
#include "insn-attr.h"
#include "dojump.h"
#include "expr.h"
#include "cfgloop.h"
#include "tree-pass.h"
#include "print-rtl.h"
#include "rtl-iter.h"
#include "gimplify.h"
#include "profile.h"
#include "sreal.h"
/* Disable warnings about missing quoting in GCC diagnostics.  */
#if __GNUC__ >= 10
# pragma GCC diagnostic push
# pragma GCC diagnostic ignored "-Wformat-diag"
#endif
/* Holds the interesting leading and trailing notes for the function.
   Only applicable if the CFG is in cfglayout mode.  */
static GTY(()) rtx_insn *cfg_layout_function_footer;
static GTY(()) rtx_insn *cfg_layout_function_header;

static rtx_insn *skip_insns_after_block (basic_block);
static void record_effective_endpoints (void);
static void fixup_reorder_chain (void);

void verify_insn_chain (void);
static void fixup_fallthru_exit_predecessor (void);
static bool can_delete_note_p (const rtx_note *);
static bool can_delete_label_p (const rtx_code_label *);
static basic_block rtl_split_edge (edge);
static bool rtl_move_block_after (basic_block, basic_block);
static bool rtl_verify_flow_info (void);
static basic_block cfg_layout_split_block (basic_block, void *);
static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
static void cfg_layout_delete_block (basic_block);
static void rtl_delete_block (basic_block);
static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
static edge rtl_redirect_edge_and_branch (edge, basic_block);
static basic_block rtl_split_block (basic_block, void *);
static void rtl_dump_bb (FILE *, basic_block, int, dump_flags_t);
static bool rtl_verify_flow_info_1 (void);
static void rtl_make_forwarder_block (edge);
static bool rtl_bb_info_initialized_p (basic_block bb);
/* Return true if NOTE is not one of the ones that must be kept paired,
   so that we may simply delete it.  */

static bool
can_delete_note_p (const rtx_note *note)
{
  switch (NOTE_KIND (note))
    {
    case NOTE_INSN_DELETED:
    case NOTE_INSN_BASIC_BLOCK:
    case NOTE_INSN_EPILOGUE_BEG:
      return true;

    default:
      return false;
    }
}
/* True if a given label can be deleted.  */

static bool
can_delete_label_p (const rtx_code_label *label)
{
  return (!LABEL_PRESERVE_P (label)
	  /* User declared labels must be preserved.  */
	  && LABEL_NAME (label) == 0
	  && !vec_safe_contains<rtx_insn *> (forced_labels,
					     const_cast<rtx_code_label *> (label)));
}
/* Delete INSN by patching it out.  */

void
delete_insn (rtx_insn *insn)
{
  rtx note;
  bool really_delete = true;

  if (LABEL_P (insn))
    {
      /* Some labels can't be directly removed from the INSN chain, as they
	 might be referenced via variables, constant pool etc.
	 Convert them to the special NOTE_INSN_DELETED_LABEL note.  */
      if (! can_delete_label_p (as_a <rtx_code_label *> (insn)))
	{
	  const char *name = LABEL_NAME (insn);
	  basic_block bb = BLOCK_FOR_INSN (insn);
	  rtx_insn *bb_note = NEXT_INSN (insn);

	  really_delete = false;
	  PUT_CODE (insn, NOTE);
	  NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
	  NOTE_DELETED_LABEL_NAME (insn) = name;

	  /* If the note following the label starts a basic block, and the
	     label is a member of the same basic block, interchange the two.  */
	  if (bb_note != NULL_RTX
	      && NOTE_INSN_BASIC_BLOCK_P (bb_note)
	      && bb != NULL
	      && bb == BLOCK_FOR_INSN (bb_note))
	    {
	      reorder_insns_nobb (insn, insn, bb_note);
	      BB_HEAD (bb) = bb_note;
	      if (BB_END (bb) == bb_note)
		BB_END (bb) = insn;
	    }
	}
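      /* Whether or not the label itself survives as a note, it must no
	 longer appear on the nonlocal goto handler label list.  */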
      remove_node_from_insn_list (insn, &nonlocal_goto_handler_labels);
    }

  if (really_delete)
    {
      /* If this insn has already been deleted, something is very wrong.  */
      gcc_assert (!insn->deleted ());
      if (INSN_P (insn))
	df_insn_delete (insn);
      remove_insn (insn);
      insn->set_deleted ();
    }

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (JUMP_P (insn))
    {
      if (JUMP_LABEL (insn)
	  && LABEL_P (JUMP_LABEL (insn)))
	LABEL_NUSES (JUMP_LABEL (insn))--;

      /* If there are more targets, remove them too.  */
      while ((note
	      = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
	     && LABEL_P (XEXP (note, 0)))
	{
	  LABEL_NUSES (XEXP (note, 0))--;
	  remove_note (insn, note);
	}
    }

  /* Also if deleting any insn that references a label as an operand.  */
  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
	 && LABEL_P (XEXP (note, 0)))
    {
      LABEL_NUSES (XEXP (note, 0))--;
      remove_note (insn, note);
    }

  if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (insn))
    {
      rtvec vec = table->get_labels ();
      int len = GET_NUM_ELEM (vec);
      int i;

      for (i = 0; i < len; i++)
	{
	  rtx label = XEXP (RTVEC_ELT (vec, i), 0);

	  /* When deleting code in bulk (e.g. removing many unreachable
	     blocks) we can delete a label that's a target of the vector
	     before deleting the vector itself.  */
	  if (!NOTE_P (label))
	    LABEL_NUSES (label)--;
	}
    }
}
/* Like delete_insn but also purge dead edges from BB.
   Return true if any edges are eliminated.  */

bool
delete_insn_and_edges (rtx_insn *insn)
{
  bool purge = false;

  if (NONDEBUG_INSN_P (insn) && BLOCK_FOR_INSN (insn))
    {
      basic_block bb = BLOCK_FOR_INSN (insn);
      if (BB_END (bb) == insn)
	purge = true;
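      /* If INSN is followed only by debug insns up to the end of its block,
	 it is still effectively the last real insn, so deleting it may
	 still leave dead edges behind.  */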
      else if (DEBUG_INSN_P (BB_END (bb)))
	for (rtx_insn *dinsn = NEXT_INSN (insn);
	     DEBUG_INSN_P (dinsn); dinsn = NEXT_INSN (dinsn))
	  if (BB_END (bb) == dinsn)
	    {
	      purge = true;
	      break;
	    }
    }
  delete_insn (insn);
  if (purge)
    return purge_dead_edges (BLOCK_FOR_INSN (insn));
  return false;
}
/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be paired.  If CLEAR_BB is true, we set bb field for
   insns that cannot be removed to NULL.  */

void
delete_insn_chain (rtx start, rtx_insn *finish, bool clear_bb)
{
  /* Unchain the insns one by one.  It would be quicker to delete all of these
     with a single unchaining, rather than one at a time, but we need to keep
     the NOTE's.  */
  rtx_insn *current = finish;
  while (1)
    {
      rtx_insn *prev = PREV_INSN (current);
      if (NOTE_P (current) && !can_delete_note_p (as_a <rtx_note *> (current)))
	;
      else
	delete_insn (current);

      if (clear_bb && !current->deleted ())
	set_block_for_insn (current, NULL);

      if (current == start)
	break;
      current = prev;
    }
}
/* Create a new basic block consisting of the instructions between HEAD and END
   inclusive.  This function is designed to allow fast BB construction - it
   reuses the note and basic block struct in BB_NOTE, if any, does not grow
   the BASIC_BLOCK chain, and should be used directly only by CFG construction
   code.  END can be NULL to create a new empty basic block before HEAD.
   Both END and HEAD can be NULL to create a basic block at the end of the
   INSN chain.  AFTER is the basic block we should be put after.  */

basic_block
create_basic_block_structure (rtx_insn *head, rtx_insn *end, rtx_note *bb_note,
			      basic_block after)
{
  basic_block bb;

  if (bb_note
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      rtx_insn *after;

      if (LABEL_P (head))
	after = head;
      else
	{
	  after = PREV_INSN (head);
	  head = bb_note;
	}

      if (after != bb_note && NEXT_INSN (after) != bb_note)
	reorder_insns_nobb (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.  */

      bb = alloc_block ();

      init_rtl_bb_info (bb);
      if (!head && !end)
	head = end = bb_note
	  = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
      else if (LABEL_P (head) && end)
	{
	  bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
	  if (head == end)
	    end = bb_note;
	}
      else
	{
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	  if (!end)
	    end = head;
	}

      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  BB_HEAD (bb) = head;
  BB_END (bb) = end;
  bb->index = last_basic_block_for_fn (cfun)++;
  bb->flags = BB_NEW | BB_RTL;
  link_block (bb, after);
  SET_BASIC_BLOCK_FOR_FN (cfun, bb->index, bb);
  df_bb_refs_record (bb->index, false);
  update_bb_for_insn (bb);
  BB_SET_PARTITION (bb, BB_UNPARTITIONED);

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;

  return bb;
}
/* Create a new basic block consisting of the instructions between HEAD and
   END and place it into the BB chain after block AFTER.  END can be NULL to
   create a new empty basic block before HEAD.  Both END and HEAD can be
   NULL to create a basic block at the end of the INSN chain.  */

static basic_block
rtl_create_basic_block (void *headp, void *endp, basic_block after)
{
  rtx_insn *head = (rtx_insn *) headp;
  rtx_insn *end = (rtx_insn *) endp;
  basic_block bb;

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      >= basic_block_info_for_fn (cfun)->length ())
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   last_basic_block_for_fn (cfun) + 1);

  n_basic_blocks_for_fn (cfun)++;

  bb = create_basic_block_structure (head, end, NULL, after);
  bb->aux = NULL;
  return bb;
}
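/* Create a basic block in cfglayout mode; currently just a thin wrapper
   around rtl_create_basic_block.  */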
static basic_block
cfg_layout_create_basic_block (void *head, void *end, basic_block after)
{
  basic_block newbb = rtl_create_basic_block (head, end, after);

  return newbb;
}
/* Delete the insns in a (non-live) block.  We physically delete every
   non-deleted-note insn, and update the flow graph appropriately.  */

/* ??? Preserving all such notes strikes me as wrong.  It would be nice
   to post-process the stream to remove empty blocks, loops, ranges, etc.  */

static void
rtl_delete_block (basic_block b)
{
  rtx_insn *insn, *end;

  /* If the head of this block is a CODE_LABEL, then it might be the
     label for an exception handler which can't be reached.  We need
     to remove the label from the exception_handler_label list.  */
  insn = BB_HEAD (b);

  end = get_last_bb_insn (b);

  /* Selectively delete the entire chain.  */
  BB_HEAD (b) = NULL;
  delete_insn_chain (insn, end, true);

  if (dump_file)
    fprintf (dump_file, "deleting block %d\n", b->index);
  df_bb_delete (b->index);
}
/* Records the basic block struct in BLOCK_FOR_INSN for every insn.  */

void
compute_bb_for_insn (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *end = BB_END (bb);
      rtx_insn *insn;

      for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
	{
	  BLOCK_FOR_INSN (insn) = bb;
	  if (insn == end)
	    break;
	}
    }
}
/* Release the basic_block_for_insn array.  */

void
free_bb_for_insn (void)
{
  rtx_insn *insn;
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (!BARRIER_P (insn))
      BLOCK_FOR_INSN (insn) = NULL;
}
namespace {

const pass_data pass_data_free_cfg =
{
  RTL_PASS, /* type */
  "*free_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  PROP_cfg, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_free_cfg : public rtl_opt_pass
{
public:
  pass_free_cfg (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_free_cfg, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute (function *) final override;

}; // class pass_free_cfg

unsigned int
pass_free_cfg::execute (function *)
{
  /* The resource.cc machinery uses DF but the CFG isn't guaranteed to be
     valid at that point so it would be too late to call df_analyze.  */
  if (DELAY_SLOTS && optimize > 0 && flag_delayed_branch)
    {
      df_note_add_problem ();
      df_analyze ();
    }

  if (crtl->has_bb_partition)
    insert_section_boundary_note ();

  free_bb_for_insn ();
  return 0;
}

} // anon namespace

rtl_opt_pass *
make_pass_free_cfg (gcc::context *ctxt)
{
  return new pass_free_cfg (ctxt);
}
/* Return the RTX to emit after when we want to emit code at the entry of
   the function.  */
rtx_insn *
entry_of_function (void)
{
  return (n_basic_blocks_for_fn (cfun) > NUM_FIXED_BLOCKS ?
	  BB_HEAD (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) : get_insns ());
}
/* Emit INSN at the entry point of the function, ensuring that it is only
   executed once per function.  */
void
emit_insn_at_entry (rtx insn)
{
  edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
  edge e = ei_safe_edge (ei);
  gcc_assert (e->flags & EDGE_FALLTHRU);

  insert_insn_on_edge (insn, e);
  commit_edge_insertions ();
}
/* Update BLOCK_FOR_INSN of insns between BEGIN and END
   (or BARRIER if found) and notify df of the bb change.
   The insn chain range is inclusive
   (i.e. both BEGIN and END will be updated).  */

static void
update_bb_for_insn_chain (rtx_insn *begin, rtx_insn *end, basic_block bb)
{
  rtx_insn *insn;

  end = NEXT_INSN (end);
  for (insn = begin; insn != end; insn = NEXT_INSN (insn))
    if (!BARRIER_P (insn))
      df_insn_change_bb (insn, bb);
}

/* Update BLOCK_FOR_INSN of insns in BB to BB,
   and notify df of the change.  */

void
update_bb_for_insn (basic_block bb)
{
  update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
}
/* Like active_insn_p, except keep the return value use or clobber around
   even after reload.  */

static bool
flow_active_insn_p (const rtx_insn *insn)
{
  if (active_insn_p (insn))
    return true;

  /* A clobber of the function return value exists for buggy
     programs that fail to return a value.  Its effect is to
     keep the return value from being live across the entire
     function.  If we allow it to be skipped, we introduce the
     possibility for register lifetime confusion.
     Similarly, keep a USE of the function return value, otherwise
     the USE is dropped and we could fail to thread jump if USE
     appears on some paths and not on others, see PR90257.  */
  if ((GET_CODE (PATTERN (insn)) == CLOBBER
       || GET_CODE (PATTERN (insn)) == USE)
      && REG_P (XEXP (PATTERN (insn), 0))
      && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
    return true;

  return false;
}
/* Return true if the block has no effect and only forwards control flow to
   its single destination.  */

bool
contains_no_active_insn_p (const_basic_block bb)
{
  rtx_insn *insn;

  if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
      || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || !single_succ_p (bb)
      || (single_succ_edge (bb)->flags & EDGE_FAKE) != 0)
    return false;
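  /* Any active insn before the last one means the block does real work;
     the final insn itself is allowed to be a simple jump.  */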
  for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
    if (INSN_P (insn) && flow_active_insn_p (insn))
      return false;

  return (!INSN_P (insn)
	  || (JUMP_P (insn) && simplejump_p (insn))
	  || !flow_active_insn_p (insn));
}
/* Likewise, but protect loop latches, headers and preheaders.  */
/* FIXME: Make this a cfg hook.  */

bool
forwarder_block_p (const_basic_block bb)
{
  if (!contains_no_active_insn_p (bb))
    return false;

  /* Protect loop latches, headers and preheaders.  */
  if (current_loops)
    {
      basic_block dest;
      if (bb->loop_father->header == bb)
	return false;
      dest = EDGE_SUCC (bb, 0)->dest;
      if (dest->loop_father->header == dest)
	return false;
    }

  return true;
}
/* Return true if we can reach target from src by falling through.  */
/* FIXME: Make this a cfg hook, the result is only valid in cfgrtl mode.  */

bool
can_fallthru (basic_block src, basic_block target)
{
  rtx_insn *insn = BB_END (src);
  rtx_insn *insn2;
  edge e;
  edge_iterator ei;

  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return true;
  if (src->next_bb != target)
    return false;

  /* ??? Later we may add code to move jump tables offline.  */
  if (tablejump_p (insn, NULL, NULL))
    return false;

  FOR_EACH_EDGE (e, ei, src->succs)
    if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
	&& e->flags & EDGE_FALLTHRU)
      return false;

  insn2 = BB_HEAD (target);
  if (!active_insn_p (insn2))
    insn2 = next_active_insn (insn2);

  return next_active_insn (insn) == insn2;
}
/* Return true if we could reach target from src by falling through,
   if the target was made adjacent.  If we already have a fall-through
   edge to the exit block, we can't do that.  */
static bool
could_fall_through (basic_block src, basic_block target)
{
  edge e;
  edge_iterator ei;

  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return true;
  FOR_EACH_EDGE (e, ei, src->succs)
    if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
	&& e->flags & EDGE_FALLTHRU)
      return false;
  return true;
}
/* Return the NOTE_INSN_BASIC_BLOCK of BB.  */
rtx_note *
bb_note (basic_block bb)
{
  rtx_insn *note;

  note = BB_HEAD (bb);
  if (LABEL_P (note))
    note = NEXT_INSN (note);

  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note));
  return as_a <rtx_note *> (note);
}
/* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
   note associated with the BLOCK.  */

static rtx_insn *
first_insn_after_basic_block_note (basic_block block)
{
  rtx_insn *insn;

  /* Get the first instruction in the block.  */
  insn = BB_HEAD (block);

  if (insn == NULL_RTX)
    return NULL;
  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);
  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));

  return NEXT_INSN (insn);
}
/* Creates a new basic block just after basic block BB by splitting off
   everything after the specified instruction INSNP.  */

static basic_block
rtl_split_block (basic_block bb, void *insnp)
{
  basic_block new_bb;
  rtx_insn *insn = (rtx_insn *) insnp;
  edge e;
  edge_iterator ei;

  if (!insn)
    {
      insn = first_insn_after_basic_block_note (bb);

      if (insn)
	{
	  rtx_insn *next = insn;

	  insn = PREV_INSN (insn);

	  /* If the block contains only debug insns, insn would have
	     been NULL in a non-debug compilation, and then we'd end
	     up emitting a DELETED note.  For -fcompare-debug
	     stability, emit the note too.  */
	  if (insn != BB_END (bb)
	      && DEBUG_INSN_P (next)
	      && DEBUG_INSN_P (BB_END (bb)))
	    {
	      while (next != BB_END (bb) && DEBUG_INSN_P (next))
		next = NEXT_INSN (next);

	      if (next == BB_END (bb))
		emit_note_after (NOTE_INSN_DELETED, next);
	    }
	}
      else
	insn = get_last_insn ();
    }

  /* We probably should check type of the insn so that we do not create
     inconsistent cfg.  It is checked in verify_flow_info anyway, so do not
     bother.  */
  if (insn == BB_END (bb))
    emit_note_after (NOTE_INSN_DELETED, insn);

  /* Create the new basic block.  */
  new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
  BB_COPY_PARTITION (new_bb, bb);
  BB_END (bb) = insn;

  /* Redirect the outgoing edges.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* The new block starts off being dirty.  */
  df_set_bb_dirty (bb);
  return new_bb;
}
/* Return true if LOC1 and LOC2 are equivalent for
   unique_locus_on_edge_between_p purposes.  */

static bool
loc_equal (location_t loc1, location_t loc2)
{
  if (loc1 == loc2)
    return true;

  expanded_location loce1 = expand_location (loc1);
  expanded_location loce2 = expand_location (loc2);

  if (loce1.line != loce2.line
      || loce1.column != loce2.column
      || loce1.data != loce2.data)
    return false;
  if (loce1.file == loce2.file)
    return true;
  return (loce1.file != NULL
	  && loce2.file != NULL
	  && filename_cmp (loce1.file, loce2.file) == 0);
}
/* Return true if the single edge between blocks A and B is the only place
   in RTL which holds some unique locus.  */

static bool
unique_locus_on_edge_between_p (basic_block a, basic_block b)
{
  const location_t goto_locus = EDGE_SUCC (a, 0)->goto_locus;
  rtx_insn *insn, *end;

  if (LOCATION_LOCUS (goto_locus) == UNKNOWN_LOCATION)
    return false;

  /* First scan block A backward.  */
  insn = BB_END (a);
  end = PREV_INSN (BB_HEAD (a));
  while (insn != end && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
    insn = PREV_INSN (insn);

  if (insn != end && loc_equal (INSN_LOCATION (insn), goto_locus))
    return false;

  /* Then scan block B forward.  */
  insn = BB_HEAD (b);
  if (insn)
    {
      end = NEXT_INSN (BB_END (b));
      while (insn != end && !NONDEBUG_INSN_P (insn))
	insn = NEXT_INSN (insn);

      if (insn != end && INSN_HAS_LOCATION (insn)
	  && loc_equal (INSN_LOCATION (insn), goto_locus))
	return false;
    }

  return true;
}
/* If the single edge between blocks A and B is the only place in RTL which
   holds some unique locus, emit a nop with that locus between the blocks.  */

static void
emit_nop_for_unique_locus_between (basic_block a, basic_block b)
{
  if (!unique_locus_on_edge_between_p (a, b))
    return;

  BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
  INSN_LOCATION (BB_END (a)) = EDGE_SUCC (a, 0)->goto_locus;
}
/* Blocks A and B are to be merged into a single block A.  The insns
   are already contiguous.  */

static void
rtl_merge_blocks (basic_block a, basic_block b)
{
  /* If B is a forwarder block whose outgoing edge has no location, we'll
     propagate the locus of the edge between A and B onto it.  */
  const bool forward_edge_locus
    = (b->flags & BB_FORWARDER_BLOCK) != 0
      && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION;
  rtx_insn *b_head = BB_HEAD (b), *b_end = BB_END (b), *a_end = BB_END (a);
  rtx_insn *del_first = NULL, *del_last = NULL;
  rtx_insn *b_debug_start = b_end, *b_debug_end = b_end;
  bool b_empty = false;

  if (dump_file)
    fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
	     a->index);
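  /* Back B_END up over any trailing debug insns; B_DEBUG_START ends up at
     the first insn of that trailing debug run, B_DEBUG_END at its last.  */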
  while (DEBUG_INSN_P (b_end))
    b_end = PREV_INSN (b_debug_start = b_end);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (b_head))
    {
      /* Detect basic blocks with nothing but a label.  This can happen
	 in particular at the end of a function.  */
      if (b_head == b_end)
	b_empty = true;

      del_first = del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* Delete the basic block note and handle blocks containing just that
     note.  */
  if (NOTE_INSN_BASIC_BLOCK_P (b_head))
    {
      if (b_head == b_end)
	b_empty = true;
      if (! del_last)
	del_first = b_head;

      del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  if (JUMP_P (a_end))
    {
      rtx_insn *prev;

      for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
	if (!NOTE_P (prev)
	    || NOTE_INSN_BASIC_BLOCK_P (prev)
	    || prev == BB_HEAD (a))
	  break;

      del_first = a_end;

      a_end = PREV_INSN (del_first);
    }
  else if (BARRIER_P (NEXT_INSN (a_end)))
    del_first = NEXT_INSN (a_end);

  /* Delete everything marked above as well as crap that might be
     hanging out between the two blocks.  */
  BB_END (a) = a_end;
  BB_HEAD (b) = b_empty ? NULL : b_head;
  delete_insn_chain (del_first, del_last, true);

  /* If not optimizing, preserve the locus of the single edge between
     blocks A and B if necessary by emitting a nop.  */
  if (!optimize
      && !forward_edge_locus
      && !DECL_IGNORED_P (current_function_decl))
    {
      emit_nop_for_unique_locus_between (a, b);
      a_end = BB_END (a);
    }

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      update_bb_for_insn_chain (a_end, b_debug_end, a);

      BB_END (a) = b_debug_end;
      BB_HEAD (b) = NULL;
    }
  else if (b_end != b_debug_end)
    {
      /* Move any deleted labels and other notes between the end of A
	 and the debug insns that make up B after the debug insns,
	 bringing the debug insns into A while keeping the notes after
	 the end of A.  */
      if (NEXT_INSN (a_end) != b_debug_start)
	reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
			    b_debug_end);
      update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
      BB_END (a) = b_debug_end;
    }

  df_bb_delete (b->index);

  if (forward_edge_locus)
    EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
}
/* Return true when blocks A and B can be merged.  */

static bool
rtl_can_merge_blocks (basic_block a, basic_block b)
{
  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.cc:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (a) != BB_PARTITION (b))
    return false;

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* There must be exactly one edge in between the blocks.  */
  return (single_succ_p (a)
	  && single_succ (a) == b
	  && single_pred_p (b)
	  && a != b
	  /* Must be simple edge.  */
	  && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
	  && a->next_bb == b
	  && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	  && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
	  /* If the jump insn has side effects,
	     we can't kill the edge.  */
	  && (!JUMP_P (BB_END (a))
	      || (reload_completed
		  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
}
/* Return the label in the head of basic block BLOCK.  Create one if it doesn't
   exist.  */

rtx_code_label *
block_label (basic_block block)
{
  if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return NULL;

  if (!LABEL_P (BB_HEAD (block)))
    {
      BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
    }

  return as_a <rtx_code_label *> (BB_HEAD (block));
}
/* Remove all barriers from BB_FOOTER of a BB.  */

static void
remove_barriers_from_footer (basic_block bb)
{
  rtx_insn *insn = BB_FOOTER (bb);

  /* Remove barriers but keep jumptables.  */
  while (insn)
    {
      if (BARRIER_P (insn))
	{
	  if (PREV_INSN (insn))
	    SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
	  else
	    BB_FOOTER (bb) = NEXT_INSN (insn);
	  if (NEXT_INSN (insn))
	    SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
	}
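      /* Stop at the first label; it begins a jump table that must be
	 preserved.  */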
      if (LABEL_P (insn))
	return;
      insn = NEXT_INSN (insn);
    }
}
/* Attempt to perform edge redirection by replacing a possibly complex jump
   instruction with an unconditional jump, or by removing the jump completely.
   This can apply only if all edges now point to the same block.  The
   parameters and return values are equivalent to redirect_edge_and_branch.  */

edge
try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
{
  basic_block src = e->src;
  rtx_insn *insn = BB_END (src);
  rtx set;
  bool fallthru = false;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.cc:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (src) != BB_PARTITION (target))
    return NULL;

  /* We can replace or remove a complex jump only when we have exactly
     two edges.  Also, if we have exactly one outgoing edge, we can
     redirect that.  */
  if (EDGE_COUNT (src->succs) >= 3
      /* Verify that all targets will be TARGET.  Specifically, the
	 edge that is not E must also go to TARGET.  */
      || (EDGE_COUNT (src->succs) == 2
	  && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
    return NULL;
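  /* Note that in the two-edge case above, EDGE_SUCC (src, EDGE_SUCC (src, 0)
     == e) selects the successor edge other than E: the inner comparison
     yields 1 exactly when edge 0 is E itself.  */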
  if (!onlyjump_p (insn))
    return NULL;
  if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
    return NULL;

  /* Avoid removing branch with side effects.  */
  set = single_set (insn);
  if (!set || side_effects_p (set))
    return NULL;

  /* See if we can create the fallthru edge.  */
  if (in_cfglayout || can_fallthru (src, target))
    {
      if (dump_file)
	fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
      fallthru = true;

      /* Selectively unlink whole insn chain.  */
      if (in_cfglayout)
	{
	  delete_insn_chain (insn, BB_END (src), false);
	  remove_barriers_from_footer (src);
	}
      else
	delete_insn_chain (insn, PREV_INSN (BB_HEAD (target)), false);
    }

  /* If this already is simplejump, redirect it.  */
  else if (simplejump_p (insn))
    {
      if (e->dest == target)
	return NULL;
      if (dump_file)
	fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
		 INSN_UID (insn), e->dest->index, target->index);
      if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
			  block_label (target), 0))
	{
	  gcc_assert (target == EXIT_BLOCK_PTR_FOR_FN (cfun));
	  return NULL;
	}
    }

  /* Cannot do anything for target exit block.  */
  else if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return NULL;

  /* Or replace possibly complicated jump insn by simple jump insn.  */
  else
    {
      rtx_code_label *target_label = block_label (target);
      rtx_insn *barrier;
      rtx_insn *label;
      rtx_jump_table_data *table;

      emit_jump_insn_after_noloc (targetm.gen_jump (target_label), insn);
      JUMP_LABEL (BB_END (src)) = target_label;
      LABEL_NUSES (target_label)++;
      if (dump_file)
	fprintf (dump_file, "Replacing insn %i by jump %i\n",
		 INSN_UID (insn), INSN_UID (BB_END (src)));

      delete_insn_chain (insn, insn, false);

      /* Recognize a tablejump that we are converting to a
	 simple jump and remove its associated CODE_LABEL
	 and ADDR_VEC or ADDR_DIFF_VEC.  */
      if (tablejump_p (insn, &label, &table))
	delete_insn_chain (label, table, false);

      barrier = next_nonnote_nondebug_insn (BB_END (src));
      if (!barrier || !BARRIER_P (barrier))
	emit_barrier_after (BB_END (src));
      else
	{
	  if (barrier != NEXT_INSN (BB_END (src)))
	    {
	      /* Move the jump before barrier so that the notes
		 which originally were or were created before jump table are
		 inside the basic block.  */
	      rtx_insn *new_insn = BB_END (src);

	      update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
					PREV_INSN (barrier), src);

	      SET_NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
	      SET_PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);

	      SET_NEXT_INSN (new_insn) = barrier;
	      SET_NEXT_INSN (PREV_INSN (barrier)) = new_insn;

	      SET_PREV_INSN (new_insn) = PREV_INSN (barrier);
	      SET_PREV_INSN (barrier) = new_insn;
	    }
	}
    }

  /* Keep only one edge out and set proper flags.  */
  if (!single_succ_p (src))
    remove_edge (e);
  gcc_assert (single_succ_p (src));

  e = single_succ_edge (src);
  if (fallthru)
    e->flags = EDGE_FALLTHRU;
  else
    e->flags = 0;

  e->probability = profile_probability::always ();

  if (e->dest != target)
    redirect_edge_succ (e, target);
  return e;
}
/* Subroutine of redirect_branch_edge that tries to patch the jump
   instruction INSN so that it reaches block NEW_BB.  Do this
   only when it originally reached OLD_LABEL.  Return true if this
   worked or the original target wasn't OLD_LABEL, return false if the
   redirection doesn't work.  */

static bool
patch_jump_insn (rtx_insn *insn, rtx_insn *old_label, basic_block new_bb)
{
  rtx_jump_table_data *table;
  rtx tmp;
  /* Recognize a tablejump and adjust all matching cases.  */
  if (tablejump_p (insn, NULL, &table))
    {
      rtvec vec;
      int j;
      rtx_code_label *new_label = block_label (new_bb);

      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
	return false;
      vec = table->get_labels ();

      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	  {
	    RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
	    --LABEL_NUSES (old_label);
	    ++LABEL_NUSES (new_label);
	  }

      /* Handle casesi dispatch insns.  */
      if ((tmp = tablejump_casesi_pattern (insn)) != NULL_RTX
	  && label_ref_label (XEXP (SET_SRC (tmp), 2)) == old_label)
	{
	  XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
						       new_label);
	  --LABEL_NUSES (old_label);
	  ++LABEL_NUSES (new_label);
	}
    }
  else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
    {
      int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
      rtx note;

      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
	return false;
      rtx_code_label *new_label = block_label (new_bb);

      for (i = 0; i < n; ++i)
	{
	  rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
	  gcc_assert (GET_CODE (old_ref) == LABEL_REF);
	  if (XEXP (old_ref, 0) == old_label)
	    {
	      ASM_OPERANDS_LABEL (tmp, i)
		= gen_rtx_LABEL_REF (Pmode, new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}

      if (JUMP_LABEL (insn) == old_label)
	{
	  JUMP_LABEL (insn) = new_label;
	  note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
	  if (note)
	    remove_note (insn, note);
	}
      else
	{
	  note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
	  if (note)
	    remove_note (insn, note);
	  if (JUMP_LABEL (insn) != new_label
	      && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
	    add_reg_note (insn, REG_LABEL_TARGET, new_label);
	}
      while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
	     != NULL_RTX)
	XEXP (note, 0) = new_label;
    }
  else
    {
      /* ?? We may play the games with moving the named labels from
	 one basic block to the other in case only one computed_jump is
	 available.  */
      if (computed_jump_p (insn)
	  /* A return instruction can't be redirected.  */
	  || returnjump_p (insn))
	return false;

      if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
	{
	  /* If the insn doesn't go where we think, we're confused.  */
	  gcc_assert (JUMP_LABEL (insn) == old_label);

	  /* If the substitution doesn't succeed, die.  This can happen
	     if the back end emitted unrecognizable instructions or if
	     target is exit block on some arches.  Or for crossing
	     jumps.  */
	  if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
			      block_label (new_bb), 0))
	    {
	      gcc_assert (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
			  || CROSSING_JUMP_P (insn));
	      return false;
	    }
	}
    }
  return true;
}
/* Redirect the edge representing the branch of an (un)conditional jump or
   tablejump; return NULL on failure.  */
static edge
redirect_branch_edge (edge e, basic_block target)
{
  rtx_insn *old_label = BB_HEAD (e->dest);
  basic_block src = e->src;
  rtx_insn *insn = BB_END (src);

  /* We can only redirect non-fallthru edges of jump insn.  */
  if (e->flags & EDGE_FALLTHRU)
    return NULL;
  else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
    return NULL;

  if (!currently_expanding_to_rtl)
    {
      if (!patch_jump_insn (as_a <rtx_jump_insn *> (insn), old_label, target))
	return NULL;
    }
  else
    /* When expanding this BB might actually contain multiple
       jumps (i.e. not yet split by find_many_sub_basic_blocks).
       Redirect all of those that match our label.  */
    FOR_BB_INSNS (src, insn)
      if (JUMP_P (insn) && !patch_jump_insn (as_a <rtx_jump_insn *> (insn),
					     old_label, target))
	return NULL;

  if (dump_file)
    fprintf (dump_file, "Edge %i->%i redirected to %i\n",
	     e->src->index, e->dest->index, target->index);

  if (e->dest != target)
    e = redirect_edge_succ_nodup (e, target);

  return e;
}
/* Called when edge E has been redirected to a new destination,
   in order to update the region crossing flag on the edge and
   jump.  */

static void
fixup_partition_crossing (edge e)
{
  if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun) || e->dest
      == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return;
  /* If we redirected an existing edge, it may already be marked
     crossing, even though the new src is missing a reg crossing note.
     But make sure reg crossing note doesn't already exist before
     inserting.  */
  if (BB_PARTITION (e->src) != BB_PARTITION (e->dest))
    {
      e->flags |= EDGE_CROSSING;
      if (JUMP_P (BB_END (e->src)))
	CROSSING_JUMP_P (BB_END (e->src)) = 1;
    }
  else if (BB_PARTITION (e->src) == BB_PARTITION (e->dest))
    {
      e->flags &= ~EDGE_CROSSING;
      /* Remove the section crossing note from jump at end of
	 src if it exists, and if no other successors are
	 still crossing.  */
      if (JUMP_P (BB_END (e->src)) && CROSSING_JUMP_P (BB_END (e->src)))
	{
	  bool has_crossing_succ = false;
	  edge e2;
	  edge_iterator ei;
	  FOR_EACH_EDGE (e2, ei, e->src->succs)
	    {
	      has_crossing_succ |= (e2->flags & EDGE_CROSSING);
	      if (has_crossing_succ)
		break;
	    }
	  if (!has_crossing_succ)
	    CROSSING_JUMP_P (BB_END (e->src)) = 0;
	}
    }
}
/* Called when block BB has been reassigned to the cold partition,
   because it is now dominated by another cold block,
   to ensure that the region crossing attributes are updated.  */

static void
fixup_new_cold_bb (basic_block bb)
{
  edge e;
  edge_iterator ei;

  /* This is called when a hot bb is found to now be dominated
     by a cold bb and therefore needs to become cold.  Therefore,
     its preds will no longer be region crossing.  Any non-dominating
     preds that were previously hot would also have become cold
     in the caller for the same region.  Any preds that were previously
     region-crossing will be adjusted in fixup_partition_crossing.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      fixup_partition_crossing (e);
    }

  /* Possibly need to make bb's successor edges region crossing,
     or remove stale region crossing.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      /* We can't have fall-through edges across partition boundaries.
	 Note that force_nonfallthru will do any necessary partition
	 boundary fixup by calling fixup_partition_crossing itself.  */
      if ((e->flags & EDGE_FALLTHRU)
	  && BB_PARTITION (bb) != BB_PARTITION (e->dest)
	  && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	force_nonfallthru (e);
      else
	fixup_partition_crossing (e);
    }
}
/* Attempt to change code to redirect edge E to TARGET.  Don't do that at
   the expense of adding new instructions or reordering basic blocks.

   Function can also be called with edge destination equivalent to the TARGET.
   Then it should try the simplifications and do nothing if none is possible.

   Return edge representing the branch if transformation succeeded.  Return NULL
   on failure.
   We still return NULL in case E already pointed to TARGET and we didn't
   manage to simplify the instruction stream.  */

static edge
rtl_redirect_edge_and_branch (edge e, basic_block target)
{
  edge ret;
  basic_block src = e->src;
  basic_block dest = e->dest;

  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return NULL;

  if (dest == target)
    return e;

  if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
    {
      df_set_bb_dirty (src);
      fixup_partition_crossing (ret);
      return ret;
    }

  ret = redirect_branch_edge (e, target);
  if (!ret)
    return NULL;

  df_set_bb_dirty (src);
  fixup_partition_crossing (ret);
  return ret;
}
/* Emit a barrier after BB, into the footer if we are in CFGLAYOUT mode.  */

void
emit_barrier_after_bb (basic_block bb)
{
  rtx_barrier *barrier = emit_barrier_after (BB_END (bb));
  gcc_assert (current_ir_type () == IR_RTL_CFGRTL
	      || current_ir_type () == IR_RTL_CFGLAYOUT);
  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    {
      rtx_insn *insn = unlink_insn_chain (barrier, barrier);

      if (BB_FOOTER (bb))
	{
	  rtx_insn *footer_tail = BB_FOOTER (bb);

	  while (NEXT_INSN (footer_tail))
	    footer_tail = NEXT_INSN (footer_tail);
	  if (!BARRIER_P (footer_tail))
	    {
	      SET_NEXT_INSN (footer_tail) = insn;
	      SET_PREV_INSN (insn) = footer_tail;
	    }
	}
      else
	BB_FOOTER (bb) = insn;
    }
}
/* Like force_nonfallthru below, but additionally performs redirection.
   Used by redirect_edge_and_branch_force.  JUMP_LABEL is used only
   when redirecting to the EXIT_BLOCK, it is either ret_rtx or
   simple_return_rtx, indicating which kind of returnjump to create.
   It should be NULL otherwise.  */

basic_block
force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
{
  basic_block jump_block, new_bb = NULL, src = e->src;
  rtx note;
  edge new_edge;
  int abnormal_edge_flags = 0;
  bool asm_goto_edge = false;
  int loc;

  /* In the case the last instruction is conditional jump to the next
     instruction, first redirect the jump itself and then continue
     by creating a basic block afterwards to redirect fallthru edge.  */
  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
      && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
      && any_condjump_p (BB_END (e->src))
      && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
    {
      rtx note;
      edge b = unchecked_make_edge (e->src, target, 0);
      bool redirected;

      redirected = redirect_jump (as_a <rtx_jump_insn *> (BB_END (e->src)),
				  block_label (target), 0);
      gcc_assert (redirected);

      note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
      if (note)
	{
	  int prob = XINT (note, 0);

	  b->probability = profile_probability::from_reg_br_prob_note (prob);
	  e->probability -= e->probability;
	}
    }

  if (e->flags & EDGE_ABNORMAL)
    {
      /* Irritating special case - fallthru edge to the same block as abnormal
	 edge.
	 We can't redirect abnormal edge, but we still can split the fallthru
	 one and create separate abnormal edge to original destination.
	 This allows bb-reorder to make such edge non-fallthru.  */
      gcc_assert (e->dest == target);
      abnormal_edge_flags = e->flags & ~EDGE_FALLTHRU;
      e->flags &= EDGE_FALLTHRU;
    }
  else
    {
      gcc_assert (e->flags & EDGE_FALLTHRU);
      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  /* We can't redirect the entry block.  Create an empty block
	     at the start of the function which we use to add the new
	     jump.  */
	  edge tmp;
	  edge_iterator ei;
	  bool found = false;

	  basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL,
					       ENTRY_BLOCK_PTR_FOR_FN (cfun));
	  bb->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;

	  /* Make sure new block ends up in correct hot/cold section.  */
	  BB_COPY_PARTITION (bb, e->dest);

	  /* Change the existing edge's source to be the new block, and add
	     a new edge from the entry block to the new block.  */
	  e->src = bb;
	  for (ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
	       (tmp = ei_safe_edge (ei)); )
	    {
	      if (tmp == e)
		{
		  ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs->unordered_remove (ei.index);
		  found = true;
		  break;
		}
	      else
		ei_next (&ei);
	    }

	  gcc_assert (found);

	  vec_safe_push (bb->succs, e);
	  make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb,
				 EDGE_FALLTHRU);
	}
    }

  /* If e->src ends with asm goto, see if any of the ASM_OPERANDS_LABELs
     don't point to the target or fallthru label.  */
  if (JUMP_P (BB_END (e->src))
      && target != EXIT_BLOCK_PTR_FOR_FN (cfun)
      && (e->flags & EDGE_FALLTHRU)
      && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
    {
      int i, n = ASM_OPERANDS_LABEL_LENGTH (note);
      bool adjust_jump_target = false;

      for (i = 0; i < n; ++i)
	{
	  if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (e->dest))
	    {
	      LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))--;
	      XEXP (ASM_OPERANDS_LABEL (note, i), 0) = block_label (target);
	      LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))++;
	      adjust_jump_target = true;
	    }
	  if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (target))
	    asm_goto_edge = true;
	}
      if (adjust_jump_target)
	{
	  rtx_insn *insn = BB_END (e->src);
	  rtx note;
	  rtx_insn *old_label = BB_HEAD (e->dest);
	  rtx_insn *new_label = BB_HEAD (target);

	  if (JUMP_LABEL (insn) == old_label)
	    {
	      JUMP_LABEL (insn) = new_label;
	      note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
	      if (note)
		remove_note (insn, note);
	    }
	  else
	    {
	      note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
	      if (note)
		remove_note (insn, note);
	      if (JUMP_LABEL (insn) != new_label
		  && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
		add_reg_note (insn, REG_LABEL_TARGET, new_label);
	    }
	  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
		 != NULL_RTX)
	    XEXP (note, 0) = new_label;
	}
    }
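  /* A new jump block is needed when E is not the only outgoing edge of its
     source, when an abnormal edge shares the block, or when the asm goto
     also targets TARGET's label.  */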
  if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
    {
      rtx_insn *new_head;
      profile_count count = e->count ();
      profile_probability probability = e->probability;
      /* Create the new structures.  */

      /* If the old block ended with a tablejump, skip its table
	 by searching forward from there.  Otherwise start searching
	 forward from the last instruction of the old block.  */
      rtx_jump_table_data *table;
      if (tablejump_p (BB_END (e->src), NULL, &table))
	new_head = table;
      else
	new_head = BB_END (e->src);
      new_head = NEXT_INSN (new_head);

      jump_block = create_basic_block (new_head, NULL, e->src);
      jump_block->count = count;

      /* Make sure new block ends up in correct hot/cold section.  */

      BB_COPY_PARTITION (jump_block, e->src);

      /* Wire edge in.  */
      new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
      new_edge->probability = probability;

      /* Redirect old edge.  */
      redirect_edge_pred (e, jump_block);
      e->probability = profile_probability::always ();

      /* If e->src was previously region crossing, it no longer is
	 and the reg crossing note should be removed.  */
      fixup_partition_crossing (new_edge);

      /* If asm goto has any label refs to target's label,
	 add also edge from asm goto bb to target.  */
      if (asm_goto_edge)
	{
	  new_edge->probability /= 2;
	  jump_block->count /= 2;
	  edge new_edge2 = make_edge (new_edge->src, target,
				      e->flags & ~EDGE_FALLTHRU);
	  new_edge2->probability = probability - new_edge->probability;
	}

      new_bb = jump_block;
    }
  else
    jump_block = e->src;

  loc = e->goto_locus;
  e->flags &= ~EDGE_FALLTHRU;
  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      if (jump_label == ret_rtx)
	emit_jump_insn_after_setloc (targetm.gen_return (),
				     BB_END (jump_block), loc);
      else
	{
	  gcc_assert (jump_label == simple_return_rtx);
	  emit_jump_insn_after_setloc (targetm.gen_simple_return (),
				       BB_END (jump_block), loc);
	}
      set_return_jump_label (BB_END (jump_block));
    }
  else
    {
      rtx_code_label *label = block_label (target);
      emit_jump_insn_after_setloc (targetm.gen_jump (label),
				   BB_END (jump_block), loc);
      JUMP_LABEL (BB_END (jump_block)) = label;
      LABEL_NUSES (label)++;
    }

  /* We might be in cfg layout mode, and if so, the following routine will
     insert the barrier correctly.  */
  emit_barrier_after_bb (jump_block);
  redirect_edge_succ_nodup (e, target);

  if (abnormal_edge_flags)
    make_edge (src, target, abnormal_edge_flags);

  df_mark_solutions_dirty ();
  fixup_partition_crossing (e);
  return new_bb;
}
/* Edge E is assumed to be fallthru edge.  Emit needed jump instruction
   (and possibly create new basic block) to make edge non-fallthru.
   Return newly created BB or NULL if none.  */

static basic_block
rtl_force_nonfallthru (edge e)
{
  return force_nonfallthru_and_redirect (e, e->dest, NULL_RTX);
}
/* Redirect edge even at the expense of creating new jump insn or
   basic block.  Return new basic block if created, NULL otherwise.
   Conversion must be possible.  */

static basic_block
rtl_redirect_edge_and_branch_force (edge e, basic_block target)
{
  if (redirect_edge_and_branch (e, target)
      || e->dest == target)
    return NULL;

  /* In case the edge redirection failed, try to force it to be non-fallthru
     and redirect newly created simplejump.  */
  df_set_bb_dirty (e->src);
  return force_nonfallthru_and_redirect (e, target, NULL_RTX);
}
/* The given edge should potentially be a fallthru edge.  If that is in
   fact true, delete the jump and barriers that are in the way.  */

static void
rtl_tidy_fallthru_edge (edge e)
{
  rtx_insn *q;
  basic_block b = e->src, c = b->next_bb;

  /* ??? In a late-running flow pass, other folks may have deleted basic
     blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label.  They ought to be chastised and fixed.

     We can also wind up with a sequence of undeletable labels between
     one block and the next.

     So search through a sequence of barriers, labels, and notes for
     the head of block C and assert that we really do fall through.  */

  for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
    if (NONDEBUG_INSN_P (q))
      return;

  /* Remove what will soon cease being the jump insn from the source block.
     If block B consisted only of this single jump, turn it into a deleted
     note.  */
  q = BB_END (b);
  if (JUMP_P (q)
      && onlyjump_p (q)
      && (any_uncondjump_p (q)
	  || single_succ_p (b)))
    {
      rtx_insn *label;
      rtx_jump_table_data *table;

      if (tablejump_p (q, &label, &table))
	{
	  /* The label is likely mentioned in some instruction before
	     the tablejump and might not be DCEd, so turn it into
	     a note instead and move before the tablejump that is going to
	     be deleted.  */
	  const char *name = LABEL_NAME (label);
	  PUT_CODE (label, NOTE);
	  NOTE_KIND (label) = NOTE_INSN_DELETED_LABEL;
	  NOTE_DELETED_LABEL_NAME (label) = name;
	  reorder_insns (label, label, PREV_INSN (q));
	  delete_insn (table);
	}

      q = PREV_INSN (q);
    }
  /* Unconditional jumps with side-effects (i.e. which we can't just delete
     together with the barrier) should never have a fallthru edge.  */
  else if (JUMP_P (q) && any_uncondjump_p (q))
    return;

  /* Selectively unlink the sequence.  */
  if (q != PREV_INSN (BB_HEAD (c)))
    delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);

  e->flags |= EDGE_FALLTHRU;
}
/* Should move basic block BB after basic block AFTER.  NIY.  */

static bool
rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
		      basic_block after ATTRIBUTE_UNUSED)
{
  return false;
}
/* Locate the last bb in the same partition as START_BB.  */

static basic_block
last_bb_in_partition (basic_block start_bb)
{
  basic_block bb;
  FOR_BB_BETWEEN (bb, start_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      if (BB_PARTITION (start_bb) != BB_PARTITION (bb->next_bb))
	return bb;
    }
  /* Return bb before the exit block.  */
  return bb->prev_bb;
}
1851 /* Split a (typically critical) edge. Return the new block.
1852 The edge must not be abnormal.
1854 ??? The code generally expects to be called on critical edges.
1855 The case of a block ending in an unconditional jump to a
1856 block with multiple predecessors is not handled optimally. */
1858 static basic_block
1859 rtl_split_edge (edge edge_in)
1861 basic_block bb, new_bb;
1862 rtx_insn *before;
1864 /* Abnormal edges cannot be split. */
1865 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
1867 /* We are going to place the new block in front of edge destination.
1868 Avoid existence of fallthru predecessors. */
1869 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1871 edge e = find_fallthru_edge (edge_in->dest->preds);
1873 if (e)
1874 force_nonfallthru (e);
1877 /* Create the basic block note. */
1878 if (edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1879 before = BB_HEAD (edge_in->dest);
1880 else
1881 before = NULL;
1883 /* If this is a fall through edge to the exit block, the blocks might be
1884 not adjacent, and the right place is after the source. */
1885 if ((edge_in->flags & EDGE_FALLTHRU)
1886 && edge_in->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
1888 before = NEXT_INSN (BB_END (edge_in->src));
1889 bb = create_basic_block (before, NULL, edge_in->src);
1890 BB_COPY_PARTITION (bb, edge_in->src);
1892 else
1894 if (edge_in->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1896 bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
1897 BB_COPY_PARTITION (bb, edge_in->dest);
1899 else
1901 basic_block after = edge_in->dest->prev_bb;
1902 /* If this is post-bb reordering, and the edge crosses a partition
1903 boundary, the new block needs to be inserted in the bb chain
1904 at the end of the src partition (since we put the new bb into
1905 that partition, see below). Otherwise we may end up creating
1906 an extra partition crossing in the chain, which is illegal.
1907 It can't go after the src, because src may have a fall-through
1908 to a different block. */
1909 if (crtl->bb_reorder_complete
1910 && (edge_in->flags & EDGE_CROSSING))
1912 after = last_bb_in_partition (edge_in->src);
1913 before = get_last_bb_insn (after);
1914 /* The instruction following the last bb in partition should
1915 be a barrier, since it cannot end in a fall-through. */
1916 gcc_checking_assert (BARRIER_P (before));
1917 before = NEXT_INSN (before);
1919 bb = create_basic_block (before, NULL, after);
1920 /* Put the split bb into the src partition, to avoid creating
1921 a situation where a cold bb dominates a hot bb, in the case
1922 where src is cold and dest is hot. The src will dominate
1923 the new bb (whereas it might not have dominated dest). */
1924 BB_COPY_PARTITION (bb, edge_in->src);
1928 make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
1930 /* Can't allow a region crossing edge to be fallthrough. */
1931 if (BB_PARTITION (bb) != BB_PARTITION (edge_in->dest)
1932 && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1934 new_bb = force_nonfallthru (single_succ_edge (bb));
1935 gcc_assert (!new_bb);
1938 /* For non-fallthru edges, we must adjust the predecessor's
1939 jump instruction to target our new block. */
1940 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1942 edge redirected = redirect_edge_and_branch (edge_in, bb);
1943 gcc_assert (redirected);
1945 else
1947 if (edge_in->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
1949 /* For asm goto even splitting of the fallthru edge might
1950 need insn patching, as other labels might point to the
1951 old label. */
1952 rtx_insn *last = BB_END (edge_in->src);
1953 if (last
1954 && JUMP_P (last)
1955 && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
1956 && (extract_asm_operands (PATTERN (last))
1957 || JUMP_LABEL (last) == before)
1958 && patch_jump_insn (last, before, bb))
1959 df_set_bb_dirty (edge_in->src);
1961 redirect_edge_succ (edge_in, bb);
1964 return bb;
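/* Illustrative sketch of the common case handled above: splitting a
   critical edge A->B inserts a new block N on the edge,

	A ---> B      becomes      A ---> N --(fallthru)--> B

   with N placed in A's partition and A's jump (if any) redirected to N
   by redirect_edge_and_branch.  */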
1967 /* Queue instructions for insertion on an edge between two basic blocks.
1968 The new instructions and basic blocks (if any) will not appear in the
1969 CFG until commit_edge_insertions is called. */
1971 void
1972 insert_insn_on_edge (rtx pattern, edge e)
1974 /* We cannot insert instructions on an abnormal critical edge.
1975 It will be easier to find the culprit if we die now. */
1976 gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
1978 if (e->insns.r == NULL_RTX)
1979 start_sequence ();
1980 else
1981 push_to_sequence (e->insns.r);
1983 emit_insn (pattern);
1985 e->insns.r = get_insns ();
1986 end_sequence ();
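/* A typical caller queues a pattern on an edge and later commits all
   pending insertions in one pass over the CFG; a sketch only, where
   DEST and SRC stand for whatever rtxes the pass is moving:

	insert_insn_on_edge (gen_move_insn (dest, src), e);
	...
	commit_edge_insertions ();

   Nothing is spliced into the insn stream, and no new blocks are
   created, until commit_edge_insertions runs.  */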
1989 /* Update the CFG for the instructions queued on edge E. */
1991 void
1992 commit_one_edge_insertion (edge e)
1994 rtx_insn *before = NULL, *after = NULL, *insns, *tmp, *last;
1995 basic_block bb;
1997 /* Pull the insns off the edge now since the edge might go away. */
1998 insns = e->insns.r;
1999 e->insns.r = NULL;
2001 /* Figure out where to put these insns. If the destination has
2002 one predecessor, insert there. Except for the exit block. */
2003 if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
2005 bb = e->dest;
2007 /* Get the location correct wrt a code label, and "nice" wrt
2008 a basic block note, and before everything else. */
2009 tmp = BB_HEAD (bb);
2010 if (LABEL_P (tmp))
2011 tmp = NEXT_INSN (tmp);
2012 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
2013 tmp = NEXT_INSN (tmp);
2014 if (tmp == BB_HEAD (bb))
2015 before = tmp;
2016 else if (tmp)
2017 after = PREV_INSN (tmp);
2018 else
2019 after = get_last_insn ();
2022 /* If the source has one successor and the edge is not abnormal,
2023 insert there. Except for the entry block.
2024 Don't do this if the predecessor ends in a jump other than an
2025 unconditional simple jump. E.g. for an asm goto that points all
2026 its labels at the fallthru basic block, we can't insert instructions
2027 before the asm goto, as the asm goto can have various side effects,
2028 and can't emit instructions after the asm goto, as it must end
2029 the basic block. */
2030 else if ((e->flags & EDGE_ABNORMAL) == 0
2031 && single_succ_p (e->src)
2032 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
2033 && (!JUMP_P (BB_END (e->src))
2034 || simplejump_p (BB_END (e->src))))
2036 bb = e->src;
2038 /* It is possible to have a non-simple jump here. Consider a target
2039 where some forms of unconditional jumps clobber a register. This
2040 happens on the fr30 for example.
2042 We know this block has a single successor, so we can just emit
2043 the queued insns before the jump. */
2044 if (JUMP_P (BB_END (bb)))
2045 before = BB_END (bb);
2046 else
2048 /* We'd better be fallthru, or we've lost track of what's what. */
2049 gcc_assert (e->flags & EDGE_FALLTHRU);
2051 after = BB_END (bb);
2055 /* Otherwise we must split the edge. */
2056 else
2058 bb = split_edge (e);
2060 /* If E crossed a partition boundary, we needed to make bb end in
2061 a region-crossing jump, even though it was originally fallthru. */
2062 if (JUMP_P (BB_END (bb)))
2063 before = BB_END (bb);
2064 else
2065 after = BB_END (bb);
2068 /* Now that we've found the spot, do the insertion. */
2069 if (before)
2071 emit_insn_before_noloc (insns, before, bb);
2072 last = prev_nonnote_insn (before);
2074 else
2075 last = emit_insn_after_noloc (insns, after, bb);
2077 if (returnjump_p (last))
2079 /* ??? Remove all outgoing edges from BB and add one for EXIT.
2080 This is not currently a problem because this only happens
2081 for the (single) epilogue, which already has a fallthru edge
2082 to EXIT. */
2084 e = single_succ_edge (bb);
2085 gcc_assert (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
2086 && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));
2088 e->flags &= ~EDGE_FALLTHRU;
2089 emit_barrier_after (last);
2091 if (before)
2092 delete_insn (before);
2094 else
2095 /* Some builtin expanders, such as those for memset and memcpy,
2096 may generate loops and conditionals, and those may get emitted
2097 into edges. That's ok while expanding to rtl; basic block
2098 boundaries will be identified and split afterwards. ??? Need
2099 we check whether the destination labels of any inserted jumps
2100 are also part of the inserted sequence? */
2101 gcc_assert (!JUMP_P (last) || currently_expanding_to_rtl);
2104 /* Update the CFG for all queued instructions. */
2106 void
2107 commit_edge_insertions (void)
2109 basic_block bb;
2111 /* Optimization passes that invoke this routine can cause hot blocks
2112 previously reached by both hot and cold blocks to become dominated only
2113 by cold blocks. This will cause the verification below to fail,
2114 and lead to now-cold code in the hot section. In some cases this
2115 may only be visible after newly unreachable blocks are deleted,
2116 which will be done by fixup_partitions. */
2117 fixup_partitions ();
2119 if (!currently_expanding_to_rtl)
2120 checking_verify_flow_info ();
2122 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
2123 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
2125 edge e;
2126 edge_iterator ei;
2128 FOR_EACH_EDGE (e, ei, bb->succs)
2129 if (e->insns.r)
2131 if (currently_expanding_to_rtl)
2132 rebuild_jump_labels_chain (e->insns.r);
2133 commit_one_edge_insertion (e);
2139 /* Print out RTL-specific basic block information (live information
2140 at start and end with TDF_DETAILS). FLAGS are the TDF_* masks
2141 documented in dumpfile.h. */
2143 static void
2144 rtl_dump_bb (FILE *outf, basic_block bb, int indent, dump_flags_t flags)
2146 char *s_indent;
2148 s_indent = (char *) alloca ((size_t) indent + 1);
2149 memset (s_indent, ' ', (size_t) indent);
2150 s_indent[indent] = '\0';
2152 if (df && (flags & TDF_DETAILS))
2154 df_dump_top (bb, outf);
2155 putc ('\n', outf);
2158 if (bb->index != ENTRY_BLOCK && bb->index != EXIT_BLOCK
2159 && rtl_bb_info_initialized_p (bb))
2161 rtx_insn *last = BB_END (bb);
2162 if (last)
2163 last = NEXT_INSN (last);
2164 for (rtx_insn *insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
2166 if (flags & TDF_DETAILS)
2167 df_dump_insn_top (insn, outf);
2168 if (! (flags & TDF_SLIM))
2169 print_rtl_single (outf, insn);
2170 else
2171 dump_insn_slim (outf, insn);
2172 if (flags & TDF_DETAILS)
2173 df_dump_insn_bottom (insn, outf);
2177 if (df && (flags & TDF_DETAILS))
2179 df_dump_bottom (bb, outf);
2180 putc ('\n', outf);
2185 /* Like dump_function_to_file, but for RTL. Print out dataflow information
2186 for the start of each basic block. FLAGS are the TDF_* masks documented
2187 in dumpfile.h. */
2189 void
2190 print_rtl_with_bb (FILE *outf, const rtx_insn *rtx_first, dump_flags_t flags)
2192 const rtx_insn *tmp_rtx;
2193 if (rtx_first == 0)
2194 fprintf (outf, "(nil)\n");
2195 else
2197 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
2198 int max_uid = get_max_uid ();
2199 basic_block *start = XCNEWVEC (basic_block, max_uid);
2200 basic_block *end = XCNEWVEC (basic_block, max_uid);
2201 enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
2202 basic_block bb;
2204 /* After freeing the CFG, we still have BLOCK_FOR_INSN set on most
2205 insns, but the CFG is not maintained so the basic block info
2206 is not reliable. Therefore it's omitted from the dumps. */
2207 if (! (cfun->curr_properties & PROP_cfg))
2208 flags &= ~TDF_BLOCKS;
2210 if (df)
2211 df_dump_start (outf);
2213 if (cfun->curr_properties & PROP_cfg)
2215 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2217 rtx_insn *x;
2219 start[INSN_UID (BB_HEAD (bb))] = bb;
2220 end[INSN_UID (BB_END (bb))] = bb;
2221 if (flags & TDF_BLOCKS)
2223 for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
2225 enum bb_state state = IN_MULTIPLE_BB;
2227 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
2228 state = IN_ONE_BB;
2229 in_bb_p[INSN_UID (x)] = state;
2231 if (x == BB_END (bb))
2232 break;
2238 for (tmp_rtx = rtx_first; tmp_rtx != NULL; tmp_rtx = NEXT_INSN (tmp_rtx))
2240 if (flags & TDF_BLOCKS)
2242 bb = start[INSN_UID (tmp_rtx)];
2243 if (bb != NULL)
2245 dump_bb_info (outf, bb, 0, dump_flags, true, false);
2246 if (df && (flags & TDF_DETAILS))
2247 df_dump_top (bb, outf);
2250 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
2251 && !NOTE_P (tmp_rtx)
2252 && !BARRIER_P (tmp_rtx))
2253 fprintf (outf, ";; Insn is not within a basic block\n");
2254 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
2255 fprintf (outf, ";; Insn is in multiple basic blocks\n");
2258 if (flags & TDF_DETAILS)
2259 df_dump_insn_top (tmp_rtx, outf);
2260 if (! (flags & TDF_SLIM))
2261 print_rtl_single (outf, tmp_rtx);
2262 else
2263 dump_insn_slim (outf, tmp_rtx);
2264 if (flags & TDF_DETAILS)
2265 df_dump_insn_bottom (tmp_rtx, outf);
2267 bb = end[INSN_UID (tmp_rtx)];
2268 if (bb != NULL)
2270 if (flags & TDF_BLOCKS)
2272 dump_bb_info (outf, bb, 0, dump_flags, false, true);
2273 if (df && (flags & TDF_DETAILS))
2274 df_dump_bottom (bb, outf);
2275 putc ('\n', outf);
2277 /* Emit a hint if the fallthrough target of the current basic block
2278 isn't the block placed right after it. */
2279 else if (EDGE_COUNT (bb->succs) > 0)
2281 gcc_assert (BB_END (bb) == tmp_rtx);
2282 const rtx_insn *ninsn = NEXT_INSN (tmp_rtx);
2283 /* Bypass intervening deleted-insn notes and debug insns. */
2284 while (ninsn
2285 && !NONDEBUG_INSN_P (ninsn)
2286 && !start[INSN_UID (ninsn)])
2287 ninsn = NEXT_INSN (ninsn);
2288 edge e = find_fallthru_edge (bb->succs);
2289 if (e && ninsn)
2291 basic_block dest = e->dest;
2292 if (start[INSN_UID (ninsn)] != dest)
2293 fprintf (outf, "%s ; pc falls through to BB %d\n",
2294 print_rtx_head, dest->index);
2300 free (start);
2301 free (end);
2302 free (in_bb_p);
2306 /* Update the branch probability of BB if a REG_BR_PROB is present. */
2308 void
2309 update_br_prob_note (basic_block bb)
2311 rtx note;
2312 note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
2313 if (!JUMP_P (BB_END (bb)) || !BRANCH_EDGE (bb)->probability.initialized_p ())
2315 if (note)
2317 rtx *note_link, this_rtx;
2319 note_link = &REG_NOTES (BB_END (bb));
2320 for (this_rtx = *note_link; this_rtx; this_rtx = XEXP (this_rtx, 1))
2321 if (this_rtx == note)
2323 *note_link = XEXP (this_rtx, 1);
2324 break;
2327 return;
2329 if (!note
2330 || XINT (note, 0) == BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ())
2331 return;
2332 XINT (note, 0) = BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ();
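/* For instance, a conditional jump ending BB may carry a REG_BR_PROB
   note whose integer operand encodes the taken probability (in the
   profile_probability::to_reg_br_prob_note encoding); the function
   above deletes the note when the cfg probability is uninitialized,
   and otherwise rewrites the operand to match BRANCH_EDGE (bb).  */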
2335 /* Get the last insn associated with block BB (that includes barriers and
2336 tablejumps after BB). */
2337 rtx_insn *
2338 get_last_bb_insn (basic_block bb)
2340 rtx_jump_table_data *table;
2341 rtx_insn *tmp;
2342 rtx_insn *end = BB_END (bb);
2344 /* Include any jump table following the basic block. */
2345 if (tablejump_p (end, NULL, &table))
2346 end = table;
2348 /* Include any barriers that may follow the basic block. */
2349 tmp = next_nonnote_nondebug_insn_bb (end);
2350 while (tmp && BARRIER_P (tmp))
2352 end = tmp;
2353 tmp = next_nonnote_nondebug_insn_bb (end);
2356 return end;
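/* Roughly, for a block ending in a tablejump the insn stream looks like

	(jump_insn ...)            <- BB_END (bb)
	(code_label ...)
	(jump_table_data ...)
	(barrier)

   and the function above returns the trailing barrier instead of
   BB_END (bb).  */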
2359 /* Add all BBs reachable from entry via hot paths into the SET. */
2361 void
2362 find_bbs_reachable_by_hot_paths (hash_set<basic_block> *set)
2364 auto_vec<basic_block, 64> worklist;
2366 set->add (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2367 worklist.safe_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2369 while (worklist.length () > 0)
2371 basic_block bb = worklist.pop ();
2372 edge_iterator ei;
2373 edge e;
2375 FOR_EACH_EDGE (e, ei, bb->succs)
2376 if (BB_PARTITION (e->dest) != BB_COLD_PARTITION
2377 && !set->add (e->dest))
2378 worklist.safe_push (e->dest);
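/* E.g. for ENTRY -> bb2 (hot) -> bb3 (cold) -> bb4 (hot), only ENTRY
   and bb2 end up in SET: bb4 is excluded because its only path from
   the entry passes through the cold block bb3.  */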
2382 /* Sanity check partition hotness to ensure that basic blocks in
2383 the cold partition don't dominate basic blocks in the hot partition.
2384 If FLAG_ONLY is true, report violations as errors. Otherwise
2385 re-mark the dominated blocks as cold, since this is run after
2386 cfg optimizations that may make hot blocks previously reached
2387 by both hot and cold blocks now only reachable along cold paths. */
2389 static auto_vec<basic_block>
2390 find_partition_fixes (bool flag_only)
2392 basic_block bb;
2393 auto_vec<basic_block> bbs_to_fix;
2394 hash_set<basic_block> set;
2396 /* Callers check this. */
2397 gcc_checking_assert (crtl->has_bb_partition);
2399 find_bbs_reachable_by_hot_paths (&set);
2401 FOR_EACH_BB_FN (bb, cfun)
2402 if (!set.contains (bb)
2403 && BB_PARTITION (bb) != BB_COLD_PARTITION)
2405 if (flag_only)
2406 error ("non-cold basic block %d reachable only "
2407 "by paths crossing the cold partition", bb->index);
2408 else
2409 BB_SET_PARTITION (bb, BB_COLD_PARTITION);
2410 bbs_to_fix.safe_push (bb);
2413 return bbs_to_fix;
2416 /* Perform cleanup on the hot/cold bb partitioning after optimization
2417 passes that modify the cfg. */
2419 void
2420 fixup_partitions (void)
2422 if (!crtl->has_bb_partition)
2423 return;
2425 /* Delete any blocks that became unreachable and weren't
2426 already cleaned up, for example during edge forwarding
2427 and convert_jumps_to_returns. This will expose more
2428 opportunities for fixing the partition boundaries here.
2429 Also, the calculation of the dominance graph during verification
2430 will assert if there are unreachable nodes. */
2431 delete_unreachable_blocks ();
2433 /* If there are partitions, do a sanity check on them: A basic block in
2434 a cold partition cannot dominate a basic block in a hot partition.
2435 Fixup any that now violate this requirement, as a result of edge
2436 forwarding and unreachable block deletion. */
2437 auto_vec<basic_block> bbs_to_fix = find_partition_fixes (false);
2439 /* Do the partition fixup after all necessary blocks have been converted to
2440 cold, so that we only update the region crossings in the minimum number of
2441 places, which can require forcing edges to be non-fallthru. */
2442 if (! bbs_to_fix.is_empty ())
2444 do
2446 basic_block bb = bbs_to_fix.pop ();
2447 fixup_new_cold_bb (bb);
2449 while (! bbs_to_fix.is_empty ());
2451 /* Fix up hot cold block grouping if needed. */
2452 if (crtl->bb_reorder_complete && current_ir_type () == IR_RTL_CFGRTL)
2454 basic_block bb, first = NULL, second = NULL;
2455 int current_partition = BB_UNPARTITIONED;
2457 FOR_EACH_BB_FN (bb, cfun)
2459 if (current_partition != BB_UNPARTITIONED
2460 && BB_PARTITION (bb) != current_partition)
2462 if (first == NULL)
2463 first = bb;
2464 else if (second == NULL)
2465 second = bb;
2466 else
2468 /* If we switch partitions for the 3rd, 5th etc. time,
2469 move bbs first (inclusive) .. second (exclusive) right
2470 before bb. */
2471 basic_block prev_first = first->prev_bb;
2472 basic_block prev_second = second->prev_bb;
2473 basic_block prev_bb = bb->prev_bb;
2474 prev_first->next_bb = second;
2475 second->prev_bb = prev_first;
2476 prev_second->next_bb = bb;
2477 bb->prev_bb = prev_second;
2478 prev_bb->next_bb = first;
2479 first->prev_bb = prev_bb;
2480 rtx_insn *prev_first_insn = PREV_INSN (BB_HEAD (first));
2481 rtx_insn *prev_second_insn
2482 = PREV_INSN (BB_HEAD (second));
2483 rtx_insn *prev_bb_insn = PREV_INSN (BB_HEAD (bb));
2484 SET_NEXT_INSN (prev_first_insn) = BB_HEAD (second);
2485 SET_PREV_INSN (BB_HEAD (second)) = prev_first_insn;
2486 SET_NEXT_INSN (prev_second_insn) = BB_HEAD (bb);
2487 SET_PREV_INSN (BB_HEAD (bb)) = prev_second_insn;
2488 SET_NEXT_INSN (prev_bb_insn) = BB_HEAD (first);
2489 SET_PREV_INSN (BB_HEAD (first)) = prev_bb_insn;
2490 second = NULL;
2493 current_partition = BB_PARTITION (bb);
2495 gcc_assert (!second);
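/* Sketch of the rotation done above when a third partition switch is
   seen at BB: the sub-chain [first, second) is moved immediately
   before BB, in both the bb chain and the insn stream,

	before:  ... | first ... | second ... | bb ...
	after:   ... | second ... | first ... | bb ...

   so that a single hot/cold transition remains.  */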
2500 /* Verify, in the basic block chain, that there is at most one switch
2501 between hot/cold partitions. This condition will not be true until
2502 after reorder_basic_blocks is called. */
2504 static bool
2505 verify_hot_cold_block_grouping (void)
2507 basic_block bb;
2508 bool err = false;
2509 bool switched_sections = false;
2510 int current_partition = BB_UNPARTITIONED;
2512 /* Even after bb reordering is complete, we go into cfglayout mode
2513 again (in compgoto). Ensure we don't call this before going back
2514 into linearized RTL, at which point any layout fixes will have been committed.
2515 if (!crtl->bb_reorder_complete
2516 || current_ir_type () != IR_RTL_CFGRTL)
2517 return err;
2519 FOR_EACH_BB_FN (bb, cfun)
2521 if (current_partition != BB_UNPARTITIONED
2522 && BB_PARTITION (bb) != current_partition)
2524 if (switched_sections)
2526 error ("multiple hot/cold transitions found (bb %i)",
2527 bb->index);
2528 err = true;
2530 else
2531 switched_sections = true;
2533 if (!crtl->has_bb_partition)
2534 error ("partition found but function partition flag not set");
2536 current_partition = BB_PARTITION (bb);
2539 return err;
2543 /* Perform several checks on the edges out of each block, such as
2544 the consistency of the branch probabilities, the correctness
2545 of hot/cold partition crossing edges, and the number of expected
2546 successor edges. Also verify that the dominance relationship
2547 between hot/cold blocks is sane. */
2549 static bool
2550 rtl_verify_edges (void)
2552 bool err = false;
2553 basic_block bb;
2555 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2557 int n_fallthru = 0, n_branch = 0, n_abnormal_call = 0, n_sibcall = 0;
2558 int n_eh = 0, n_abnormal = 0;
2559 edge e, fallthru = NULL;
2560 edge_iterator ei;
2561 rtx note;
2562 bool has_crossing_edge = false;
2564 if (JUMP_P (BB_END (bb))
2565 && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
2566 && EDGE_COUNT (bb->succs) >= 2
2567 && any_condjump_p (BB_END (bb)))
2569 if (!BRANCH_EDGE (bb)->probability.initialized_p ())
2571 if (profile_status_for_fn (cfun) != PROFILE_ABSENT)
2573 error ("verify_flow_info: "
2574 "REG_BR_PROB is set but cfg probability is not");
2575 err = true;
2578 else if (XINT (note, 0)
2579 != BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ()
2580 && profile_status_for_fn (cfun) != PROFILE_ABSENT)
2582 error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
2583 XINT (note, 0),
2584 BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ());
2585 err = true;
2589 FOR_EACH_EDGE (e, ei, bb->succs)
2591 bool is_crossing;
2593 if (e->flags & EDGE_FALLTHRU)
2594 n_fallthru++, fallthru = e;
2596 is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
2597 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
2598 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun));
2599 has_crossing_edge |= is_crossing;
2600 if (e->flags & EDGE_CROSSING)
2602 if (!is_crossing)
2604 error ("EDGE_CROSSING incorrectly set across same section");
2605 err = true;
2607 if (e->flags & EDGE_FALLTHRU)
2609 error ("fallthru edge crosses section boundary in bb %i",
2610 e->src->index);
2611 err = true;
2613 if (e->flags & EDGE_EH)
2615 error ("EH edge crosses section boundary in bb %i",
2616 e->src->index);
2617 err = true;
2619 if (JUMP_P (BB_END (bb)) && !CROSSING_JUMP_P (BB_END (bb)))
2621 error ("no region crossing jump at section boundary in bb %i",
2622 bb->index);
2623 err = true;
2626 else if (is_crossing)
2628 error ("EDGE_CROSSING missing across section boundary");
2629 err = true;
2632 if ((e->flags & ~(EDGE_DFS_BACK
2633 | EDGE_CAN_FALLTHRU
2634 | EDGE_IRREDUCIBLE_LOOP
2635 | EDGE_LOOP_EXIT
2636 | EDGE_CROSSING
2637 | EDGE_PRESERVE)) == 0)
2638 n_branch++;
2640 if (e->flags & EDGE_ABNORMAL_CALL)
2641 n_abnormal_call++;
2643 if (e->flags & EDGE_SIBCALL)
2644 n_sibcall++;
2646 if (e->flags & EDGE_EH)
2647 n_eh++;
2649 if (e->flags & EDGE_ABNORMAL)
2650 n_abnormal++;
2653 if (!has_crossing_edge
2654 && JUMP_P (BB_END (bb))
2655 && CROSSING_JUMP_P (BB_END (bb)))
2657 print_rtl_with_bb (stderr, get_insns (), TDF_BLOCKS | TDF_DETAILS);
2658 error ("region crossing jump across same section in bb %i",
2659 bb->index);
2660 err = true;
2663 if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
2665 error ("missing REG_EH_REGION note at the end of bb %i", bb->index);
2666 err = true;
2668 if (n_eh > 1)
2670 error ("too many exception handling edges in bb %i", bb->index);
2671 err = true;
2673 if (n_branch
2674 && (!JUMP_P (BB_END (bb))
2675 || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
2676 || any_condjump_p (BB_END (bb))))))
2678 error ("too many outgoing branch edges from bb %i", bb->index);
2679 err = true;
2681 if (n_fallthru && any_uncondjump_p (BB_END (bb)))
2683 error ("fallthru edge after unconditional jump in bb %i", bb->index);
2684 err = true;
2686 if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
2688 error ("wrong number of branch edges after unconditional jump"
2689 " in bb %i", bb->index);
2690 err = true;
2692 if (n_branch != 1 && any_condjump_p (BB_END (bb))
2693 && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
2695 error ("wrong number of branch edges after conditional jump"
2696 " in bb %i", bb->index);
2697 err = true;
2699 if (n_abnormal_call && !CALL_P (BB_END (bb)))
2701 error ("abnormal call edges for non-call insn in bb %i", bb->index);
2702 err = true;
2704 if (n_sibcall && !CALL_P (BB_END (bb)))
2706 error ("sibcall edges for non-call insn in bb %i", bb->index);
2707 err = true;
2709 if (n_abnormal > n_eh
2710 && !(CALL_P (BB_END (bb))
2711 && n_abnormal == n_abnormal_call + n_sibcall)
2712 && (!JUMP_P (BB_END (bb))
2713 || any_condjump_p (BB_END (bb))
2714 || any_uncondjump_p (BB_END (bb))))
2716 error ("abnormal edges for no purpose in bb %i", bb->index);
2717 err = true;
2720 int has_eh = -1;
2721 FOR_EACH_EDGE (e, ei, bb->preds)
2723 if (has_eh == -1)
2724 has_eh = (e->flags & EDGE_EH);
2725 if ((e->flags & EDGE_EH) == has_eh)
2726 continue;
2727 error ("EH incoming edge mixed with non-EH incoming edges "
2728 "in bb %i", bb->index);
2729 err = true;
2730 break;
2734 /* If there are partitions, do a sanity check on them: A basic block in
2735 a cold partition cannot dominate a basic block in a hot partition. */
2736 if (crtl->has_bb_partition && !err
2737 && current_ir_type () == IR_RTL_CFGLAYOUT)
2739 auto_vec<basic_block> bbs_to_fix = find_partition_fixes (true);
2740 err = !bbs_to_fix.is_empty ();
2743 /* Clean up. */
2744 return err;
2747 /* Checks on the instructions within blocks. Currently checks that each
2748 block starts with a basic block note, and that basic block notes and
2749 control flow jumps are not found in the middle of the block. */
2751 static bool
2752 rtl_verify_bb_insns (void)
2754 rtx_insn *x;
2755 bool err = false;
2756 basic_block bb;
2758 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2760 /* Now check the header of the basic
2761 block. It ought to contain an optional CODE_LABEL followed
2762 by NOTE_BASIC_BLOCK. */
2763 x = BB_HEAD (bb);
2764 if (LABEL_P (x))
2766 if (BB_END (bb) == x)
2768 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2769 bb->index);
2770 err = true;
2773 x = NEXT_INSN (x);
2776 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
2778 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2779 bb->index);
2780 err = true;
2783 if (BB_END (bb) == x)
2784 /* Do checks for empty blocks here. */
2786 else
2787 for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
2789 if (NOTE_INSN_BASIC_BLOCK_P (x))
2791 error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
2792 INSN_UID (x), bb->index);
2793 err = true;
2796 if (x == BB_END (bb))
2797 break;
2799 if (control_flow_insn_p (x))
2801 error ("in basic block %d:", bb->index);
2802 fatal_insn ("flow control insn inside a basic block", x);
2807 /* Clean up. */
2808 return err;
2811 /* Verify that block pointers for instructions in basic blocks, headers and
2812 footers are set appropriately. */
2814 static bool
2815 rtl_verify_bb_pointers (void)
2817 bool err = false;
2818 basic_block bb;
2820 /* Check the general integrity of the basic blocks. */
2821 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2823 rtx_insn *insn;
2825 if (!(bb->flags & BB_RTL))
2827 error ("BB_RTL flag not set for block %d", bb->index);
2828 err = true;
2831 FOR_BB_INSNS (bb, insn)
2832 if (BLOCK_FOR_INSN (insn) != bb)
2834 error ("insn %d basic block pointer is %d, should be %d",
2835 INSN_UID (insn),
2836 BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
2837 bb->index);
2838 err = true;
2841 for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
2842 if (!BARRIER_P (insn)
2843 && BLOCK_FOR_INSN (insn) != NULL)
2845 error ("insn %d in header of bb %d has non-NULL basic block",
2846 INSN_UID (insn), bb->index);
2847 err = true;
2849 for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
2850 if (!BARRIER_P (insn)
2851 && BLOCK_FOR_INSN (insn) != NULL)
2853 error ("insn %d in footer of bb %d has non-NULL basic block",
2854 INSN_UID (insn), bb->index);
2855 err = true;
2859 /* Clean up. */
2860 return err;
2863 /* Verify the CFG and RTL consistency common for both underlying RTL and
2864 cfglayout RTL.
2866 Currently it does the following checks:
2868 - overlapping of basic blocks
2869 - insns with wrong BLOCK_FOR_INSN pointers
2870 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
2871 - tails of basic blocks (ensure that boundary is necessary)
2872 - scans body of the basic block for JUMP_INSN, CODE_LABEL
2873 and NOTE_INSN_BASIC_BLOCK
2874 - verify that no fall_thru edge crosses hot/cold partition boundaries
2875 - verify that there are no pending RTL branch predictions
2876 - verify that hot blocks are not dominated by cold blocks
2878 In the future it can be extended to check a lot of other stuff as well
2879 (reachability of basic blocks, life information, etc.). */
2881 static bool
2882 rtl_verify_flow_info_1 (void)
2884 bool err = false;
2886 if (rtl_verify_bb_pointers ())
2887 err = true;
2889 if (rtl_verify_bb_insns ())
2890 err = true;
2892 if (rtl_verify_edges ())
2893 err = true;
2895 return err;
2898 /* Walk the instruction chain and verify that bb head/end pointers
2899 are correct, and that instructions are in exactly one bb and have
2900 correct block pointers. */
2902 static bool
2903 rtl_verify_bb_insn_chain (void)
2905 basic_block bb;
2906 bool err = false;
2907 rtx_insn *x;
2908 rtx_insn *last_head = get_last_insn ();
2909 basic_block *bb_info;
2910 const int max_uid = get_max_uid ();
2912 bb_info = XCNEWVEC (basic_block, max_uid);
2914 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2916 rtx_insn *head = BB_HEAD (bb);
2917 rtx_insn *end = BB_END (bb);
2919 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2921 /* Verify the end of the basic block is in the INSN chain. */
2922 if (x == end)
2923 break;
2925 /* And that the code outside of basic blocks has NULL bb field. */
2926 if (!BARRIER_P (x)
2927 && BLOCK_FOR_INSN (x) != NULL)
2929 error ("insn %d outside of basic blocks has non-NULL bb field",
2930 INSN_UID (x));
2931 err = true;
2935 if (!x)
2937 error ("end insn %d for block %d not found in the insn stream",
2938 INSN_UID (end), bb->index);
2939 err = true;
2942 /* Work backwards from the end to the head of the basic block
2943 to verify the head is in the RTL chain. */
2944 for (; x != NULL_RTX; x = PREV_INSN (x))
2946 /* While walking over the insn chain, verify insns appear
2947 in only one basic block. */
2948 if (bb_info[INSN_UID (x)] != NULL)
2950 error ("insn %d is in multiple basic blocks (%d and %d)",
2951 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
2952 err = true;
2955 bb_info[INSN_UID (x)] = bb;
2957 if (x == head)
2958 break;
2960 if (!x)
2962 error ("head insn %d for block %d not found in the insn stream",
2963 INSN_UID (head), bb->index);
2964 err = true;
2967 last_head = PREV_INSN (x);
2970 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2972 /* Check that the code before the first basic block has NULL
2973 bb field. */
2974 if (!BARRIER_P (x)
2975 && BLOCK_FOR_INSN (x) != NULL)
2977 error ("insn %d outside of basic blocks has non-NULL bb field",
2978 INSN_UID (x));
2979 err = true;
2982 free (bb_info);
2984 return err;
2987 /* Verify that fallthru edges point to adjacent blocks in layout order and
2988 that barriers exist after non-fallthru blocks. */
2990 static bool
2991 rtl_verify_fallthru (void)
2993 basic_block bb;
2994 bool err = false;
2996 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2998 edge e;
3000 e = find_fallthru_edge (bb->succs);
3001 if (!e)
3003 rtx_insn *insn;
3005 /* Ensure existence of barrier in BB with no fallthru edges. */
3006 for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
3008 if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
3010 error ("missing barrier after block %i", bb->index);
3011 err = true;
3012 break;
3014 if (BARRIER_P (insn))
3015 break;
3018 else if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
3019 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3021 rtx_insn *insn;
3023 if (e->src->next_bb != e->dest)
3025 error
3026 ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
3027 e->src->index, e->dest->index);
3028 err = true;
3030 else
3031 for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
3032 insn = NEXT_INSN (insn))
3033 if (BARRIER_P (insn) || NONDEBUG_INSN_P (insn))
3035 error ("verify_flow_info: Incorrect fallthru %i->%i",
3036 e->src->index, e->dest->index);
3037 error ("wrong insn in the fallthru edge");
3038 debug_rtx (insn);
3039 err = true;
3044 return err;
3047 /* Verify that blocks are laid out in consecutive order. While walking the
3048 instructions, verify that all expected instructions are inside the basic
3049 blocks, and that all returns are followed by barriers. */
3051 static bool
3052 rtl_verify_bb_layout (void)
3054 basic_block bb;
3055 bool err = false;
3056 rtx_insn *x, *y;
3057 int num_bb_notes;
3058 rtx_insn * const rtx_first = get_insns ();
3059 basic_block last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun), curr_bb = NULL;
3061 num_bb_notes = 0;
3063 for (x = rtx_first; x; x = NEXT_INSN (x))
3065 if (NOTE_INSN_BASIC_BLOCK_P (x))
3067 bb = NOTE_BASIC_BLOCK (x);
3069 num_bb_notes++;
3070 if (bb != last_bb_seen->next_bb)
3071 internal_error ("basic blocks not laid down consecutively");
3073 curr_bb = last_bb_seen = bb;
3076 if (!curr_bb)
3078 switch (GET_CODE (x))
3080 case BARRIER:
3081 case NOTE:
3082 break;
3084 case CODE_LABEL:
3085 /* An ADDR_VEC is placed outside any basic block. */
3086 if (NEXT_INSN (x)
3087 && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
3088 x = NEXT_INSN (x);
3090 /* But in any case, non-deletable labels can appear anywhere. */
3091 break;
3093 default:
3094 fatal_insn ("insn outside basic block", x);
3098 if (JUMP_P (x)
3099 && returnjump_p (x) && ! condjump_p (x)
3100 && ! ((y = next_nonnote_nondebug_insn (x))
3101 && BARRIER_P (y)))
3102 fatal_insn ("return not followed by barrier", x);
3104 if (curr_bb && x == BB_END (curr_bb))
3105 curr_bb = NULL;
3108 if (num_bb_notes != n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS)
3109 internal_error
3110 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
3111 num_bb_notes, n_basic_blocks_for_fn (cfun));
3113 return err;
3116 /* Verify the CFG and RTL consistency common for both underlying RTL and
3117 cfglayout RTL, plus consistency checks specific to linearized RTL mode.
3119 Currently it does the following checks:
3120 - all checks of rtl_verify_flow_info_1
3121 - test head/end pointers
3122 - check that blocks are laid out in consecutive order
3123 - check that all insns are in the basic blocks
3124 (except the switch handling code, barriers and notes)
3125 - check that all returns are followed by barriers
3126 - check that all fallthru edges point to the adjacent blocks
3127 - verify that there is a single hot/cold partition boundary after bbro */
3129 static bool
3130 rtl_verify_flow_info (void)
3132 bool err = false;
3134 if (rtl_verify_flow_info_1 ())
3135 err = true;
3137 if (rtl_verify_bb_insn_chain ())
3138 err = true;
3140 if (rtl_verify_fallthru ())
3141 err = true;
3143 if (rtl_verify_bb_layout ())
3144 err = true;
3146 if (verify_hot_cold_block_grouping ())
3147 err = true;
3149 return err;
3152 /* Assume that the preceding pass has possibly eliminated jump instructions
3153 or converted the unconditional jumps. Eliminate the edges from the CFG.
3154 Return true if any edges are eliminated. */
3156 bool
3157 purge_dead_edges (basic_block bb)
3159 edge e;
3160 rtx_insn *insn = BB_END (bb);
3161 rtx note;
3162 bool purged = false;
3163 bool found;
3164 edge_iterator ei;
3166 if ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb))
3168 insn = PREV_INSN (insn);
3169 while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));
3171 /* If this instruction cannot trap, remove REG_EH_REGION notes. */
3172 if (NONJUMP_INSN_P (insn)
3173 && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
3175 rtx eqnote;
3177 if (! may_trap_p (PATTERN (insn))
3178 || ((eqnote = find_reg_equal_equiv_note (insn))
3179 && ! may_trap_p (XEXP (eqnote, 0))))
3180 remove_note (insn, note);
3183 /* Cleanup abnormal edges caused by exceptions or non-local gotos. */
3184 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3186 bool remove = false;
3188 /* There are three types of edges we need to handle correctly here: EH
3189 edges, abnormal call EH edges, and abnormal call non-EH edges. The
3190 latter can appear when nonlocal gotos are used. */
3191 if (e->flags & EDGE_ABNORMAL_CALL)
3193 if (!CALL_P (insn))
3194 remove = true;
3195 else if (can_nonlocal_goto (insn))
3197 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
3199 else if (flag_tm && find_reg_note (insn, REG_TM, NULL))
3201 else
3202 remove = true;
3204 else if (e->flags & EDGE_EH)
3205 remove = !can_throw_internal (insn);
3207 if (remove)
3209 remove_edge (e);
3210 df_set_bb_dirty (bb);
3211 purged = true;
3213 else
3214 ei_next (&ei);
3217 if (JUMP_P (insn))
3219 rtx note;
3220 edge b,f;
3221 edge_iterator ei;
3223 /* We care only about conditional jumps, return jumps and simplejumps. */
3224 if (!any_condjump_p (insn)
3225 && !returnjump_p (insn)
3226 && !simplejump_p (insn))
3227 return purged;
3229 /* Branch probability/prediction notes are defined only for
3230 condjumps. We've possibly turned a condjump into a simplejump. */
3231 if (simplejump_p (insn))
3233 note = find_reg_note (insn, REG_BR_PROB, NULL);
3234 if (note)
3235 remove_note (insn, note);
3236 while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
3237 remove_note (insn, note);
3240 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3242 /* Avoid letting abnormal flags leak from computed jumps turned
3243 into simplejumps. */
3245 e->flags &= ~EDGE_ABNORMAL;
3247 /* See if this edge is one we should keep. */
3248 if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
3249 /* A conditional jump can fall through into the next
3250 block, so we should keep the edge. */
3252 ei_next (&ei);
3253 continue;
3255 else if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
3256 && BB_HEAD (e->dest) == JUMP_LABEL (insn))
3257 /* If the destination block is the target of the jump,
3258 keep the edge. */
3260 ei_next (&ei);
3261 continue;
3263 else if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
3264 && returnjump_p (insn))
3265 /* If the destination block is the exit block, and this
3266 instruction is a return, then keep the edge. */
3268 ei_next (&ei);
3269 continue;
3271 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
3272 /* Keep the edges that correspond to exceptions thrown by
3273 this instruction and rematerialize the EDGE_ABNORMAL
3274 flag we just cleared above. */
3276 e->flags |= EDGE_ABNORMAL;
3277 ei_next (&ei);
3278 continue;
3281 /* We do not need this edge. */
3282 df_set_bb_dirty (bb);
3283 purged = true;
3284 remove_edge (e);
3287 if (EDGE_COUNT (bb->succs) == 0 || !purged)
3288 return purged;
3290 if (dump_file)
3291 fprintf (dump_file, "Purged edges from bb %i\n", bb->index);
3293 if (!optimize)
3294 return purged;
3296 /* Redistribute probabilities. */
3297 if (single_succ_p (bb))
3299 single_succ_edge (bb)->probability = profile_probability::always ();
3301 else
3303 note = find_reg_note (insn, REG_BR_PROB, NULL);
3304 if (!note)
3305 return purged;
3307 b = BRANCH_EDGE (bb);
3308 f = FALLTHRU_EDGE (bb);
3309 b->probability = profile_probability::from_reg_br_prob_note
3310 (XINT (note, 0));
3311 f->probability = b->probability.invert ();
3314 return purged;
3316 else if (CALL_P (insn) && SIBLING_CALL_P (insn))
3318 /* First, there should not be any EH or ABCALL edges resulting
3319 from non-local gotos and the like. If there were, we shouldn't
3320 have created the sibcall in the first place. Second, there
3321 should of course never have been a fallthru edge. */
3322 gcc_assert (single_succ_p (bb));
3323 gcc_assert (single_succ_edge (bb)->flags
3324 == (EDGE_SIBCALL | EDGE_ABNORMAL));
3326 return false;
3329 /* If we don't see a jump insn, we don't know exactly why the block would
3330 have been broken at this point. Look for a simple, non-fallthru edge,
3331 as these are only created by conditional branches. If we find such an
3332 edge we know that there used to be a jump here and can then safely
3333 remove all non-fallthru edges. */
3334 found = false;
3335 FOR_EACH_EDGE (e, ei, bb->succs)
3336 if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
3338 found = true;
3339 break;
3342 if (!found)
3343 return purged;
3345 /* Remove all but the fake and fallthru edges. The fake edge may be
3346 the only successor for this block in the case of noreturn
3347 calls. */
3348 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3350 if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
3352 df_set_bb_dirty (bb);
3353 remove_edge (e);
3354 purged = true;
3356 else
3357 ei_next (&ei);
3360 gcc_assert (single_succ_p (bb));
3362 single_succ_edge (bb)->probability = profile_probability::always ();
3364 if (dump_file)
3365 fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
3366 bb->index);
3367 return purged;
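/* Typical use: after a pass has simplified the control flow insn
   ending BB (e.g. folded a condjump into a simplejump), a call such as

	if (purge_dead_edges (bb))
	  ...

   removes the outgoing edges that no longer match the insn and, when
   optimizing, redistributes the probabilities of the survivors.  */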
3370 /* Search all basic blocks for potentially dead edges and purge them. Return
3371 true if some edge has been eliminated. */
3373 bool
3374 purge_all_dead_edges (void)
3376 bool purged = false;
3377 basic_block bb;
3379 FOR_EACH_BB_FN (bb, cfun)
3380 if (purge_dead_edges (bb))
3381 purged = true;
3383 return purged;
3386 /* This is used by a few passes that emit some instructions after abnormal
3387 calls, moving the basic block's end, while they in fact do want to emit
3388 them on the fallthru edge. Look for abnormal call edges, search
3389 backward for the call in the block, and insert the instructions on the edge instead.
3391 Similarly, handle instructions throwing exceptions internally.
3393 Return true when instructions have been found and inserted on edges. */
3395 bool
3396 fixup_abnormal_edges (void)
3398 bool inserted = false;
3399 basic_block bb;
3401 FOR_EACH_BB_FN (bb, cfun)
3403 edge e;
3404 edge_iterator ei;
3406 /* Look for cases we are interested in - calls or instructions causing
3407 exceptions. */
3408 FOR_EACH_EDGE (e, ei, bb->succs)
3409 if ((e->flags & EDGE_ABNORMAL_CALL)
3410 || ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
3411 == (EDGE_ABNORMAL | EDGE_EH)))
3412 break;
3414 if (e && !CALL_P (BB_END (bb)) && !can_throw_internal (BB_END (bb)))
3416 rtx_insn *insn;
3418 /* Get past the new insns generated. Allow notes, as the insns
3419 may already be deleted. */
3420 insn = BB_END (bb);
3421 while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
3422 && !can_throw_internal (insn)
3423 && insn != BB_HEAD (bb))
3424 insn = PREV_INSN (insn);
3426 if (CALL_P (insn) || can_throw_internal (insn))
3428 rtx_insn *stop, *next;
3430 e = find_fallthru_edge (bb->succs);
3432 stop = NEXT_INSN (BB_END (bb));
3433 BB_END (bb) = insn;
3435 for (insn = NEXT_INSN (insn); insn != stop; insn = next)
3437 next = NEXT_INSN (insn);
3438 if (INSN_P (insn))
3440 delete_insn (insn);
3442 /* Sometimes there's still the return value USE.
3443 If it's placed after a trapping call (i.e. that
3444 call is the last insn anyway), we have no fallthru
3445 edge. Simply delete this use and don't try to insert
3446 on the non-existent edge.
3447 Similarly, sometimes a call that can throw is
3448 followed in the source with __builtin_unreachable (),
3449 meaning that there is UB if the call returns rather
3450 than throws. If there weren't any instructions
3451 following such calls before, supposedly even the ones
3452 we've deleted aren't significant and can be
3453 removed. */
3454 if (e)
3456 /* We're not deleting it, we're moving it. */
3457 insn->set_undeleted ();
3458 SET_PREV_INSN (insn) = NULL_RTX;
3459 SET_NEXT_INSN (insn) = NULL_RTX;
3461 insert_insn_on_edge (insn, e);
3462 inserted = true;
3465 else if (!BARRIER_P (insn))
3466 set_block_for_insn (insn, NULL);
3470 /* It may be that we don't find any trapping insn. In this
3471 case we discovered quite late that the insn that had been
3472 marked as can_throw_internal in fact couldn't trap at all.
3473 So we should in fact delete the EH edges out of the block. */
3474 else
3475 purge_dead_edges (bb);
3479 return inserted;
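/* For example, when a pass such as reload emits insns after a call
   that ends BB with an abnormal edge, the code above restores BB_END
   to the call and re-queues the trailing insns on the fallthru edge
   with insert_insn_on_edge, leaving commit_edge_insertions to place
   them correctly.  */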
3482 /* Delete the unconditional jump INSN and adjust the CFG correspondingly.
3483 Note that the INSN should be deleted *after* removing dead edges, so
3484 that the kept edge is the fallthrough edge for a (set (pc) (pc))
3485 but not for a (set (pc) (label_ref FOO)). */
3487 void
3488 update_cfg_for_uncondjump (rtx_insn *insn)
3490 basic_block bb = BLOCK_FOR_INSN (insn);
3491 gcc_assert (BB_END (bb) == insn);
3493 purge_dead_edges (bb);
3495 if (current_ir_type () != IR_RTL_CFGLAYOUT)
3497 if (!find_fallthru_edge (bb->succs))
3499 auto barrier = next_nonnote_nondebug_insn (insn);
3500 if (!barrier || !BARRIER_P (barrier))
3501 emit_barrier_after (insn);
3503 return;
3506 delete_insn (insn);
3507 if (EDGE_COUNT (bb->succs) == 1)
3509 rtx_insn *insn;
3511 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3513 /* Remove barriers from the footer if there are any. */
3514 for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
3515 if (BARRIER_P (insn))
3517 if (PREV_INSN (insn))
3518 SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
3519 else
3520 BB_FOOTER (bb) = NEXT_INSN (insn);
3521 if (NEXT_INSN (insn))
3522 SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
3524 else if (LABEL_P (insn))
3525 break;
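/* The ordering requirement above matters because purge_dead_edges
   consults the jump to decide which edges survive: for a simplified
   (set (pc) (pc)) only the fallthru edge is kept, whereas for
   (set (pc) (label_ref FOO)) only the branch edge is; deleting INSN
   first would leave no way to tell the two cases apart.  */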
3529 /* Cut the insns from FIRST to LAST out of the insns stream. */
3531 rtx_insn *
3532 unlink_insn_chain (rtx_insn *first, rtx_insn *last)
3534 rtx_insn *prevfirst = PREV_INSN (first);
3535 rtx_insn *nextlast = NEXT_INSN (last);
3537 SET_PREV_INSN (first) = NULL;
3538 SET_NEXT_INSN (last) = NULL;
3539 if (prevfirst)
3540 SET_NEXT_INSN (prevfirst) = nextlast;
3541 if (nextlast)
3542 SET_PREV_INSN (nextlast) = prevfirst;
3543 else
3544 set_last_insn (prevfirst);
3545 if (!prevfirst)
3546 set_first_insn (nextlast);
3547 return first;
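/* Sketch: cutting FIRST..LAST out of the doubly-linked insn list

	prev <-> FIRST ... LAST <-> next     becomes     prev <-> next

   with set_first_insn/set_last_insn updated when the cut touches
   either end of the stream; the detached sub-chain is returned with
   its outer links cleared.  */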
3550 /* Skip over inter-block insns occurring after BB which are typically
3551 associated with BB (e.g., barriers). If there are any such insns,
3552 we return the last one. Otherwise, we return the end of BB. */
3554 static rtx_insn *
3555 skip_insns_after_block (basic_block bb)
3557 rtx_insn *insn, *last_insn, *next_head, *prev;
3559 next_head = NULL;
3560 if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3561 next_head = BB_HEAD (bb->next_bb);
3563 for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
3565 if (insn == next_head)
3566 break;
3568 switch (GET_CODE (insn))
3570 case BARRIER:
3571 last_insn = insn;
3572 continue;
3574 case NOTE:
3575 gcc_assert (NOTE_KIND (insn) != NOTE_INSN_BLOCK_END);
3576 continue;
3578 case CODE_LABEL:
3579 if (NEXT_INSN (insn)
3580 && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
3582 insn = NEXT_INSN (insn);
3583 last_insn = insn;
3584 continue;
3586 break;
3588 default:
3589 break;
3592 break;
3595 /* It is possible to hit a contradictory sequence. For instance:
3597 jump_insn
3598 NOTE_INSN_BLOCK_BEG
3599 barrier
3601 where the barrier belongs to the jump_insn, but the note does not. This can be
3602 created by removing the basic block originally following
3603 NOTE_INSN_BLOCK_BEG. In such a case, reorder the notes. */
3605 for (insn = last_insn; insn != BB_END (bb); insn = prev)
3607 prev = PREV_INSN (insn);
3608 if (NOTE_P (insn))
3609 switch (NOTE_KIND (insn))
3611 case NOTE_INSN_BLOCK_END:
3612 gcc_unreachable ();
3613 break;
3614 case NOTE_INSN_DELETED:
3615 case NOTE_INSN_DELETED_LABEL:
3616 case NOTE_INSN_DELETED_DEBUG_LABEL:
3617 continue;
3618 default:
3619 reorder_insns (insn, insn, last_insn);
3623 return last_insn;
3626 /* Locate or create a label for a given basic block. */
3628 static rtx_insn *
3629 label_for_bb (basic_block bb)
3631 rtx_insn *label = BB_HEAD (bb);
3633 if (!LABEL_P (label))
3635 if (dump_file)
3636 fprintf (dump_file, "Emitting label for block %d\n", bb->index);
3638 label = block_label (bb);
3641 return label;
3644 /* Locate the effective beginning and end of the insn chain for each
3645 block, as defined by skip_insns_after_block above. */
3647 static void
3648 record_effective_endpoints (void)
3650 rtx_insn *next_insn;
3651 basic_block bb;
3652 rtx_insn *insn;
3654 for (insn = get_insns ();
3655 insn
3656 && NOTE_P (insn)
3657 && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
3658 insn = NEXT_INSN (insn))
3659 continue;
3660 /* No basic blocks at all? */
3661 gcc_assert (insn);
3663 if (PREV_INSN (insn))
3664 cfg_layout_function_header =
3665 unlink_insn_chain (get_insns (), PREV_INSN (insn));
3666 else
3667 cfg_layout_function_header = NULL;
3669 next_insn = get_insns ();
3670 FOR_EACH_BB_FN (bb, cfun)
3672 rtx_insn *end;
3674 if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
3675 BB_HEADER (bb) = unlink_insn_chain (next_insn,
3676 PREV_INSN (BB_HEAD (bb)));
3677 end = skip_insns_after_block (bb);
3678 if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
3679 BB_FOOTER (bb) = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
3680 next_insn = NEXT_INSN (BB_END (bb));
3683 cfg_layout_function_footer = next_insn;
3684 if (cfg_layout_function_footer)
3685 cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
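/* After this runs, the insn stream is conceptually partitioned as

	[function header] [bb1 header] bb1 [bb1 footer] [bb2 header] ...
	[function footer]

   with the bracketed pieces unlinked from the main chain and stashed
   in cfg_layout_function_header/footer and BB_HEADER/BB_FOOTER, to be
   re-linked later by fixup_reorder_chain.  */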
3688 namespace {
3690 const pass_data pass_data_into_cfg_layout_mode =
3692 RTL_PASS, /* type */
3693 "into_cfglayout", /* name */
3694 OPTGROUP_NONE, /* optinfo_flags */
3695 TV_CFG, /* tv_id */
3696 0, /* properties_required */
3697 PROP_cfglayout, /* properties_provided */
3698 0, /* properties_destroyed */
3699 0, /* todo_flags_start */
3700 0, /* todo_flags_finish */
3703 class pass_into_cfg_layout_mode : public rtl_opt_pass
3705 public:
3706 pass_into_cfg_layout_mode (gcc::context *ctxt)
3707 : rtl_opt_pass (pass_data_into_cfg_layout_mode, ctxt)
3710 /* opt_pass methods: */
3711 unsigned int execute (function *) final override
3713 cfg_layout_initialize (0);
3714 return 0;
3717 }; // class pass_into_cfg_layout_mode
3719 } // anon namespace
3721 rtl_opt_pass *
3722 make_pass_into_cfg_layout_mode (gcc::context *ctxt)
3724 return new pass_into_cfg_layout_mode (ctxt);
3727 namespace {
3729 const pass_data pass_data_outof_cfg_layout_mode =
3731 RTL_PASS, /* type */
3732 "outof_cfglayout", /* name */
3733 OPTGROUP_NONE, /* optinfo_flags */
3734 TV_CFG, /* tv_id */
3735 0, /* properties_required */
3736 0, /* properties_provided */
3737 PROP_cfglayout, /* properties_destroyed */
3738 0, /* todo_flags_start */
3739 0, /* todo_flags_finish */
3742 class pass_outof_cfg_layout_mode : public rtl_opt_pass
3744 public:
3745 pass_outof_cfg_layout_mode (gcc::context *ctxt)
3746 : rtl_opt_pass (pass_data_outof_cfg_layout_mode, ctxt)
3749 /* opt_pass methods: */
3750 unsigned int execute (function *) final override;
3752 }; // class pass_outof_cfg_layout_mode
3754 unsigned int
3755 pass_outof_cfg_layout_mode::execute (function *fun)
3757 basic_block bb;
3759 FOR_EACH_BB_FN (bb, fun)
3760 if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (fun))
3761 bb->aux = bb->next_bb;
3763 cfg_layout_finalize ();
3765 return 0;
3768 } // anon namespace
3770 rtl_opt_pass *
3771 make_pass_outof_cfg_layout_mode (gcc::context *ctxt)
3773 return new pass_outof_cfg_layout_mode (ctxt);
3777 /* Link the basic blocks in the correct order, compacting the basic
3778 block queue while at it. If STAY_IN_CFGLAYOUT_MODE is false, this
3779 function also clears the basic block header and footer fields.
3781 This function is usually called after a pass (e.g. tracer) finishes
3782 some transformations while in cfglayout mode. The required sequence
3783 of the basic blocks is in a linked list along the bb->aux field.
3784 This function re-links the basic block prev_bb and next_bb pointers
3785 accordingly, and it compacts and renumbers the blocks.
3787 FIXME: This currently works only for RTL, but the only RTL-specific
3788 bits are the STAY_IN_CFGLAYOUT_MODE bits. The tracer pass was moved
3789 to GIMPLE a long time ago, but it doesn't relink the basic block
3790 chain. It could do that (to give better initial RTL) if this function
3791 is made IR-agnostic (and moved to cfganal.cc or cfg.cc while at it). */
3793 void
3794 relink_block_chain (bool stay_in_cfglayout_mode)
3796 basic_block bb, prev_bb;
3797 int index;
3799 /* Maybe dump the re-ordered sequence. */
3800 if (dump_file)
3802 fprintf (dump_file, "Reordered sequence:\n");
3803 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, index =
3804 NUM_FIXED_BLOCKS;
3806 bb = (basic_block) bb->aux, index++)
3808 fprintf (dump_file, " %i ", index);
3809 if (get_bb_original (bb))
3810 fprintf (dump_file, "duplicate of %i\n",
3811 get_bb_original (bb)->index);
3812 else if (forwarder_block_p (bb)
3813 && !LABEL_P (BB_HEAD (bb)))
3814 fprintf (dump_file, "compensation\n");
3815 else
3816 fprintf (dump_file, "bb %i\n", bb->index);
3820 /* Now reorder the blocks. */
3821 prev_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
3822 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
3823 for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
3825 bb->prev_bb = prev_bb;
3826 prev_bb->next_bb = bb;
3828 prev_bb->next_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
3829 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb = prev_bb;
3831 /* Then, clean up the aux fields. */
3832 FOR_ALL_BB_FN (bb, cfun)
3834 bb->aux = NULL;
3835 if (!stay_in_cfglayout_mode)
3836 BB_HEADER (bb) = BB_FOOTER (bb) = NULL;
3839 /* Maybe reset the original copy tables; they are not valid anymore
3840 when we renumber the basic blocks in compact_blocks. If we are
3841 going out of cfglayout mode, don't re-allocate the tables. */
3842 if (original_copy_tables_initialized_p ())
3843 free_original_copy_tables ();
3844 if (stay_in_cfglayout_mode)
3845 initialize_original_copy_tables ();
3847 /* Finally, put basic_block_info in the new order. */
3848 compact_blocks ();
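/* A caller first threads the desired block order through bb->aux,
   e.g. (sketch) to keep the current order:

	FOR_EACH_BB_FN (bb, cfun)
	  if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
	    bb->aux = bb->next_bb;

   as pass_outof_cfg_layout_mode below does before cfg_layout_finalize
   eventually reaches this function via fixup_reorder_chain.  */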
3852 /* Given a reorder chain, rearrange the code to match. */
3854 static void
3855 fixup_reorder_chain (void)
3857 basic_block bb;
3858 rtx_insn *insn = NULL;
3860 if (cfg_layout_function_header)
3862 set_first_insn (cfg_layout_function_header);
3863 insn = cfg_layout_function_header;
3864 while (NEXT_INSN (insn))
3865 insn = NEXT_INSN (insn);
3868 /* First do the bulk reordering -- rechain the blocks without regard to
3869 the needed changes to jumps and labels. */
3871 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = (basic_block)
3872 bb->aux)
3874 if (BB_HEADER (bb))
3876 if (insn)
3877 SET_NEXT_INSN (insn) = BB_HEADER (bb);
3878 else
3879 set_first_insn (BB_HEADER (bb));
3880 SET_PREV_INSN (BB_HEADER (bb)) = insn;
3881 insn = BB_HEADER (bb);
3882 while (NEXT_INSN (insn))
3883 insn = NEXT_INSN (insn);
3885 if (insn)
3886 SET_NEXT_INSN (insn) = BB_HEAD (bb);
3887 else
3888 set_first_insn (BB_HEAD (bb));
3889 SET_PREV_INSN (BB_HEAD (bb)) = insn;
3890 insn = BB_END (bb);
3891 if (BB_FOOTER (bb))
3893 SET_NEXT_INSN (insn) = BB_FOOTER (bb);
3894 SET_PREV_INSN (BB_FOOTER (bb)) = insn;
3895 while (NEXT_INSN (insn))
3896 insn = NEXT_INSN (insn);
3900 SET_NEXT_INSN (insn) = cfg_layout_function_footer;
3901 if (cfg_layout_function_footer)
3902 SET_PREV_INSN (cfg_layout_function_footer) = insn;
3904 while (NEXT_INSN (insn))
3905 insn = NEXT_INSN (insn);
3907 set_last_insn (insn);
3908 if (flag_checking)
3909 verify_insn_chain ();
3911 /* Now add jumps and labels as needed to match the blocks' new
3912 outgoing edges. */
3914 bool remove_unreachable_blocks = false;
3915 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb ; bb = (basic_block)
3916 bb->aux)
3918 edge e_fall, e_taken, e;
3919 rtx_insn *bb_end_insn;
3920 rtx ret_label = NULL_RTX;
3921 basic_block nb;
3922 edge_iterator ei;
3923 bool asm_goto = false;
3925 if (EDGE_COUNT (bb->succs) == 0)
3926 continue;
3928 /* Find the old fallthru edge, and another non-EH edge for
3929 a taken jump. */
3930 e_taken = e_fall = NULL;
3932 FOR_EACH_EDGE (e, ei, bb->succs)
3933 if (e->flags & EDGE_FALLTHRU)
3934 e_fall = e;
3935 else if (! (e->flags & EDGE_EH))
3936 e_taken = e;
3938 bb_end_insn = BB_END (bb);
3939 if (rtx_jump_insn *bb_end_jump = dyn_cast <rtx_jump_insn *> (bb_end_insn))
3941 ret_label = JUMP_LABEL (bb_end_jump);
3942 if (any_condjump_p (bb_end_jump))
3944 /* This might happen if the conditional jump has side
3945 effects and could therefore not be optimized away.
3946 Make the basic block end with a barrier in order
3947 to prevent rtl_verify_flow_info from complaining. */
3948 if (!e_fall)
3950 gcc_assert (!onlyjump_p (bb_end_jump)
3951 || returnjump_p (bb_end_jump)
3952 || (e_taken->flags & EDGE_CROSSING));
3953 emit_barrier_after (bb_end_jump);
3954 continue;
3957 /* If the old fallthru is still next, nothing to do. */
3958 if (bb->aux == e_fall->dest
3959 || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3960 continue;
3962 /* The degenerate case of a conditional jump jumping to the next
3963 instruction can happen for jumps with side effects. We need
3964 to construct a forwarder block and this will be done just
3965 fine by force_nonfallthru below. */
3966 if (!e_taken)
3969 /* There is another special case: if *neither* block is next,
3970 such as happens at the very end of a function, then we'll
3971 need to add a new unconditional jump. Choose the taken
3972 edge based on known or assumed probability. */
3973 else if (bb->aux != e_taken->dest)
3975 rtx note = find_reg_note (bb_end_jump, REG_BR_PROB, 0);
3977 if (note
3978 && profile_probability::from_reg_br_prob_note
3979 (XINT (note, 0)) < profile_probability::even ()
3980 && invert_jump (bb_end_jump,
3981 (e_fall->dest
3982 == EXIT_BLOCK_PTR_FOR_FN (cfun)
3983 ? NULL_RTX
3984 : label_for_bb (e_fall->dest)), 0))
3986 e_fall->flags &= ~EDGE_FALLTHRU;
3987 gcc_checking_assert (could_fall_through
3988 (e_taken->src, e_taken->dest));
3989 e_taken->flags |= EDGE_FALLTHRU;
3990 update_br_prob_note (bb);
3991 e = e_fall, e_fall = e_taken, e_taken = e;
3995 /* If the "jumping" edge is a crossing edge, and the fall
3996 through edge is non-crossing, leave things as they are. */
3997 else if ((e_taken->flags & EDGE_CROSSING)
3998 && !(e_fall->flags & EDGE_CROSSING))
3999 continue;
4001 /* Otherwise we can try to invert the jump. This will
4002 basically never fail; however, keep up the pretense. */
4003 else if (invert_jump (bb_end_jump,
4004 (e_fall->dest
4005 == EXIT_BLOCK_PTR_FOR_FN (cfun)
4006 ? NULL_RTX
4007 : label_for_bb (e_fall->dest)), 0))
4009 e_fall->flags &= ~EDGE_FALLTHRU;
4010 gcc_checking_assert (could_fall_through
4011 (e_taken->src, e_taken->dest));
4012 e_taken->flags |= EDGE_FALLTHRU;
4013 update_br_prob_note (bb);
4014 if (LABEL_NUSES (ret_label) == 0
4015 && single_pred_p (e_taken->dest))
4016 delete_insn (as_a<rtx_insn *> (ret_label));
4017 continue;
4020 else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
4022 /* If the old fallthru is still next or if
4023 asm goto doesn't have a fallthru (e.g. when followed by
4024 __builtin_unreachable ()), nothing to do. */
4025 if (! e_fall
4026 || bb->aux == e_fall->dest
4027 || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4028 continue;
4030 /* Otherwise we'll have to use the fallthru fixup below.
4031 But avoid redirecting asm goto to EXIT. */
4032 asm_goto = true;
4034 else
4036 /* Otherwise we have some return, switch or computed
4037 jump. In the 99% case, there should not have been a
4038 fallthru edge. */
4039 gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
4040 continue;
4043 else
4045 /* No fallthru implies a noreturn function with EH edges, or
4046 something similarly bizarre. In any case, we don't need to
4047 do anything. */
4048 if (! e_fall)
4049 continue;
4051 /* If the fallthru block is still next, nothing to do. */
4052 if (bb->aux == e_fall->dest)
4053 continue;
4055 /* A fallthru to exit block. */
4056 if (e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4057 continue;
4060 /* If E_FALL->dest is just a return block, then we can emit a
4061 return rather than a jump to the return block. */
4062 rtx_insn *ret, *use;
4063 basic_block dest;
4064 if (!asm_goto
4065 && bb_is_just_return (e_fall->dest, &ret, &use)
4066 && ((PATTERN (ret) == simple_return_rtx && targetm.have_simple_return ())
4067 || (PATTERN (ret) == ret_rtx && targetm.have_return ())))
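/* Note that RET_LABEL ends up being the return pattern itself
   (simple_return or return) rather than a code label;
   force_nonfallthru_and_redirect handles such ANY_RETURN_P
   "labels" specially. */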
4069 ret_label = PATTERN (ret);
4070 dest = EXIT_BLOCK_PTR_FOR_FN (cfun);
4072 e_fall->flags &= ~EDGE_CROSSING;
4073 /* E_FALL->dest might become unreachable as a result of
4074 replacing the jump with a return. So arrange to remove
4075 unreachable blocks. */
4076 remove_unreachable_blocks = true;
4078 else
4080 dest = e_fall->dest;
4083 /* We got here if we need to add a new jump insn.
4084 Note force_nonfallthru can delete E_FALL and thus we have to
4085 save E_FALL->src prior to the call to force_nonfallthru. */
4086 nb = force_nonfallthru_and_redirect (e_fall, dest, ret_label);
4087 if (nb)
4089 nb->aux = bb->aux;
4090 bb->aux = nb;
4091 /* Don't process this new block. */
4092 bb = nb;
4096 relink_block_chain (/*stay_in_cfglayout_mode=*/false);
4098 /* Annoying special case - jump around dead jumptables left in the code. */
4099 FOR_EACH_BB_FN (bb, cfun)
4101 edge e = find_fallthru_edge (bb->succs);
4103 if (e && !can_fallthru (e->src, e->dest))
4104 force_nonfallthru (e);
4107 /* When not optimizing, ensure that each edge's goto_locus is reflected
4108 by at least one instruction with that locus in the RTL. */
4109 if (!optimize && !DECL_IGNORED_P (current_function_decl))
4110 FOR_EACH_BB_FN (bb, cfun)
4112 edge e;
4113 edge_iterator ei;
4115 FOR_EACH_EDGE (e, ei, bb->succs)
4116 if (LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
4117 && !(e->flags & EDGE_ABNORMAL))
4119 edge e2;
4120 edge_iterator ei2;
4121 basic_block dest, nb;
4122 rtx_insn *end;
4124 insn = BB_END (e->src);
4125 end = PREV_INSN (BB_HEAD (e->src));
4126 while (insn != end
4127 && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
4128 insn = PREV_INSN (insn);
4129 if (insn != end
4130 && loc_equal (INSN_LOCATION (insn), e->goto_locus))
4131 continue;
4132 if (simplejump_p (BB_END (e->src))
4133 && !INSN_HAS_LOCATION (BB_END (e->src)))
4135 INSN_LOCATION (BB_END (e->src)) = e->goto_locus;
4136 continue;
4138 dest = e->dest;
4139 if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4141 /* Non-fallthru edges to the exit block cannot be split. */
4142 if (!(e->flags & EDGE_FALLTHRU))
4143 continue;
4145 else
4147 insn = BB_HEAD (dest);
4148 end = NEXT_INSN (BB_END (dest));
4149 while (insn != end && !NONDEBUG_INSN_P (insn))
4150 insn = NEXT_INSN (insn);
4151 if (insn != end && INSN_HAS_LOCATION (insn)
4152 && loc_equal (INSN_LOCATION (insn), e->goto_locus))
4153 continue;
4155 nb = split_edge (e);
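/* The new forwarder block may contain nothing but its basic block
   note; emit a nop so there is a real insn to carry the goto_locus. */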
4156 if (!INSN_P (BB_END (nb)))
4157 BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
4158 nb);
4159 INSN_LOCATION (BB_END (nb)) = e->goto_locus;
4161 /* If there are other incoming edges to the destination block
4162 with the same goto locus, redirect them to the new block as
4163 well; this can prevent other such blocks from being created
4164 in subsequent iterations of the loop. */
4165 for (ei2 = ei_start (dest->preds); (e2 = ei_safe_edge (ei2)); )
4166 if (LOCATION_LOCUS (e2->goto_locus) != UNKNOWN_LOCATION
4167 && !(e2->flags & (EDGE_ABNORMAL | EDGE_FALLTHRU))
4168 && e->goto_locus == e2->goto_locus)
4169 redirect_edge_and_branch (e2, nb);
4170 else
4171 ei_next (&ei2);
4175 /* Replacing a jump with a return may have exposed unreachable
4176 blocks. Conditionally remove them if such transformations were
4177 made. */
4178 if (remove_unreachable_blocks)
4179 delete_unreachable_blocks ();
4182 /* Perform sanity checks on the insn chain.
4183 1. Check that next/prev pointers are consistent in both the forward and
4184 reverse direction.
4185 2. Count insns in chain, going both directions, and check if equal.
4186 3. Check that get_last_insn () returns the actual end of chain. */
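/* For example, a well-formed chain i1 <-> i2 <-> i3 must satisfy
   PREV_INSN (i2) == i1, NEXT_INSN (i2) == i3 and
   get_last_insn () == i3; walking it in either direction then
   counts the same number of insns. */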
4188 DEBUG_FUNCTION void
4189 verify_insn_chain (void)
4191 rtx_insn *x, *prevx, *nextx;
4192 int insn_cnt1, insn_cnt2;
4194 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
4195 x != 0;
4196 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
4197 gcc_assert (PREV_INSN (x) == prevx);
4199 gcc_assert (prevx == get_last_insn ());
4201 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
4202 x != 0;
4203 nextx = x, insn_cnt2++, x = PREV_INSN (x))
4204 gcc_assert (NEXT_INSN (x) == nextx);
4206 gcc_assert (insn_cnt1 == insn_cnt2);
4209 /* If we have assembler epilogues, the block falling through to exit must
4210 be the last one in the reordered chain when we reach final. Ensure
4211 that this condition is met. */
4212 static void
4213 fixup_fallthru_exit_predecessor (void)
4215 edge e;
4216 basic_block bb = NULL;
4218 /* This transformation is not valid before reload, because we might
4219 separate a call from the instruction that copies the return
4220 value. */
4221 gcc_assert (reload_completed);
4223 e = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
4224 if (e)
4225 bb = e->src;
4227 if (bb && bb->aux)
4229 basic_block c = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
4231 /* If the very first block is the one with the fall-through exit
4232 edge, we have to split that block. */
4233 if (c == bb)
4235 bb = split_block_after_labels (bb)->dest;
4236 bb->aux = c->aux;
4237 c->aux = bb;
4238 BB_FOOTER (bb) = BB_FOOTER (c);
4239 BB_FOOTER (c) = NULL;
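/* Move BB to the end of the ->aux chain. A sketch:

     entry -> c -> ... -> p -> bb -> q -> ... -> tail
   becomes
     entry -> c -> ... -> p -> q -> ... -> tail -> bb

   First walk to BB's predecessor P and unlink BB, then walk to the
   tail and append BB, terminating the chain after it. */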
4242 while (c->aux != bb)
4243 c = (basic_block) c->aux;
4245 c->aux = bb->aux;
4246 while (c->aux)
4247 c = (basic_block) c->aux;
4249 c->aux = bb;
4250 bb->aux = NULL;
4254 /* If the exit block has more than one fallthru predecessor, force it
4255 to have just one. */
4257 static void
4258 force_one_exit_fallthru (void)
4260 edge e, predecessor = NULL;
4261 bool more = false;
4262 edge_iterator ei;
4263 basic_block forwarder, bb;
4265 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
4266 if (e->flags & EDGE_FALLTHRU)
4268 if (predecessor == NULL)
4269 predecessor = e;
4270 else
4272 more = true;
4273 break;
4277 if (!more)
4278 return;
4280 /* Exit has several fallthru predecessors. Create a forwarder block for
4281 them. */
4282 forwarder = split_edge (predecessor);
4283 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
4284 (e = ei_safe_edge (ei)); )
4286 if (e->src == forwarder
4287 || !(e->flags & EDGE_FALLTHRU))
4288 ei_next (&ei);
4289 else
4290 redirect_edge_and_branch_force (e, forwarder);
4293 /* Fix up the chain of blocks -- make FORWARDER immediately precede the
4294 exit block. */
4295 FOR_EACH_BB_FN (bb, cfun)
4297 if (bb->aux == NULL && bb != forwarder)
4299 bb->aux = forwarder;
4300 break;
4305 /* Return true if it is possible to duplicate basic block BB. */
4307 static bool
4308 cfg_layout_can_duplicate_bb_p (const_basic_block bb)
4310 /* Do not attempt to duplicate tablejumps, as we need to unshare
4311 the dispatch table. This is difficult to do, as the instructions
4312 computing the jump destination may be hoisted outside the basic block. */
4313 if (tablejump_p (BB_END (bb), NULL, NULL))
4314 return false;
4316 /* Do not duplicate blocks containing insns that can't be copied. */
4317 if (targetm.cannot_copy_insn_p)
4319 rtx_insn *insn = BB_HEAD (bb);
4320 while (1)
4322 if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
4323 return false;
4324 if (insn == BB_END (bb))
4325 break;
4326 insn = NEXT_INSN (insn);
4330 return true;
4333 rtx_insn *
4334 duplicate_insn_chain (rtx_insn *from, rtx_insn *to,
4335 class loop *loop, copy_bb_data *id)
4337 rtx_insn *insn, *next, *copy;
4338 rtx_note *last;
4340 /* Avoid updating the boundaries of the previous basic block. The
4341 note will get removed from the insn stream in fixup. */
4342 last = emit_note (NOTE_INSN_DELETED);
4344 /* Create copy at the end of INSN chain. The chain will
4345 be reordered later. */
4346 for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
4348 switch (GET_CODE (insn))
4350 case DEBUG_INSN:
4351 /* Don't duplicate label debug insns. */
4352 if (DEBUG_BIND_INSN_P (insn)
4353 && TREE_CODE (INSN_VAR_LOCATION_DECL (insn)) == LABEL_DECL)
4354 break;
4355 /* FALLTHRU */
4356 case INSN:
4357 case CALL_INSN:
4358 case JUMP_INSN:
4359 copy = emit_copy_of_insn_after (insn, get_last_insn ());
4360 if (JUMP_P (insn) && JUMP_LABEL (insn) != NULL_RTX
4361 && ANY_RETURN_P (JUMP_LABEL (insn)))
4362 JUMP_LABEL (copy) = JUMP_LABEL (insn);
4363 maybe_copy_prologue_epilogue_insn (insn, copy);
4364 /* If requested, remap the dependence info of cliques brought in
4365 via inlining. */
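/* Roughly: MR_DEPENDENCE_CLIQUE groups memory references whose
   dependences were analyzed together, e.g. within an inlined body.
   The copies get a fresh clique number via ID->dependence_map so
   that references in the copy are not wrongly disambiguated against
   references still carrying the original clique. */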
4366 if (id)
4368 subrtx_iterator::array_type array;
4369 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
4370 if (MEM_P (*iter) && MEM_EXPR (*iter))
4372 tree op = MEM_EXPR (*iter);
4373 if (TREE_CODE (op) == WITH_SIZE_EXPR)
4374 op = TREE_OPERAND (op, 0);
4375 while (handled_component_p (op))
4376 op = TREE_OPERAND (op, 0);
4377 if ((TREE_CODE (op) == MEM_REF
4378 || TREE_CODE (op) == TARGET_MEM_REF)
4379 && MR_DEPENDENCE_CLIQUE (op) > 1
4380 && (!loop
4381 || (MR_DEPENDENCE_CLIQUE (op)
4382 != loop->owned_clique)))
4384 if (!id->dependence_map)
4385 id->dependence_map = new hash_map<dependence_hash,
4386 unsigned short>;
4387 bool existed;
4388 unsigned short &newc = id->dependence_map->get_or_insert
4389 (MR_DEPENDENCE_CLIQUE (op), &existed);
4390 if (!existed)
4392 gcc_assert
4393 (MR_DEPENDENCE_CLIQUE (op) <= cfun->last_clique);
4394 newc = get_new_clique (cfun);
4396 /* We cannot adjust MR_DEPENDENCE_CLIQUE in-place
4397 since MEM_EXPR is shared, so make a copy and
4398 walk down to the subtree again. */
4399 tree new_expr = unshare_expr (MEM_EXPR (*iter));
4400 if (TREE_CODE (new_expr) == WITH_SIZE_EXPR)
4401 new_expr = TREE_OPERAND (new_expr, 0);
4402 while (handled_component_p (new_expr))
4403 new_expr = TREE_OPERAND (new_expr, 0);
4404 MR_DEPENDENCE_CLIQUE (new_expr) = newc;
4405 set_mem_expr (const_cast <rtx> (*iter), new_expr);
4409 break;
4411 case JUMP_TABLE_DATA:
4412 /* Avoid copying dispatch tables. We never duplicate
4413 tablejumps, so this can only happen when the table got
4414 moved far from the original jump.
4415 Avoid copying the following barrier as well, if any
4416 (and any debug insns in between). */
4417 for (next = NEXT_INSN (insn);
4418 next != NEXT_INSN (to);
4419 next = NEXT_INSN (next))
4420 if (!DEBUG_INSN_P (next))
4421 break;
4422 if (next != NEXT_INSN (to) && BARRIER_P (next))
4423 insn = next;
4424 break;
4426 case CODE_LABEL:
4427 break;
4429 case BARRIER:
4430 emit_barrier ();
4431 break;
4433 case NOTE:
4434 switch (NOTE_KIND (insn))
4436 /* In case the prologue is empty and the function contains a label
4437 in the first BB, we may want to copy the block. */
4438 case NOTE_INSN_PROLOGUE_END:
4440 case NOTE_INSN_DELETED:
4441 case NOTE_INSN_DELETED_LABEL:
4442 case NOTE_INSN_DELETED_DEBUG_LABEL:
4443 /* No problem to strip these. */
4444 case NOTE_INSN_FUNCTION_BEG:
4445 /* There is always just a single entry to the function. */
4446 case NOTE_INSN_BASIC_BLOCK:
4447 /* We should only switch text sections once. */
4448 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4449 break;
4451 case NOTE_INSN_EPILOGUE_BEG:
4452 case NOTE_INSN_UPDATE_SJLJ_CONTEXT:
4453 emit_note_copy (as_a <rtx_note *> (insn));
4454 break;
4456 default:
4457 /* All other notes should have already been eliminated. */
4458 gcc_unreachable ();
4460 break;
4461 default:
4462 gcc_unreachable ();
4465 insn = NEXT_INSN (last);
4466 delete_insn (last);
4467 return insn;
4470 /* Create a duplicate of the basic block BB. */
4472 static basic_block
4473 cfg_layout_duplicate_bb (basic_block bb, copy_bb_data *id)
4475 rtx_insn *insn;
4476 basic_block new_bb;
4478 class loop *loop = (id && current_loops) ? bb->loop_father : NULL;
4480 insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb), loop, id);
4481 new_bb = create_basic_block (insn,
4482 insn ? get_last_insn () : NULL,
4483 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
4485 BB_COPY_PARTITION (new_bb, bb);
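/* In cfglayout mode a block may carry header and footer insn chains
   (e.g. notes and barriers kept outside the block proper); duplicate
   those chains as well. */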
4486 if (BB_HEADER (bb))
4488 insn = BB_HEADER (bb);
4489 while (NEXT_INSN (insn))
4490 insn = NEXT_INSN (insn);
4491 insn = duplicate_insn_chain (BB_HEADER (bb), insn, loop, id);
4492 if (insn)
4493 BB_HEADER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
4496 if (BB_FOOTER (bb))
4498 insn = BB_FOOTER (bb);
4499 while (NEXT_INSN (insn))
4500 insn = NEXT_INSN (insn);
4501 insn = duplicate_insn_chain (BB_FOOTER (bb), insn, loop, id);
4502 if (insn)
4503 BB_FOOTER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
4506 return new_bb;
4510 /* Main entry point to this module - initialize the data structures for
4511 CFG layout changes.
4513 FLAGS is a set of additional flags to pass to cleanup_cfg (). */
4515 void
4516 cfg_layout_initialize (int flags)
4518 rtx_insn_list *x;
4519 basic_block bb;
4521 /* Once bb partitioning is complete, cfg layout mode should not be
4522 re-entered. Entering cfg layout mode may require fixups: for
4523 example, edge forwarding performed when optimizing the cfg
4524 layout might require moving a block from the hot to the cold
4525 section, which would create an illegal partitioning unless some
4526 manual fixup was performed. */
4527 gcc_assert (!crtl->bb_reorder_complete || !crtl->has_bb_partition);
4529 initialize_original_copy_tables ();
4531 cfg_layout_rtl_register_cfg_hooks ();
4533 record_effective_endpoints ();
4535 /* Make sure that the targets of non local gotos are marked. */
4536 for (x = nonlocal_goto_handler_labels; x; x = x->next ())
4538 bb = BLOCK_FOR_INSN (x->insn ());
4539 bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
4542 cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
4545 /* Splits superblocks. */
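/* A block flagged BB_SUPERBLOCK may actually contain several basic
   blocks; rebuild the jump labels and let find_many_sub_basic_blocks
   rediscover the sub-blocks within each flagged block. */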
4546 void
4547 break_superblocks (void)
4549 bool need = false;
4550 basic_block bb;
4552 auto_sbitmap superblocks (last_basic_block_for_fn (cfun));
4553 bitmap_clear (superblocks);
4555 FOR_EACH_BB_FN (bb, cfun)
4556 if (bb->flags & BB_SUPERBLOCK)
4558 bb->flags &= ~BB_SUPERBLOCK;
4559 bitmap_set_bit (superblocks, bb->index);
4560 need = true;
4563 if (need)
4565 rebuild_jump_labels (get_insns ());
4566 find_many_sub_basic_blocks (superblocks);
4570 /* Finalize the changes: reorder the insn list according to the sequence
4571 specified by the aux pointers, insert compensation code, and rebuild the scope forest. */
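/* Note the ordering below: fixup_fallthru_exit_predecessor rearranges
   the ->aux chain so that the block falling through to the exit comes
   last, and therefore has to run before fixup_reorder_chain
   materializes that order in the insn stream. */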
4573 void
4574 cfg_layout_finalize (void)
4576 free_dominance_info (CDI_DOMINATORS);
4577 force_one_exit_fallthru ();
4578 rtl_register_cfg_hooks ();
4579 if (reload_completed && !targetm.have_epilogue ())
4580 fixup_fallthru_exit_predecessor ();
4581 fixup_reorder_chain ();
4583 rebuild_jump_labels (get_insns ());
4584 delete_dead_jumptables ();
4586 if (flag_checking)
4587 verify_insn_chain ();
4588 checking_verify_flow_info ();
4592 /* Same as split_block but update cfg_layout structures. */
4594 static basic_block
4595 cfg_layout_split_block (basic_block bb, void *insnp)
4597 rtx insn = (rtx) insnp;
4598 basic_block new_bb = rtl_split_block (bb, insn);
4600 BB_FOOTER (new_bb) = BB_FOOTER (bb);
4601 BB_FOOTER (bb) = NULL;
4603 return new_bb;
4606 /* Redirect edge E to DEST. */
4607 static edge
4608 cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
4610 basic_block src = e->src;
4611 edge ret;
4613 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4614 return NULL;
4616 if (e->dest == dest)
4617 return e;
4619 if (e->flags & EDGE_CROSSING
4620 && BB_PARTITION (e->src) == BB_PARTITION (dest)
4621 && simplejump_p (BB_END (src)))
4623 if (dump_file)
4624 fprintf (dump_file,
4625 "Removing crossing jump while redirecting edge form %i to %i\n",
4626 e->src->index, dest->index);
4627 delete_insn (BB_END (src));
4628 remove_barriers_from_footer (src);
4629 e->flags |= EDGE_FALLTHRU;
4632 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4633 && (ret = try_redirect_by_replacing_jump (e, dest, true)))
4635 df_set_bb_dirty (src);
4636 return ret;
4639 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
4640 && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
4642 if (dump_file)
4643 fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
4644 e->src->index, dest->index);
4646 df_set_bb_dirty (e->src);
4647 redirect_edge_succ (e, dest);
4648 return e;
4651 /* redirect_edge_and_branch may decide to turn the branch into a
4652 fallthru edge in case the basic blocks appear to be in sequence.
4653 Avoid this transformation. */
4655 if (e->flags & EDGE_FALLTHRU)
4657 /* Redirect any branch edges unified with the fallthru one. */
4658 if (JUMP_P (BB_END (src))
4659 && label_is_jump_target_p (BB_HEAD (e->dest),
4660 BB_END (src)))
4662 edge redirected;
4664 if (dump_file)
4665 fprintf (dump_file, "Fallthru edge unified with branch "
4666 "%i->%i redirected to %i\n",
4667 e->src->index, e->dest->index, dest->index);
4668 e->flags &= ~EDGE_FALLTHRU;
4669 redirected = redirect_branch_edge (e, dest);
4670 gcc_assert (redirected);
4671 redirected->flags |= EDGE_FALLTHRU;
4672 df_set_bb_dirty (redirected->src);
4673 return redirected;
4675 /* In case we are redirecting fallthru edge to the branch edge
4676 of conditional jump, remove it. */
4677 if (EDGE_COUNT (src->succs) == 2)
4679 /* Find the edge that is different from E. */
4680 edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);
4682 if (s->dest == dest
4683 && any_condjump_p (BB_END (src))
4684 && onlyjump_p (BB_END (src)))
4685 delete_insn (BB_END (src));
4687 if (dump_file)
4688 fprintf (dump_file, "Redirecting fallthru edge %i->%i to %i\n",
4689 e->src->index, e->dest->index, dest->index);
4690 ret = redirect_edge_succ_nodup (e, dest);
4692 else
4693 ret = redirect_branch_edge (e, dest);
4695 if (!ret)
4696 return NULL;
4698 fixup_partition_crossing (ret);
4699 /* We don't want simplejumps in the insn stream during cfglayout. */
4700 gcc_assert (!simplejump_p (BB_END (src)) || CROSSING_JUMP_P (BB_END (src)));
4702 df_set_bb_dirty (src);
4703 return ret;
4706 /* Simple wrapper, as we can always redirect fallthru edges. */
4707 static basic_block
4708 cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
4710 edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
4712 gcc_assert (redirected);
4713 return NULL;
4716 /* Same as delete_basic_block but update cfg_layout structures. */
4718 static void
4719 cfg_layout_delete_block (basic_block bb)
4721 rtx_insn *insn, *next, *prev = PREV_INSN (BB_HEAD (bb)), *remains;
4722 rtx_insn **to;
4724 if (BB_HEADER (bb))
4726 next = BB_HEAD (bb);
4727 if (prev)
4728 SET_NEXT_INSN (prev) = BB_HEADER (bb);
4729 else
4730 set_first_insn (BB_HEADER (bb));
4731 SET_PREV_INSN (BB_HEADER (bb)) = prev;
4732 insn = BB_HEADER (bb);
4733 while (NEXT_INSN (insn))
4734 insn = NEXT_INSN (insn);
4735 SET_NEXT_INSN (insn) = next;
4736 SET_PREV_INSN (next) = insn;
4738 next = NEXT_INSN (BB_END (bb));
4739 if (BB_FOOTER (bb))
4741 insn = BB_FOOTER (bb);
4742 while (insn)
4744 if (BARRIER_P (insn))
4746 if (PREV_INSN (insn))
4747 SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
4748 else
4749 BB_FOOTER (bb) = NEXT_INSN (insn);
4750 if (NEXT_INSN (insn))
4751 SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
4753 if (LABEL_P (insn))
4754 break;
4755 insn = NEXT_INSN (insn);
4757 if (BB_FOOTER (bb))
4759 insn = BB_END (bb);
4760 SET_NEXT_INSN (insn) = BB_FOOTER (bb);
4761 SET_PREV_INSN (BB_FOOTER (bb)) = insn;
4762 while (NEXT_INSN (insn))
4763 insn = NEXT_INSN (insn);
4764 SET_NEXT_INSN (insn) = next;
4765 if (next)
4766 SET_PREV_INSN (next) = insn;
4767 else
4768 set_last_insn (insn);
4771 if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
4772 to = &BB_HEADER (bb->next_bb);
4773 else
4774 to = &cfg_layout_function_footer;
4776 rtl_delete_block (bb);
4778 if (prev)
4779 prev = NEXT_INSN (prev);
4780 else
4781 prev = get_insns ();
4782 if (next)
4783 next = PREV_INSN (next);
4784 else
4785 next = get_last_insn ();
4787 if (next && NEXT_INSN (next) != prev)
4789 remains = unlink_insn_chain (prev, next);
4790 insn = remains;
4791 while (NEXT_INSN (insn))
4792 insn = NEXT_INSN (insn);
4793 SET_NEXT_INSN (insn) = *to;
4794 if (*to)
4795 SET_PREV_INSN (*to) = insn;
4796 *to = remains;
4800 /* Return true when blocks A and B can be safely merged. */
4802 static bool
4803 cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
4805 /* If we are partitioning hot/cold basic blocks, we don't want to
4806 mess up unconditional or indirect jumps that cross between hot
4807 and cold sections.
4809 Basic block partitioning may result in some jumps that appear to
4810 be optimizable (or blocks that appear to be mergeable), but which really
4811 must be left untouched (they are required to make it safely across
4812 partition boundaries). See the comments at the top of
4813 bb-reorder.cc:partition_hot_cold_basic_blocks for complete details. */
4815 if (BB_PARTITION (a) != BB_PARTITION (b))
4816 return false;
4818 /* Protect the loop latches. */
4819 if (current_loops && b->loop_father->latch == b)
4820 return false;
4822 /* If we would end up moving B's instructions, make sure it doesn't fall
4823 through into the exit block, since we cannot recover from a fallthrough
4824 edge into the exit block occurring in the middle of a function. */
4825 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4827 edge e = find_fallthru_edge (b->succs);
4828 if (e && e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4829 return false;
4832 /* There must be exactly one edge in between the blocks. */
4833 return (single_succ_p (a)
4834 && single_succ (a) == b
4835 && single_pred_p (b) == 1
4836 && a != b
4837 /* Must be simple edge. */
4838 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
4839 && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4840 && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
4841 /* If the jump insn has side effects, we can't kill the edge.
4842 When not optimizing, try_redirect_by_replacing_jump will
4843 not allow us to redirect an edge by replacing a table jump. */
4844 && (!JUMP_P (BB_END (a))
4845 || ((!optimize || reload_completed)
4846 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
4849 /* Merge block A and B. The blocks must be mergeable. */
4851 static void
4852 cfg_layout_merge_blocks (basic_block a, basic_block b)
4854 /* If B is a forwarder block whose outgoing edge has no location, we'll
4855 propagate the locus of the edge between A and B onto it. */
4856 const bool forward_edge_locus
4857 = (b->flags & BB_FORWARDER_BLOCK) != 0
4858 && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION;
4859 rtx_insn *insn;
4861 gcc_checking_assert (cfg_layout_can_merge_blocks_p (a, b));
4863 if (dump_file)
4864 fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
4865 a->index);
4867 /* If there was a CODE_LABEL beginning B, delete it. */
4868 if (LABEL_P (BB_HEAD (b)))
4870 delete_insn (BB_HEAD (b));
4873 /* We should have a fallthru edge in A; otherwise we can do a dummy
4874 redirection to get it cleaned up. */
4875 if (JUMP_P (BB_END (a)))
4876 try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
4877 gcc_assert (!JUMP_P (BB_END (a)));
4879 /* If not optimizing, preserve the locus of the single edge between
4880 blocks A and B if necessary by emitting a nop. */
4881 if (!optimize
4882 && !forward_edge_locus
4883 && !DECL_IGNORED_P (current_function_decl))
4884 emit_nop_for_unique_locus_between (a, b);
4886 /* Move things from b->footer after a->footer. */
4887 if (BB_FOOTER (b))
4889 if (!BB_FOOTER (a))
4890 BB_FOOTER (a) = BB_FOOTER (b);
4891 else
4893 rtx_insn *last = BB_FOOTER (a);
4895 while (NEXT_INSN (last))
4896 last = NEXT_INSN (last);
4897 SET_NEXT_INSN (last) = BB_FOOTER (b);
4898 SET_PREV_INSN (BB_FOOTER (b)) = last;
4900 BB_FOOTER (b) = NULL;
4903 /* Move things from b->header before a->footer.
4904 Note that this may include dead tablejump data, but we don't clean
4905 those up until we go out of cfglayout mode. */
4906 if (BB_HEADER (b))
4908 if (! BB_FOOTER (a))
4909 BB_FOOTER (a) = BB_HEADER (b);
4910 else
4912 rtx_insn *last = BB_HEADER (b);
4914 while (NEXT_INSN (last))
4915 last = NEXT_INSN (last);
4916 SET_NEXT_INSN (last) = BB_FOOTER (a);
4917 SET_PREV_INSN (BB_FOOTER (a)) = last;
4918 BB_FOOTER (a) = BB_HEADER (b);
4920 BB_HEADER (b) = NULL;
4923 /* In the case basic blocks are not adjacent, move them around. */
4924 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4926 insn = unlink_insn_chain (BB_HEAD (b), BB_END (b));
4928 emit_insn_after_noloc (insn, BB_END (a), a);
4930 /* Otherwise just re-associate the instructions. */
4931 else
4933 insn = BB_HEAD (b);
4934 BB_END (a) = BB_END (b);
4937 /* emit_insn_after_noloc doesn't call df_insn_change_bb.
4938 We need to call it explicitly. */
4939 update_bb_for_insn_chain (insn, BB_END (b), a);
4941 /* Skip possible DELETED_LABEL insn. */
4942 if (!NOTE_INSN_BASIC_BLOCK_P (insn))
4943 insn = NEXT_INSN (insn);
4944 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
4945 BB_HEAD (b) = BB_END (b) = NULL;
4946 delete_insn (insn);
4948 df_bb_delete (b->index);
4950 if (forward_edge_locus)
4951 EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;
4953 if (dump_file)
4954 fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
4957 /* Split edge E. */
4959 static basic_block
4960 cfg_layout_split_edge (edge e)
4962 basic_block new_bb =
4963 create_basic_block (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4964 ? NEXT_INSN (BB_END (e->src)) : get_insns (),
4965 NULL_RTX, e->src);
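/* Keep the new block in the partition of the destination, except when
   splitting an edge into the exit block, in which case inherit the
   source's partition. */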
4967 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4968 BB_COPY_PARTITION (new_bb, e->src);
4969 else
4970 BB_COPY_PARTITION (new_bb, e->dest);
4971 make_edge (new_bb, e->dest, EDGE_FALLTHRU);
4972 redirect_edge_and_branch_force (e, new_bb);
4974 return new_bb;
4977 /* Do postprocessing after making a forwarder block joined by edge FALLTHRU. */
4979 static void
4980 rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
4984 /* Return true if BB contains only labels or non-executable
4985 instructions. */
4987 static bool
4988 rtl_block_empty_p (basic_block bb)
4990 rtx_insn *insn;
4992 if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
4993 || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
4994 return true;
4996 FOR_BB_INSNS (bb, insn)
4997 if (NONDEBUG_INSN_P (insn)
4998 && (!any_uncondjump_p (insn) || !onlyjump_p (insn)))
4999 return false;
5001 return true;
5004 /* Split a basic block if it ends with a conditional branch and if
5005 the other part of the block is not empty. */
5007 static basic_block
5008 rtl_split_block_before_cond_jump (basic_block bb)
5010 rtx_insn *insn;
5011 rtx_insn *split_point = NULL;
5012 rtx_insn *last = NULL;
5013 bool found_code = false;
5015 FOR_BB_INSNS (bb, insn)
5017 if (any_condjump_p (insn))
5018 split_point = last;
5019 else if (NONDEBUG_INSN_P (insn))
5020 found_code = true;
5021 last = insn;
5024 /* Split only when we found both a conditional jump and some other real code. */
5025 if (found_code && split_point)
5026 return split_block (bb, split_point)->dest;
5027 else
5028 return NULL;
5031 /* Return true if BB ends with a call, possibly followed by some
5032 instructions that must stay with the call, false otherwise. */
5034 static bool
5035 rtl_block_ends_with_call_p (basic_block bb)
5037 rtx_insn *insn = BB_END (bb);
5039 while (!CALL_P (insn)
5040 && insn != BB_HEAD (bb)
5041 && (keep_with_call_p (insn)
5042 || NOTE_P (insn)
5043 || DEBUG_INSN_P (insn)))
5044 insn = PREV_INSN (insn);
5045 return (CALL_P (insn));
5048 /* Return true if BB ends with a conditional branch, false otherwise. */
5050 static bool
5051 rtl_block_ends_with_condjump_p (const_basic_block bb)
5053 return any_condjump_p (BB_END (bb));
5056 /* Return true if we need to add a fake edge to the exit block.
5057 Helper function for rtl_flow_call_edges_add. */
5059 static bool
5060 need_fake_edge_p (const rtx_insn *insn)
5062 if (!INSN_P (insn))
5063 return false;
5065 if ((CALL_P (insn)
5066 && !SIBLING_CALL_P (insn)
5067 && !find_reg_note (insn, REG_NORETURN, NULL)
5068 && !(RTL_CONST_OR_PURE_CALL_P (insn))))
5069 return true;
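/* Volatile asm statements are also treated as possibly not returning
   control normally, so they get a fake edge as well. */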
5071 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5072 && MEM_VOLATILE_P (PATTERN (insn)))
5073 || (GET_CODE (PATTERN (insn)) == PARALLEL
5074 && asm_noperands (insn) != -1
5075 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
5076 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
5079 /* Add fake edges to the function exit for any non-constant and
5080 non-noreturn calls, and for volatile inline assembly, in the bitmap
5081 of blocks specified by BLOCKS, or to the whole CFG if BLOCKS is zero.
5082 Return the number of blocks that were split.
5084 The goal is to expose cases in which entering a basic block does not imply
5085 that all subsequent instructions must be executed. */
5087 static int
5088 rtl_flow_call_edges_add (sbitmap blocks)
5090 int i;
5091 int blocks_split = 0;
5092 int last_bb = last_basic_block_for_fn (cfun);
5093 bool check_last_block = false;
5095 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
5096 return 0;
5098 if (! blocks)
5099 check_last_block = true;
5100 else
5101 check_last_block = bitmap_bit_p (blocks,
5102 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
5104 /* In the last basic block, before epilogue generation, there will be
5105 a fallthru edge to EXIT. Special care is required if the last insn
5106 of the last basic block is a call because make_edge folds duplicate
5107 edges, which would result in the fallthru edge also being marked
5108 fake, which would result in the fallthru edge being removed by
5109 remove_fake_edges, which would result in an invalid CFG.
5111 Moreover, we can't elide the outgoing fake edge, since the block
5112 profiler needs to take this into account in order to solve the minimal
5113 spanning tree in the case that the call doesn't return.
5115 Handle this by adding a dummy instruction in a new last basic block. */
5116 if (check_last_block)
5118 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
5119 rtx_insn *insn = BB_END (bb);
5121 /* Back up past insns that must be kept in the same block as a call. */
5122 while (insn != BB_HEAD (bb)
5123 && keep_with_call_p (insn))
5124 insn = PREV_INSN (insn);
5126 if (need_fake_edge_p (insn))
5128 edge e;
5130 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
5131 if (e)
5133 insert_insn_on_edge (gen_use (const0_rtx), e);
5134 commit_edge_insertions ();
5139 /* Now add fake edges to the function exit for any non-constant
5140 calls since there is no way that we can determine if they will
5141 return or not... */
5143 for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
5145 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
5146 rtx_insn *insn;
5147 rtx_insn *prev_insn;
5149 if (!bb)
5150 continue;
5152 if (blocks && !bitmap_bit_p (blocks, i))
5153 continue;
5155 for (insn = BB_END (bb); ; insn = prev_insn)
5157 prev_insn = PREV_INSN (insn);
5158 if (need_fake_edge_p (insn))
5160 edge e;
5161 rtx_insn *split_at_insn = insn;
5163 /* Don't split the block between a call and an insn that should
5164 remain in the same block as the call. */
5165 if (CALL_P (insn))
5166 while (split_at_insn != BB_END (bb)
5167 && keep_with_call_p (NEXT_INSN (split_at_insn)))
5168 split_at_insn = NEXT_INSN (split_at_insn);
5170 /* The handling above of the final block before the epilogue
5171 should be enough to verify that there is no edge to the exit
5172 block in CFG already. Calling make_edge in such case would
5173 cause us to mark that edge as fake and remove it later. */
5175 if (flag_checking && split_at_insn == BB_END (bb))
5177 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
5178 gcc_assert (e == NULL);
5181 /* Note that the following may create a new basic block
5182 and renumber the existing basic blocks. */
5183 if (split_at_insn != BB_END (bb))
5185 e = split_block (bb, split_at_insn);
5186 if (e)
5187 blocks_split++;
5190 edge ne = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
5191 ne->probability = profile_probability::guessed_never ();
5194 if (insn == BB_HEAD (bb))
5195 break;
5199 if (blocks_split)
5200 verify_flow_info ();
5202 return blocks_split;
5205 /* Add COMP_RTX as a condition at the end of COND_BB. FIRST_HEAD is
5206 the conditional branch target, SECOND_HEAD should be the fall-thru;
5207 there is no need to handle the fall-thru here, as the loop versioning
5208 code handles it. SECOND_HEAD is a parameter because the tree-level
5209 version of this hook needs it, and the hook must have the same type. */
5210 static void
5211 rtl_lv_add_condition_to_bb (basic_block first_head ,
5212 basic_block second_head ATTRIBUTE_UNUSED,
5213 basic_block cond_bb, void *comp_rtx)
5215 rtx_code_label *label;
5216 rtx_insn *seq, *jump;
5217 rtx op0 = XEXP ((rtx)comp_rtx, 0);
5218 rtx op1 = XEXP ((rtx)comp_rtx, 1);
5219 enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
5220 machine_mode mode;
5223 label = block_label (first_head);
5224 mode = GET_MODE (op0);
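/* If OP0 has VOIDmode (e.g. it is a constant), fall back to OP1's
   mode. */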
5225 if (mode == VOIDmode)
5226 mode = GET_MODE (op1);
5228 start_sequence ();
5229 op0 = force_operand (op0, NULL_RTX);
5230 op1 = force_operand (op1, NULL_RTX);
5231 do_compare_rtx_and_jump (op0, op1, comp, 0, mode, NULL_RTX, NULL, label,
5232 profile_probability::uninitialized ());
5233 jump = get_last_insn ();
5234 JUMP_LABEL (jump) = label;
5235 LABEL_NUSES (label)++;
5236 seq = get_insns ();
5237 end_sequence ();
5239 /* Add the new cond, in the new head. */
5240 emit_insn_after (seq, BB_END (cond_bb));
5244 /* Given a block B with a conditional branch at its end, store the
5245 branch edge in *BRANCH_EDGE and the fall-thru edge in
5246 *FALLTHRU_EDGE. */
5247 static void
5248 rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
5249 edge *fallthru_edge)
5251 edge e = EDGE_SUCC (b, 0);
5253 if (e->flags & EDGE_FALLTHRU)
5255 *fallthru_edge = e;
5256 *branch_edge = EDGE_SUCC (b, 1);
5258 else
5260 *branch_edge = e;
5261 *fallthru_edge = EDGE_SUCC (b, 1);
5265 void
5266 init_rtl_bb_info (basic_block bb)
5268 gcc_assert (!bb->il.x.rtl);
5269 bb->il.x.head_ = NULL;
5270 bb->il.x.rtl = ggc_cleared_alloc<rtl_bb_info> ();
5273 static bool
5274 rtl_bb_info_initialized_p (basic_block bb)
5276 return bb->il.x.rtl;
5279 /* Returns true if it is possible to remove edge E by redirecting
5280 it to the destination of the other edge from E->src. */
5282 static bool
5283 rtl_can_remove_branch_p (const_edge e)
5285 const_basic_block src = e->src;
5286 const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
5287 const rtx_insn *insn = BB_END (src);
5288 rtx set;
5290 /* The conditions are taken from try_redirect_by_replacing_jump. */
5291 if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
5292 return false;
5294 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
5295 return false;
5297 if (BB_PARTITION (src) != BB_PARTITION (target))
5298 return false;
5300 if (!onlyjump_p (insn)
5301 || tablejump_p (insn, NULL, NULL))
5302 return false;
5304 set = single_set (insn);
5305 if (!set || side_effects_p (set))
5306 return false;
5308 return true;
5311 static basic_block
5312 rtl_duplicate_bb (basic_block bb, copy_bb_data *id)
5314 bb = cfg_layout_duplicate_bb (bb, id);
5315 bb->aux = NULL;
5316 return bb;
5319 /* Do book-keeping of basic block BB for the profile consistency checker.
5320 Store the counting in RECORD. */
5321 static void
5322 rtl_account_profile_record (basic_block bb, struct profile_record *record)
5324 rtx_insn *insn;
5325 FOR_BB_INSNS (bb, insn)
5326 if (INSN_P (insn))
5328 record->size += insn_cost (insn, false);
5329 if (profile_info)
5331 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa ().initialized_p ()
5332 && ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa ().nonzero_p ()
5333 && bb->count.ipa ().initialized_p ())
5334 record->time
5335 += insn_cost (insn, true) * bb->count.ipa ().to_gcov_type ();
5337 else if (bb->count.initialized_p ()
5338 && ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.initialized_p ())
5339 record->time
5340 += insn_cost (insn, true)
5341 * bb->count.to_sreal_scale
5342 (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count).to_double ();
5343 else
5344 record->time += insn_cost (insn, true);
5348 /* Implementation of CFG manipulation for linearized RTL. */
5349 struct cfg_hooks rtl_cfg_hooks = {
5350 "rtl",
5351 rtl_verify_flow_info,
5352 rtl_dump_bb,
5353 rtl_dump_bb_for_graph,
5354 rtl_create_basic_block,
5355 rtl_redirect_edge_and_branch,
5356 rtl_redirect_edge_and_branch_force,
5357 rtl_can_remove_branch_p,
5358 rtl_delete_block,
5359 rtl_split_block,
5360 rtl_move_block_after,
5361 rtl_can_merge_blocks, /* can_merge_blocks_p */
5362 rtl_merge_blocks,
5363 rtl_predict_edge,
5364 rtl_predicted_by_p,
5365 cfg_layout_can_duplicate_bb_p,
5366 rtl_duplicate_bb,
5367 rtl_split_edge,
5368 rtl_make_forwarder_block,
5369 rtl_tidy_fallthru_edge,
5370 rtl_force_nonfallthru,
5371 rtl_block_ends_with_call_p,
5372 rtl_block_ends_with_condjump_p,
5373 rtl_flow_call_edges_add,
5374 NULL, /* execute_on_growing_pred */
5375 NULL, /* execute_on_shrinking_pred */
5376 NULL, /* duplicate loop for trees */
5377 NULL, /* lv_add_condition_to_bb */
5378 NULL, /* lv_adjust_loop_header_phi*/
5379 NULL, /* extract_cond_bb_edges */
5380 NULL, /* flush_pending_stmts */
5381 rtl_block_empty_p, /* block_empty_p */
5382 rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
5383 rtl_account_profile_record,
5386 /* Implementation of CFG manipulation for cfg layout RTL, where
5387 basic blocks connected via fallthru edges do not have to be adjacent.
5388 This representation will hopefully become the default one in a future
5389 version of the compiler. */
5391 struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
5392 "cfglayout mode",
5393 rtl_verify_flow_info_1,
5394 rtl_dump_bb,
5395 rtl_dump_bb_for_graph,
5396 cfg_layout_create_basic_block,
5397 cfg_layout_redirect_edge_and_branch,
5398 cfg_layout_redirect_edge_and_branch_force,
5399 rtl_can_remove_branch_p,
5400 cfg_layout_delete_block,
5401 cfg_layout_split_block,
5402 rtl_move_block_after,
5403 cfg_layout_can_merge_blocks_p,
5404 cfg_layout_merge_blocks,
5405 rtl_predict_edge,
5406 rtl_predicted_by_p,
5407 cfg_layout_can_duplicate_bb_p,
5408 cfg_layout_duplicate_bb,
5409 cfg_layout_split_edge,
5410 rtl_make_forwarder_block,
5411 NULL, /* tidy_fallthru_edge */
5412 rtl_force_nonfallthru,
5413 rtl_block_ends_with_call_p,
5414 rtl_block_ends_with_condjump_p,
5415 rtl_flow_call_edges_add,
5416 NULL, /* execute_on_growing_pred */
5417 NULL, /* execute_on_shrinking_pred */
5418 duplicate_loop_body_to_header_edge, /* duplicate loop for rtl */
5419 rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
5420 NULL, /* lv_adjust_loop_header_phi*/
5421 rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
5422 NULL, /* flush_pending_stmts */
5423 rtl_block_empty_p, /* block_empty_p */
5424 rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
5425 rtl_account_profile_record,
5428 #include "gt-cfgrtl.h"
5430 #if __GNUC__ >= 10
5431 # pragma GCC diagnostic pop
5432 #endif