/* Control flow graph manipulation code for GNU compiler.
   Copyright (C) 1987-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file contains low-level functions to manipulate and analyze the CFG
   that are aware of the RTL intermediate language.

   Available functionality:
     - Basic CFG/RTL manipulation API documented in cfghooks.h
     - CFG-aware instruction chain manipulation
         delete_insn, delete_insn_chain
     - Edge splitting and committing to edges
         insert_insn_on_edge, commit_edge_insertions
     - CFG updating after insn simplification
         purge_dead_edges, purge_all_dead_edges
     - CFG fixing after coarse manipulation
         fixup_abnormal_edges

   Functions not intended for generic use:
     - Infrastructure to quickly determine the basic block for an insn
         compute_bb_for_insn, update_bb_for_insn, set_block_for_insn
     - Edge redirection with updating and optimizing of insn chain
         block_label, tidy_fallthru_edge, force_nonfallthru  */

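/* Illustrative usage sketch, not part of the original source: a pass that
   simplifies the jump ending a block BB is expected to let the CFG catch
   up via the API above; cleanup_cfg is from cfgcleanup.h and 0 requests no
   extra cleanup flags.

     if (purge_dead_edges (bb))
       cleanup_cfg (0);  */
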
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "cfghooks.h"
#include "df.h"
#include "insn-config.h"
#include "memmodel.h"
#include "emit-rtl.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "bb-reorder.h"
#include "rtl-error.h"
#include "insn-attr.h"
#include "dojump.h"
#include "expr.h"
#include "cfgloop.h"
#include "tree-pass.h"
#include "print-rtl.h"
#include "rtl-iter.h"
#include "gimplify.h"
#include "profile.h"
#include "sreal.h"

/* Disable warnings about missing quoting in GCC diagnostics.  */
#if __GNUC__ >= 10
# pragma GCC diagnostic push
# pragma GCC diagnostic ignored "-Wformat-diag"
#endif

/* Holds the interesting leading and trailing notes for the function.
   Only applicable if the CFG is in cfglayout mode.  */
static GTY(()) rtx_insn *cfg_layout_function_footer;
static GTY(()) rtx_insn *cfg_layout_function_header;

static rtx_insn *skip_insns_after_block (basic_block);
static void record_effective_endpoints (void);
static void fixup_reorder_chain (void);

void verify_insn_chain (void);
static void fixup_fallthru_exit_predecessor (void);
static bool can_delete_note_p (const rtx_note *);
static bool can_delete_label_p (const rtx_code_label *);
static basic_block rtl_split_edge (edge);
static bool rtl_move_block_after (basic_block, basic_block);
static bool rtl_verify_flow_info (void);
static basic_block cfg_layout_split_block (basic_block, void *);
static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
static void cfg_layout_delete_block (basic_block);
static void rtl_delete_block (basic_block);
static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
static edge rtl_redirect_edge_and_branch (edge, basic_block);
static basic_block rtl_split_block (basic_block, void *);
static void rtl_dump_bb (FILE *, basic_block, int, dump_flags_t);
static bool rtl_verify_flow_info_1 (void);
static void rtl_make_forwarder_block (edge);
static bool rtl_bb_info_initialized_p (basic_block bb);

/* Return true if NOTE is not one of the ones that must be kept paired,
   so that we may simply delete it.  */

static bool
can_delete_note_p (const rtx_note *note)
{
  switch (NOTE_KIND (note))
    {
    case NOTE_INSN_DELETED:
    case NOTE_INSN_BASIC_BLOCK:
    case NOTE_INSN_EPILOGUE_BEG:
      return true;

    default:
      return false;
    }
}

/* True if a given label can be deleted.  */

static bool
can_delete_label_p (const rtx_code_label *label)
{
  return (!LABEL_PRESERVE_P (label)
          /* User declared labels must be preserved.  */
          && LABEL_NAME (label) == 0
          && !vec_safe_contains<rtx_insn *> (forced_labels,
                                             const_cast<rtx_code_label *> (label)));
}

/* Delete INSN by patching it out.  */

void
delete_insn (rtx_insn *insn)
{
  rtx note;
  bool really_delete = true;

  if (LABEL_P (insn))
    {
      /* Some labels can't be directly removed from the INSN chain, as they
         might be referenced via variables, the constant pool etc.
         Convert them to the special NOTE_INSN_DELETED_LABEL note.  */
      if (! can_delete_label_p (as_a <rtx_code_label *> (insn)))
        {
          const char *name = LABEL_NAME (insn);
          basic_block bb = BLOCK_FOR_INSN (insn);
          rtx_insn *bb_note = NEXT_INSN (insn);

          really_delete = false;
          PUT_CODE (insn, NOTE);
          NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
          NOTE_DELETED_LABEL_NAME (insn) = name;

          /* If the note following the label starts a basic block, and the
             label is a member of the same basic block, interchange the two.  */
          if (bb_note != NULL_RTX
              && NOTE_INSN_BASIC_BLOCK_P (bb_note)
              && bb != NULL
              && bb == BLOCK_FOR_INSN (bb_note))
            {
              reorder_insns_nobb (insn, insn, bb_note);
              BB_HEAD (bb) = bb_note;
              if (BB_END (bb) == bb_note)
                BB_END (bb) = insn;
            }
        }

      remove_node_from_insn_list (insn, &nonlocal_goto_handler_labels);
    }

  if (really_delete)
    {
      /* If this insn has already been deleted, something is very wrong.  */
      gcc_assert (!insn->deleted ());
      if (INSN_P (insn))
        df_insn_delete (insn);
      remove_insn (insn);
      insn->set_deleted ();
    }

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (JUMP_P (insn))
    {
      if (JUMP_LABEL (insn)
          && LABEL_P (JUMP_LABEL (insn)))
        LABEL_NUSES (JUMP_LABEL (insn))--;

      /* If there are more targets, remove them too.  */
      while ((note
              = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
             && LABEL_P (XEXP (note, 0)))
        {
          LABEL_NUSES (XEXP (note, 0))--;
          remove_note (insn, note);
        }
    }

  /* Also if deleting any insn that references a label as an operand.  */
  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
         && LABEL_P (XEXP (note, 0)))
    {
      LABEL_NUSES (XEXP (note, 0))--;
      remove_note (insn, note);
    }

  if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (insn))
    {
      rtvec vec = table->get_labels ();
      int len = GET_NUM_ELEM (vec);
      int i;

      for (i = 0; i < len; i++)
        {
          rtx label = XEXP (RTVEC_ELT (vec, i), 0);

          /* When deleting code in bulk (e.g. removing many unreachable
             blocks) we can delete a label that's a target of the vector
             before deleting the vector itself.  */
          if (!NOTE_P (label))
            LABEL_NUSES (label)--;
        }
    }
}

/* Like delete_insn but also purge dead edges from BB.
   Return true if any edges are eliminated.  */

bool
delete_insn_and_edges (rtx_insn *insn)
{
  bool purge = false;

  if (NONDEBUG_INSN_P (insn) && BLOCK_FOR_INSN (insn))
    {
      basic_block bb = BLOCK_FOR_INSN (insn);
      if (BB_END (bb) == insn)
        purge = true;
      else if (DEBUG_INSN_P (BB_END (bb)))
        for (rtx_insn *dinsn = NEXT_INSN (insn);
             DEBUG_INSN_P (dinsn); dinsn = NEXT_INSN (dinsn))
          if (BB_END (bb) == dinsn)
            {
              purge = true;
              break;
            }
    }
  delete_insn (insn);
  if (purge)
    return purge_dead_edges (BLOCK_FOR_INSN (insn));
  return false;
}

/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be paired.  If CLEAR_BB is true, set the bb field to NULL
   for insns that cannot be removed.  */

void
delete_insn_chain (rtx start, rtx_insn *finish, bool clear_bb)
{
  /* Unchain the insns one by one.  It would be quicker to delete all of these
     with a single unchaining, rather than one at a time, but we need to keep
     the NOTEs.  */
  rtx_insn *current = finish;
  while (1)
    {
      rtx_insn *prev = PREV_INSN (current);
      if (NOTE_P (current) && !can_delete_note_p (as_a <rtx_note *> (current)))
        ;
      else
        delete_insn (current);

      if (clear_bb && !current->deleted ())
        set_block_for_insn (current, NULL);

      if (current == start)
        break;
      current = prev;
    }
}

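/* Illustrative usage sketch, not part of the original source:
   rtl_delete_block below uses this routine to drop every removable insn
   of a dead block B while keeping paired notes:

     delete_insn_chain (BB_HEAD (b), get_last_bb_insn (b), true);  */
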
/* Create a new basic block consisting of the instructions between HEAD and
   END inclusive.  This function is designed to allow fast BB construction -
   it reuses the note and basic block struct in BB_NOTE, if any, does not
   grow the BASIC_BLOCK chain, and should be used directly only by CFG
   construction code.  END can be NULL to create a new empty basic block
   before HEAD.  Both END and HEAD can be NULL to create a basic block at
   the end of the INSN chain.  AFTER is the basic block we should be put
   after.  */

basic_block
create_basic_block_structure (rtx_insn *head, rtx_insn *end, rtx_note *bb_note,
                              basic_block after)
{
  basic_block bb;

  if (bb_note
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      rtx_insn *after;

      if (LABEL_P (head))
        after = head;
      else
        {
          after = PREV_INSN (head);
          head = bb_note;
        }

      if (after != bb_note && NEXT_INSN (after) != bb_note)
        reorder_insns_nobb (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.  */

      bb = alloc_block ();

      init_rtl_bb_info (bb);
      if (!head && !end)
        head = end = bb_note
          = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
      else if (LABEL_P (head) && end)
        {
          bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
          if (head == end)
            end = bb_note;
        }
      else
        {
          bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
          head = bb_note;
          if (!end)
            end = head;
        }

      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  BB_HEAD (bb) = head;
  BB_END (bb) = end;
  bb->index = last_basic_block_for_fn (cfun)++;
  bb->flags = BB_NEW | BB_RTL;
  link_block (bb, after);
  SET_BASIC_BLOCK_FOR_FN (cfun, bb->index, bb);
  df_bb_refs_record (bb->index, false);
  update_bb_for_insn (bb);
  BB_SET_PARTITION (bb, BB_UNPARTITIONED);

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;

  return bb;
}

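/* Illustrative usage sketch, not part of the original source: most callers
   go through the create_basic_block hook rather than calling the routine
   above directly, e.g. the way rtl_split_block below carves the tail off
   BB, where INSN (hypothetical here) is the last insn that should stay:

     basic_block new_bb
       = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);  */
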
/* Create a new basic block consisting of the instructions between HEAD and
   END, and place it into the BB chain after block AFTER.  END can be NULL
   to create a new empty basic block before HEAD.  Both END and HEAD can be
   NULL to create a basic block at the end of the INSN chain.  */

static basic_block
rtl_create_basic_block (void *headp, void *endp, basic_block after)
{
  rtx_insn *head = (rtx_insn *) headp;
  rtx_insn *end = (rtx_insn *) endp;
  basic_block bb;

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      >= basic_block_info_for_fn (cfun)->length ())
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
                           last_basic_block_for_fn (cfun) + 1);

  n_basic_blocks_for_fn (cfun)++;

  bb = create_basic_block_structure (head, end, NULL, after);
  bb->aux = NULL;
  return bb;
}

static basic_block
cfg_layout_create_basic_block (void *head, void *end, basic_block after)
{
  basic_block newbb = rtl_create_basic_block (head, end, after);

  return newbb;
}

/* Delete the insns in a (non-live) block.  We physically delete every
   non-deleted-note insn, and update the flow graph appropriately.  */

/* ??? Preserving all such notes strikes me as wrong.  It would be nice
   to post-process the stream to remove empty blocks, loops, ranges, etc.  */

static void
rtl_delete_block (basic_block b)
{
  rtx_insn *insn, *end;

  /* If the head of this block is a CODE_LABEL, then it might be the
     label for an exception handler which can't be reached.  We need
     to remove the label from the exception_handler_label list.  */
  insn = BB_HEAD (b);

  end = get_last_bb_insn (b);

  /* Selectively delete the entire chain.  */
  BB_HEAD (b) = NULL;
  delete_insn_chain (insn, end, true);

  if (dump_file)
    fprintf (dump_file, "deleting block %d\n", b->index);
  df_bb_delete (b->index);
}

/* Records the basic block struct in BLOCK_FOR_INSN for every insn.  */

void
compute_bb_for_insn (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *end = BB_END (bb);
      rtx_insn *insn;

      for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
        {
          BLOCK_FOR_INSN (insn) = bb;
          if (insn == end)
            break;
        }
    }
}

/* Release the basic_block_for_insn array.  */

void
free_bb_for_insn (void)
{
  rtx_insn *insn;
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (!BARRIER_P (insn))
      BLOCK_FOR_INSN (insn) = NULL;
}

namespace {

const pass_data pass_data_free_cfg =
{
  RTL_PASS, /* type */
  "*free_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  PROP_cfg, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_free_cfg : public rtl_opt_pass
{
public:
  pass_free_cfg (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_free_cfg, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute (function *) final override;

}; // class pass_free_cfg

unsigned int
pass_free_cfg::execute (function *)
{
  /* The resource.cc machinery uses DF but the CFG isn't guaranteed to be
     valid at that point so it would be too late to call df_analyze.  */
  if (DELAY_SLOTS && optimize > 0 && flag_delayed_branch)
    {
      df_note_add_problem ();
      df_analyze ();
    }

  if (crtl->has_bb_partition)
    insert_section_boundary_note ();

  free_bb_for_insn ();
  return 0;
}

} // anon namespace

rtl_opt_pass *
make_pass_free_cfg (gcc::context *ctxt)
{
  return new pass_free_cfg (ctxt);
}

/* Return the insn to emit after when we want to emit code at the entry of
   the function.  */
rtx_insn *
entry_of_function (void)
{
  return (n_basic_blocks_for_fn (cfun) > NUM_FIXED_BLOCKS ?
          BB_HEAD (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) : get_insns ());
}

/* Emit INSN at the entry point of the function, ensuring that it is only
   executed once per function.  */
void
emit_insn_at_entry (rtx insn)
{
  edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
  edge e = ei_safe_edge (ei);
  gcc_assert (e->flags & EDGE_FALLTHRU);

  insert_insn_on_edge (insn, e);
  commit_edge_insertions ();
}

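/* Illustrative usage sketch, not part of the original source: one-time
   setup code can be emitted this way; gen_nop () merely stands in for a
   real insn pattern.

     emit_insn_at_entry (gen_nop ());  */
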
/* Update BLOCK_FOR_INSN of insns between BEGIN and END
   (or BARRIER if found) and notify df of the bb change.
   The insn chain range is inclusive
   (i.e. both BEGIN and END will be updated).  */

static void
update_bb_for_insn_chain (rtx_insn *begin, rtx_insn *end, basic_block bb)
{
  rtx_insn *insn;

  end = NEXT_INSN (end);
  for (insn = begin; insn != end; insn = NEXT_INSN (insn))
    if (!BARRIER_P (insn))
      df_insn_change_bb (insn, bb);
}

/* Update BLOCK_FOR_INSN of insns in BB to BB,
   and notify df of the change.  */

void
update_bb_for_insn (basic_block bb)
{
  update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
}

/* Like active_insn_p, except keep the return value use or clobber around
   even after reload.  */

static bool
flow_active_insn_p (const rtx_insn *insn)
{
  if (active_insn_p (insn))
    return true;

  /* A clobber of the function return value exists for buggy
     programs that fail to return a value.  Its effect is to
     keep the return value from being live across the entire
     function.  If we allow it to be skipped, we introduce the
     possibility for register lifetime confusion.
     Similarly, keep a USE of the function return value, otherwise
     the USE is dropped and we could fail to thread a jump if the USE
     appears on some paths and not on others, see PR90257.  */
  if ((GET_CODE (PATTERN (insn)) == CLOBBER
       || GET_CODE (PATTERN (insn)) == USE)
      && REG_P (XEXP (PATTERN (insn), 0))
      && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
    return true;

  return false;
}

/* Return true if the block has no effect and only forwards control flow to
   its single destination.  */

bool
contains_no_active_insn_p (const_basic_block bb)
{
  rtx_insn *insn;

  if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
      || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || !single_succ_p (bb)
      || (single_succ_edge (bb)->flags & EDGE_FAKE) != 0)
    return false;

  for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
    if (INSN_P (insn) && flow_active_insn_p (insn))
      return false;

  return (!INSN_P (insn)
          || (JUMP_P (insn) && simplejump_p (insn))
          || !flow_active_insn_p (insn));
}

/* Likewise, but protect loop latches, headers and preheaders.  */
/* FIXME: Make this a cfg hook.  */

bool
forwarder_block_p (const_basic_block bb)
{
  if (!contains_no_active_insn_p (bb))
    return false;

  /* Protect loop latches, headers and preheaders.  */
  if (current_loops)
    {
      basic_block dest;
      if (bb->loop_father->header == bb)
        return false;
      dest = EDGE_SUCC (bb, 0)->dest;
      if (dest->loop_father->header == dest)
        return false;
    }

  return true;
}

/* Return true if we can reach TARGET from SRC by falling through.  */
/* FIXME: Make this a cfg hook, the result is only valid in cfgrtl mode.  */

bool
can_fallthru (basic_block src, basic_block target)
{
  rtx_insn *insn = BB_END (src);
  rtx_insn *insn2;
  edge e;
  edge_iterator ei;

  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return true;
  if (src->next_bb != target)
    return false;

  /* ??? Later we may add code to move jump tables offline.  */
  if (tablejump_p (insn, NULL, NULL))
    return false;

  FOR_EACH_EDGE (e, ei, src->succs)
    if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
        && e->flags & EDGE_FALLTHRU)
      return false;

  insn2 = BB_HEAD (target);
  if (!active_insn_p (insn2))
    insn2 = next_active_insn (insn2);

  return next_active_insn (insn) == insn2;
}

/* Return true if we could reach TARGET from SRC by falling through,
   if the target were made adjacent.  If we already have a fall-through
   edge to the exit block, we can't do that.  */
static bool
could_fall_through (basic_block src, basic_block target)
{
  edge e;
  edge_iterator ei;

  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return true;
  FOR_EACH_EDGE (e, ei, src->succs)
    if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
        && e->flags & EDGE_FALLTHRU)
      return false;
  return true;
}

/* Return the NOTE_INSN_BASIC_BLOCK of BB.  */
rtx_note *
bb_note (basic_block bb)
{
  rtx_insn *note;

  note = BB_HEAD (bb);
  if (LABEL_P (note))
    note = NEXT_INSN (note);

  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note));
  return as_a <rtx_note *> (note);
}

/* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
   note associated with the BLOCK.  */

static rtx_insn *
first_insn_after_basic_block_note (basic_block block)
{
  rtx_insn *insn;

  /* Get the first instruction in the block.  */
  insn = BB_HEAD (block);

  if (insn == NULL_RTX)
    return NULL;
  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);
  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));

  return NEXT_INSN (insn);
}

/* Creates a new basic block just after basic block BB by splitting off
   everything after the specified instruction INSNP.  */

static basic_block
rtl_split_block (basic_block bb, void *insnp)
{
  basic_block new_bb;
  rtx_insn *insn = (rtx_insn *) insnp;
  edge e;
  edge_iterator ei;

  if (!insn)
    {
      insn = first_insn_after_basic_block_note (bb);

      if (insn)
        {
          rtx_insn *next = insn;

          insn = PREV_INSN (insn);

          /* If the block contains only debug insns, insn would have
             been NULL in a non-debug compilation, and then we'd end
             up emitting a DELETED note.  For -fcompare-debug
             stability, emit the note too.  */
          if (insn != BB_END (bb)
              && DEBUG_INSN_P (next)
              && DEBUG_INSN_P (BB_END (bb)))
            {
              while (next != BB_END (bb) && DEBUG_INSN_P (next))
                next = NEXT_INSN (next);

              if (next == BB_END (bb))
                emit_note_after (NOTE_INSN_DELETED, next);
            }
        }
      else
        insn = get_last_insn ();
    }

  /* We probably should check type of the insn so that we do not create
     inconsistent cfg.  It is checked in verify_flow_info anyway, so do not
     bother.  */
  if (insn == BB_END (bb))
    emit_note_after (NOTE_INSN_DELETED, insn);

  /* Create the new basic block.  */
  new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
  BB_COPY_PARTITION (new_bb, bb);
  BB_END (bb) = insn;

  /* Redirect the outgoing edges.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* The new block starts off being dirty.  */
  df_set_bb_dirty (bb);
  return new_bb;
}

/* Return true if LOC1 and LOC2 are equivalent for
   unique_locus_on_edge_between_p purposes.  */

static bool
loc_equal (location_t loc1, location_t loc2)
{
  if (loc1 == loc2)
    return true;

  expanded_location loce1 = expand_location (loc1);
  expanded_location loce2 = expand_location (loc2);

  if (loce1.line != loce2.line
      || loce1.column != loce2.column
      || loce1.data != loce2.data)
    return false;
  if (loce1.file == loce2.file)
    return true;
  return (loce1.file != NULL
          && loce2.file != NULL
          && filename_cmp (loce1.file, loce2.file) == 0);
}

/* Return true if the single edge between blocks A and B is the only place
   in RTL which holds some unique locus.  */

static bool
unique_locus_on_edge_between_p (basic_block a, basic_block b)
{
  const location_t goto_locus = EDGE_SUCC (a, 0)->goto_locus;
  rtx_insn *insn, *end;

  if (LOCATION_LOCUS (goto_locus) == UNKNOWN_LOCATION)
    return false;

  /* First scan block A backward.  */
  insn = BB_END (a);
  end = PREV_INSN (BB_HEAD (a));
  while (insn != end && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
    insn = PREV_INSN (insn);

  if (insn != end && loc_equal (INSN_LOCATION (insn), goto_locus))
    return false;

  /* Then scan block B forward.  */
  insn = BB_HEAD (b);
  if (insn)
    {
      end = NEXT_INSN (BB_END (b));
      while (insn != end && !NONDEBUG_INSN_P (insn))
        insn = NEXT_INSN (insn);

      if (insn != end && INSN_HAS_LOCATION (insn)
          && loc_equal (INSN_LOCATION (insn), goto_locus))
        return false;
    }

  return true;
}

/* If the single edge between blocks A and B is the only place in RTL which
   holds some unique locus, emit a nop with that locus between the blocks.  */

static void
emit_nop_for_unique_locus_between (basic_block a, basic_block b)
{
  if (!unique_locus_on_edge_between_p (a, b))
    return;

  BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
  INSN_LOCATION (BB_END (a)) = EDGE_SUCC (a, 0)->goto_locus;
}

/* Blocks A and B are to be merged into a single block A.  The insns
   are already contiguous.  */

static void
rtl_merge_blocks (basic_block a, basic_block b)
{
  /* If B is a forwarder block whose outgoing edge has no location, we'll
     propagate the locus of the edge between A and B onto it.  */
  const bool forward_edge_locus
    = (b->flags & BB_FORWARDER_BLOCK) != 0
      && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION;
  rtx_insn *b_head = BB_HEAD (b), *b_end = BB_END (b), *a_end = BB_END (a);
  rtx_insn *del_first = NULL, *del_last = NULL;
  rtx_insn *b_debug_start = b_end, *b_debug_end = b_end;
  bool b_empty = false;

  if (dump_file)
    fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
             a->index);

  while (DEBUG_INSN_P (b_end))
    b_end = PREV_INSN (b_debug_start = b_end);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (b_head))
    {
      /* Detect basic blocks with nothing but a label.  This can happen
         in particular at the end of a function.  */
      if (b_head == b_end)
        b_empty = true;

      del_first = del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* Delete the basic block note and handle blocks containing just that
     note.  */
  if (NOTE_INSN_BASIC_BLOCK_P (b_head))
    {
      if (b_head == b_end)
        b_empty = true;
      if (! del_last)
        del_first = b_head;

      del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  if (JUMP_P (a_end))
    {
      rtx_insn *prev;

      for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
        if (!NOTE_P (prev)
            || NOTE_INSN_BASIC_BLOCK_P (prev)
            || prev == BB_HEAD (a))
          break;

      del_first = a_end;

      a_end = PREV_INSN (del_first);
    }
  else if (BARRIER_P (NEXT_INSN (a_end)))
    del_first = NEXT_INSN (a_end);

  /* Delete everything marked above as well as crap that might be
     hanging out between the two blocks.  */
  BB_END (a) = a_end;
  BB_HEAD (b) = b_empty ? NULL : b_head;
  delete_insn_chain (del_first, del_last, true);

  /* If not optimizing, preserve the locus of the single edge between
     blocks A and B if necessary by emitting a nop.  */
  if (!optimize
      && !forward_edge_locus
      && !DECL_IGNORED_P (current_function_decl))
    {
      emit_nop_for_unique_locus_between (a, b);
      a_end = BB_END (a);
    }

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      update_bb_for_insn_chain (a_end, b_debug_end, a);

      BB_END (a) = b_debug_end;
      BB_HEAD (b) = NULL;
    }
  else if (b_end != b_debug_end)
    {
      /* Move any deleted labels and other notes between the end of A
         and the debug insns that make up B after the debug insns,
         bringing the debug insns into A while keeping the notes after
         the end of A.  */
      if (NEXT_INSN (a_end) != b_debug_start)
        reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
                            b_debug_end);
      update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
      BB_END (a) = b_debug_end;
    }

  df_bb_delete (b->index);

  if (forward_edge_locus)
    EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
}

/* Return true when blocks A and B can be merged.  */

static bool
rtl_can_merge_blocks (basic_block a, basic_block b)
{
  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.cc:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (a) != BB_PARTITION (b))
    return false;

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* There must be exactly one edge in between the blocks.  */
  return (single_succ_p (a)
          && single_succ (a) == b
          && single_pred_p (b)
          && a != b
          /* Must be simple edge.  */
          && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
          && a->next_bb == b
          && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
          && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
          /* If the jump insn has side effects,
             we can't kill the edge.  */
          && (!JUMP_P (BB_END (a))
              || (reload_completed
                  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
}

/* Return the label in the head of basic block BLOCK.  Create one if it
   doesn't exist.  */

rtx_code_label *
block_label (basic_block block)
{
  if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return NULL;

  if (!LABEL_P (BB_HEAD (block)))
    {
      BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
    }

  return as_a <rtx_code_label *> (BB_HEAD (block));
}

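/* Illustrative usage sketch, not part of the original source: redirection
   code in this file pairs block_label with redirect_jump from jump.cc;
   JUMP here is a hypothetical simplejump that should now target TARGET,
   and only redirection to the exit block is expected to fail:

     if (!redirect_jump (jump, block_label (target), 0))
       gcc_assert (target == EXIT_BLOCK_PTR_FOR_FN (cfun));  */
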
/* Remove all barriers from BB_FOOTER of a BB.  */

static void
remove_barriers_from_footer (basic_block bb)
{
  rtx_insn *insn = BB_FOOTER (bb);

  /* Remove barriers but keep jumptables.  */
  while (insn)
    {
      if (BARRIER_P (insn))
        {
          if (PREV_INSN (insn))
            SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
          else
            BB_FOOTER (bb) = NEXT_INSN (insn);
          if (NEXT_INSN (insn))
            SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
        }
      if (LABEL_P (insn))
        return;
      insn = NEXT_INSN (insn);
    }
}

/* Attempt to perform edge redirection by replacing a possibly complex jump
   instruction by an unconditional jump, or by removing the jump completely.
   This can apply only if all edges now point to the same block.  The
   parameters and return values are equivalent to
   redirect_edge_and_branch.  */

edge
try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
{
  basic_block src = e->src;
  rtx_insn *insn = BB_END (src);
  rtx set;
  bool fallthru = false;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.cc:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (src) != BB_PARTITION (target))
    return NULL;

  /* We can replace or remove a complex jump only when we have exactly
     two edges.  Also, if we have exactly one outgoing edge, we can
     redirect that.  */
  if (EDGE_COUNT (src->succs) >= 3
      /* Verify that all targets will be TARGET.  Specifically, the
         edge that is not E must also go to TARGET.  */
      || (EDGE_COUNT (src->succs) == 2
          && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
    return NULL;

  if (!onlyjump_p (insn))
    return NULL;
  if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
    return NULL;

  /* Avoid removing a branch with side effects.  */
  set = single_set (insn);
  if (!set || side_effects_p (set))
    return NULL;

  /* See if we can create the fallthru edge.  */
  if (in_cfglayout || can_fallthru (src, target))
    {
      if (dump_file)
        fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
      fallthru = true;

      /* Selectively unlink whole insn chain.  */
      if (in_cfglayout)
        {
          delete_insn_chain (insn, BB_END (src), false);
          remove_barriers_from_footer (src);
        }
      else
        delete_insn_chain (insn, PREV_INSN (BB_HEAD (target)), false);
    }

  /* If this is already a simplejump, redirect it.  */
  else if (simplejump_p (insn))
    {
      if (e->dest == target)
        return NULL;
      if (dump_file)
        fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
                 INSN_UID (insn), e->dest->index, target->index);
      if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
                          block_label (target), 0))
        {
          gcc_assert (target == EXIT_BLOCK_PTR_FOR_FN (cfun));
          return NULL;
        }
    }

  /* Cannot do anything for target exit block.  */
  else if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return NULL;

  /* Or replace a possibly complicated jump insn by a simple jump insn.  */
  else
    {
      rtx_code_label *target_label = block_label (target);
      rtx_insn *barrier;
      rtx_insn *label;
      rtx_jump_table_data *table;

      emit_jump_insn_after_noloc (targetm.gen_jump (target_label), insn);
      JUMP_LABEL (BB_END (src)) = target_label;
      LABEL_NUSES (target_label)++;
      if (dump_file)
        fprintf (dump_file, "Replacing insn %i by jump %i\n",
                 INSN_UID (insn), INSN_UID (BB_END (src)));

      delete_insn_chain (insn, insn, false);

      /* Recognize a tablejump that we are converting to a
         simple jump and remove its associated CODE_LABEL
         and ADDR_VEC or ADDR_DIFF_VEC.  */
      if (tablejump_p (insn, &label, &table))
        delete_insn_chain (label, table, false);

      barrier = next_nonnote_nondebug_insn (BB_END (src));
      if (!barrier || !BARRIER_P (barrier))
        emit_barrier_after (BB_END (src));
      else
        {
          if (barrier != NEXT_INSN (BB_END (src)))
            {
              /* Move the jump before barrier so that the notes
                 which originally were or were created before jump table are
                 inside the basic block.  */
              rtx_insn *new_insn = BB_END (src);

              update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
                                        PREV_INSN (barrier), src);

              SET_NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
              SET_PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);

              SET_NEXT_INSN (new_insn) = barrier;
              SET_NEXT_INSN (PREV_INSN (barrier)) = new_insn;

              SET_PREV_INSN (new_insn) = PREV_INSN (barrier);
              SET_PREV_INSN (barrier) = new_insn;
            }
        }
    }

  /* Keep only one edge out and set proper flags.  */
  if (!single_succ_p (src))
    remove_edge (e);
  gcc_assert (single_succ_p (src));

  e = single_succ_edge (src);
  if (fallthru)
    e->flags = EDGE_FALLTHRU;
  else
    e->flags = 0;

  e->probability = profile_probability::always ();

  if (e->dest != target)
    redirect_edge_succ (e, target);
  return e;
}

/* Subroutine of redirect_branch_edge that tries to patch the jump
   instruction INSN so that it reaches block NEW_BB.  Do this
   only when it originally reached the block whose label is OLD_LABEL.
   Return true if this worked or the original target wasn't OLD_LABEL;
   return false if redirection doesn't work.  */

static bool
patch_jump_insn (rtx_insn *insn, rtx_insn *old_label, basic_block new_bb)
{
  rtx_jump_table_data *table;
  rtx tmp;
  /* Recognize a tablejump and adjust all matching cases.  */
  if (tablejump_p (insn, NULL, &table))
    {
      rtvec vec;
      int j;
      rtx_code_label *new_label = block_label (new_bb);

      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
        return false;
      vec = table->get_labels ();

      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
        if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
          {
            RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
            --LABEL_NUSES (old_label);
            ++LABEL_NUSES (new_label);
          }

      /* Handle casesi dispatch insns.  */
      if ((tmp = tablejump_casesi_pattern (insn)) != NULL_RTX
          && label_ref_label (XEXP (SET_SRC (tmp), 2)) == old_label)
        {
          XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
                                                       new_label);
          --LABEL_NUSES (old_label);
          ++LABEL_NUSES (new_label);
        }
    }
  else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
    {
      int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
      rtx note;

      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
        return false;
      rtx_code_label *new_label = block_label (new_bb);

      for (i = 0; i < n; ++i)
        {
          rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
          gcc_assert (GET_CODE (old_ref) == LABEL_REF);
          if (XEXP (old_ref, 0) == old_label)
            {
              ASM_OPERANDS_LABEL (tmp, i)
                = gen_rtx_LABEL_REF (Pmode, new_label);
              --LABEL_NUSES (old_label);
              ++LABEL_NUSES (new_label);
            }
        }

      if (JUMP_LABEL (insn) == old_label)
        {
          JUMP_LABEL (insn) = new_label;
          note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
          if (note)
            remove_note (insn, note);
        }
      else
        {
          note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
          if (note)
            remove_note (insn, note);
          if (JUMP_LABEL (insn) != new_label
              && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
            add_reg_note (insn, REG_LABEL_TARGET, new_label);
        }
      while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
             != NULL_RTX)
        XEXP (note, 0) = new_label;
    }
  else
    {
      /* ?? We may play the games with moving the named labels from
         one basic block to the other in case only one computed_jump is
         available.  */
      if (computed_jump_p (insn)
          /* A return instruction can't be redirected.  */
          || returnjump_p (insn))
        return false;

      if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
        {
          /* If the insn doesn't go where we think, we're confused.  */
          gcc_assert (JUMP_LABEL (insn) == old_label);

          /* If the substitution doesn't succeed, die.  This can happen
             if the back end emitted unrecognizable instructions or if
             target is exit block on some arches.  Or for crossing
             jumps.  */
          if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
                              block_label (new_bb), 0))
            {
              gcc_assert (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
                          || CROSSING_JUMP_P (insn));
              return false;
            }
        }
    }
  return true;
}

/* Redirect an edge representing a branch of an (un)conditional jump or
   tablejump; return NULL on failure.  */
static edge
redirect_branch_edge (edge e, basic_block target)
{
  rtx_insn *old_label = BB_HEAD (e->dest);
  basic_block src = e->src;
  rtx_insn *insn = BB_END (src);

  /* We can only redirect non-fallthru edges of a jump insn.  */
  if (e->flags & EDGE_FALLTHRU)
    return NULL;
  else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
    return NULL;

  if (!currently_expanding_to_rtl)
    {
      if (!patch_jump_insn (as_a <rtx_jump_insn *> (insn), old_label, target))
        return NULL;
    }
  else
    /* When expanding this BB might actually contain multiple
       jumps (i.e. not yet split by find_many_sub_basic_blocks).
       Redirect all of those that match our label.  */
    FOR_BB_INSNS (src, insn)
      if (JUMP_P (insn) && !patch_jump_insn (as_a <rtx_jump_insn *> (insn),
                                             old_label, target))
        return NULL;

  if (dump_file)
    fprintf (dump_file, "Edge %i->%i redirected to %i\n",
             e->src->index, e->dest->index, target->index);

  if (e->dest != target)
    e = redirect_edge_succ_nodup (e, target);

  return e;
}

/* Called when edge E has been redirected to a new destination,
   in order to update the region crossing flag on the edge and
   jump.  */

static void
fixup_partition_crossing (edge e)
{
  if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun) || e->dest
      == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return;
  /* If we redirected an existing edge, it may already be marked
     crossing, even though the new src is missing a reg crossing note.
     But make sure reg crossing note doesn't already exist before
     inserting.  */
  if (BB_PARTITION (e->src) != BB_PARTITION (e->dest))
    {
      e->flags |= EDGE_CROSSING;
      if (JUMP_P (BB_END (e->src)))
        CROSSING_JUMP_P (BB_END (e->src)) = 1;
    }
  else if (BB_PARTITION (e->src) == BB_PARTITION (e->dest))
    {
      e->flags &= ~EDGE_CROSSING;
      /* Remove the section crossing note from jump at end of
         src if it exists, and if no other successors are
         still crossing.  */
      if (JUMP_P (BB_END (e->src)) && CROSSING_JUMP_P (BB_END (e->src)))
        {
          bool has_crossing_succ = false;
          edge e2;
          edge_iterator ei;
          FOR_EACH_EDGE (e2, ei, e->src->succs)
            {
              has_crossing_succ |= (e2->flags & EDGE_CROSSING);
              if (has_crossing_succ)
                break;
            }
          if (!has_crossing_succ)
            CROSSING_JUMP_P (BB_END (e->src)) = 0;
        }
    }
}

/* Called when block BB has been reassigned to the cold partition,
   because it is now dominated by another cold block,
   to ensure that the region crossing attributes are updated.  */

static void
fixup_new_cold_bb (basic_block bb)
{
  edge e;
  edge_iterator ei;

  /* This is called when a hot bb is found to now be dominated
     by a cold bb and therefore needs to become cold.  Therefore,
     its preds will no longer be region crossing.  Any non-dominating
     preds that were previously hot would also have become cold
     in the caller for the same region.  Any preds that were previously
     region-crossing will be adjusted in fixup_partition_crossing.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      fixup_partition_crossing (e);
    }

  /* Possibly need to make bb's successor edges region crossing,
     or remove stale region crossing.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      /* We can't have fall-through edges across partition boundaries.
         Note that force_nonfallthru will do any necessary partition
         boundary fixup by calling fixup_partition_crossing itself.  */
      if ((e->flags & EDGE_FALLTHRU)
          && BB_PARTITION (bb) != BB_PARTITION (e->dest)
          && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
        force_nonfallthru (e);
      else
        fixup_partition_crossing (e);
    }
}

/* Attempt to change code to redirect edge E to TARGET.  Don't do that at
   the expense of adding new instructions or reordering basic blocks.

   The function can also be called with edge destination equivalent to
   TARGET.  Then it should try the simplifications and do nothing if none
   is possible.

   Return the edge representing the branch if the transformation succeeded.
   Return NULL on failure.
   We still return NULL in case E already pointed to TARGET and we didn't
   manage to simplify the instruction stream.  */

static edge
rtl_redirect_edge_and_branch (edge e, basic_block target)
{
  edge ret;
  basic_block src = e->src;
  basic_block dest = e->dest;

  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return NULL;

  if (dest == target)
    return e;

  if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
    {
      df_set_bb_dirty (src);
      fixup_partition_crossing (ret);
      return ret;
    }

  ret = redirect_branch_edge (e, target);
  if (!ret)
    return NULL;

  df_set_bb_dirty (src);
  fixup_partition_crossing (ret);
  return ret;
}

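/* Illustrative usage sketch, not part of the original source: passes reach
   this hook through redirect_edge_and_branch from cfghooks.h and fall back
   to the force variant when cheap redirection fails, much as
   rtl_redirect_edge_and_branch_force below does:

     if (!redirect_edge_and_branch (e, target))
       redirect_edge_and_branch_force (e, target);  */
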
/* Emit a barrier after BB, into the footer if we are in CFGLAYOUT mode.  */

void
emit_barrier_after_bb (basic_block bb)
{
  rtx_barrier *barrier = emit_barrier_after (BB_END (bb));
  gcc_assert (current_ir_type () == IR_RTL_CFGRTL
              || current_ir_type () == IR_RTL_CFGLAYOUT);
  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    {
      rtx_insn *insn = unlink_insn_chain (barrier, barrier);

      if (BB_FOOTER (bb))
        {
          rtx_insn *footer_tail = BB_FOOTER (bb);

          while (NEXT_INSN (footer_tail))
            footer_tail = NEXT_INSN (footer_tail);
          if (!BARRIER_P (footer_tail))
            {
              SET_NEXT_INSN (footer_tail) = insn;
              SET_PREV_INSN (insn) = footer_tail;
            }
        }
      else
        BB_FOOTER (bb) = insn;
    }
}

/* Like force_nonfallthru below, but additionally performs redirection.
   Used by redirect_edge_and_branch_force.  JUMP_LABEL is used only
   when redirecting to the EXIT_BLOCK, it is either ret_rtx or
   simple_return_rtx, indicating which kind of returnjump to create.
   It should be NULL otherwise.  */

basic_block
force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
{
  basic_block jump_block, new_bb = NULL, src = e->src;
  rtx note;
  edge new_edge;
  int abnormal_edge_flags = 0;
  bool asm_goto_edge = false;
  int loc;

  /* In the case the last instruction is conditional jump to the next
     instruction, first redirect the jump itself and then continue
     by creating a basic block afterwards to redirect fallthru edge.  */
  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
      && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
      && any_condjump_p (BB_END (e->src))
      && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
    {
      rtx note;
      edge b = unchecked_make_edge (e->src, target, 0);
      bool redirected;

      redirected = redirect_jump (as_a <rtx_jump_insn *> (BB_END (e->src)),
                                  block_label (target), 0);
      gcc_assert (redirected);

      note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
      if (note)
        {
          int prob = XINT (note, 0);

          b->probability = profile_probability::from_reg_br_prob_note (prob);
          e->probability -= b->probability;
        }
    }

  if (e->flags & EDGE_ABNORMAL)
    {
      /* Irritating special case - fallthru edge to the same block as abnormal
         edge.
         We can't redirect abnormal edge, but we still can split the fallthru
         one and create separate abnormal edge to original destination.
         This allows bb-reorder to make such edge non-fallthru.  */
      gcc_assert (e->dest == target);
      abnormal_edge_flags = e->flags & ~EDGE_FALLTHRU;
      e->flags &= EDGE_FALLTHRU;
    }
  else
    {
      gcc_assert (e->flags & EDGE_FALLTHRU);
      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
        {
          /* We can't redirect the entry block.  Create an empty block
             at the start of the function which we use to add the new
             jump.  */
          edge tmp;
          edge_iterator ei;
          bool found = false;

          basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL,
                                               ENTRY_BLOCK_PTR_FOR_FN (cfun));
          bb->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;

          /* Make sure new block ends up in correct hot/cold section.  */
          BB_COPY_PARTITION (bb, e->dest);

          /* Change the existing edge's source to be the new block, and add
             a new edge from the entry block to the new block.  */
          e->src = bb;
          for (ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
               (tmp = ei_safe_edge (ei)); )
            {
              if (tmp == e)
                {
                  ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs->unordered_remove (ei.index);
                  found = true;
                  break;
                }
              else
                ei_next (&ei);
            }

          gcc_assert (found);

          vec_safe_push (bb->succs, e);
          make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb,
                                 EDGE_FALLTHRU);
        }
    }

  /* If e->src ends with asm goto, see if any of the ASM_OPERANDS_LABELs
     don't point to the target or fallthru label.  */
  if (JUMP_P (BB_END (e->src))
      && target != EXIT_BLOCK_PTR_FOR_FN (cfun)
      && (e->flags & EDGE_FALLTHRU)
      && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
    {
      int i, n = ASM_OPERANDS_LABEL_LENGTH (note);
      bool adjust_jump_target = false;

      for (i = 0; i < n; ++i)
        {
          if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (e->dest))
            {
              LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))--;
              XEXP (ASM_OPERANDS_LABEL (note, i), 0) = block_label (target);
              LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))++;
              adjust_jump_target = true;
            }
          if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (target))
            asm_goto_edge = true;
        }
      if (adjust_jump_target)
        {
          rtx_insn *insn = BB_END (e->src);
          rtx note;
          rtx_insn *old_label = BB_HEAD (e->dest);
          rtx_insn *new_label = BB_HEAD (target);

          if (JUMP_LABEL (insn) == old_label)
            {
              JUMP_LABEL (insn) = new_label;
              note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
              if (note)
                remove_note (insn, note);
            }
          else
            {
              note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
              if (note)
                remove_note (insn, note);
              if (JUMP_LABEL (insn) != new_label
                  && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
                add_reg_note (insn, REG_LABEL_TARGET, new_label);
            }
          while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
                 != NULL_RTX)
            XEXP (note, 0) = new_label;
        }
    }

  if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
    {
      rtx_insn *new_head;
      profile_count count = e->count ();
      profile_probability probability = e->probability;
      /* Create the new structures.  */

      /* If the old block ended with a tablejump, skip its table
         by searching forward from there.  Otherwise start searching
         forward from the last instruction of the old block.  */
      rtx_jump_table_data *table;
      if (tablejump_p (BB_END (e->src), NULL, &table))
        new_head = table;
      else
        new_head = BB_END (e->src);
      new_head = NEXT_INSN (new_head);

      jump_block = create_basic_block (new_head, NULL, e->src);
      jump_block->count = count;

      /* Make sure new block ends up in correct hot/cold section.  */

      BB_COPY_PARTITION (jump_block, e->src);

      /* Wire edge in.  */
      new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
      new_edge->probability = probability;

      /* Redirect old edge.  */
      redirect_edge_pred (e, jump_block);
      e->probability = profile_probability::always ();

      /* If e->src was previously region crossing, it no longer is
         and the reg crossing note should be removed.  */
      fixup_partition_crossing (new_edge);

      /* If asm goto has any label refs to target's label,
         add also edge from asm goto bb to target.  */
      if (asm_goto_edge)
        {
          new_edge->probability /= 2;
          jump_block->count /= 2;
          edge new_edge2 = make_edge (new_edge->src, target,
                                      e->flags & ~EDGE_FALLTHRU);
          new_edge2->probability = probability - new_edge->probability;
        }

      new_bb = jump_block;
    }
  else
    jump_block = e->src;

  loc = e->goto_locus;
  e->flags &= ~EDGE_FALLTHRU;
  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      if (jump_label == ret_rtx)
        emit_jump_insn_after_setloc (targetm.gen_return (),
                                     BB_END (jump_block), loc);
      else
        {
          gcc_assert (jump_label == simple_return_rtx);
          emit_jump_insn_after_setloc (targetm.gen_simple_return (),
                                       BB_END (jump_block), loc);
        }
      set_return_jump_label (BB_END (jump_block));
    }
  else
    {
      rtx_code_label *label = block_label (target);
      emit_jump_insn_after_setloc (targetm.gen_jump (label),
                                   BB_END (jump_block), loc);
      JUMP_LABEL (BB_END (jump_block)) = label;
      LABEL_NUSES (label)++;
    }

  /* We might be in cfg layout mode, and if so, the following routine will
     insert the barrier correctly.  */
  emit_barrier_after_bb (jump_block);
  redirect_edge_succ_nodup (e, target);

  if (abnormal_edge_flags)
    make_edge (src, target, abnormal_edge_flags);

  df_mark_solutions_dirty ();
  fixup_partition_crossing (e);
  return new_bb;
}

/* Edge E is assumed to be a fallthru edge.  Emit the needed jump
   instruction (and possibly create a new basic block) to make the edge
   non-fallthru.  Return the newly created BB, or NULL if none.  */

static basic_block
rtl_force_nonfallthru (edge e)
{
  return force_nonfallthru_and_redirect (e, e->dest, NULL_RTX);
}

/* Redirect the edge even at the expense of creating a new jump insn or
   basic block.  Return the new basic block if created, NULL otherwise.
   Conversion must be possible.  */

static basic_block
rtl_redirect_edge_and_branch_force (edge e, basic_block target)
{
  if (redirect_edge_and_branch (e, target)
      || e->dest == target)
    return NULL;

  /* In case the edge redirection failed, try to force it to be non-fallthru
     and redirect newly created simplejump.  */
  df_set_bb_dirty (e->src);
  return force_nonfallthru_and_redirect (e, target, NULL_RTX);
}

/* The given edge should potentially be a fallthru edge.  If that is in
   fact true, delete the jump and barriers that are in the way.  */

static void
rtl_tidy_fallthru_edge (edge e)
{
  rtx_insn *q;
  basic_block b = e->src, c = b->next_bb;

  /* ??? In a late-running flow pass, other folks may have deleted basic
     blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label.  They ought to be chastised and fixed.

     We can also wind up with a sequence of undeletable labels between
     one block and the next.

     So search through a sequence of barriers, labels, and notes for
     the head of block C and assert that we really do fall through.  */

  for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
    if (NONDEBUG_INSN_P (q))
      return;

  /* Remove what will soon cease being the jump insn from the source block.
     If block B consisted only of this single jump, turn it into a deleted
     note.  */
  q = BB_END (b);
  if (JUMP_P (q)
      && onlyjump_p (q)
      && (any_uncondjump_p (q)
          || single_succ_p (b)))
    {
      rtx_insn *label;
      rtx_jump_table_data *table;

      if (tablejump_p (q, &label, &table))
        {
          /* The label is likely mentioned in some instruction before
             the tablejump and might not be DCEd, so turn it into
             a note instead and move before the tablejump that is going to
             be deleted.  */
          const char *name = LABEL_NAME (label);
          PUT_CODE (label, NOTE);
          NOTE_KIND (label) = NOTE_INSN_DELETED_LABEL;
          NOTE_DELETED_LABEL_NAME (label) = name;
          reorder_insns (label, label, PREV_INSN (q));
          delete_insn (table);
        }

      q = PREV_INSN (q);
    }
  /* Unconditional jumps with side-effects (i.e. which we can't just delete
     together with the barrier) should never have a fallthru edge.  */
  else if (JUMP_P (q) && any_uncondjump_p (q))
    return;

  /* Selectively unlink the sequence.  */
  if (q != PREV_INSN (BB_HEAD (c)))
    delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);

  e->flags |= EDGE_FALLTHRU;
}

/* Should move basic block BB after basic block AFTER.  Not implemented
   yet.  */

static bool
rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
                      basic_block after ATTRIBUTE_UNUSED)
{
  return false;
}

/* Locate the last bb in the same partition as START_BB.  */

static basic_block
last_bb_in_partition (basic_block start_bb)
{
  basic_block bb;
  FOR_BB_BETWEEN (bb, start_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      if (BB_PARTITION (start_bb) != BB_PARTITION (bb->next_bb))
        return bb;
    }
  /* Return bb before the exit block.  */
  return bb->prev_bb;
}

1851 /* Split a (typically critical) edge. Return the new block.
1852 The edge must not be abnormal.
1854 ??? The code generally expects to be called on critical edges.
1855 The case of a block ending in an unconditional jump to a
1856 block with multiple predecessors is not handled optimally. */
1858 static basic_block
1859 rtl_split_edge (edge edge_in)
1861 basic_block bb, new_bb;
1862 rtx_insn *before;
1864 /* Abnormal edges cannot be split. */
1865 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
1867 /* We are going to place the new block in front of edge destination.
1868 Avoid existence of fallthru predecessors. */
1869 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1871 edge e = find_fallthru_edge (edge_in->dest->preds);
1873 if (e)
1874 force_nonfallthru (e);
1877 /* Create the basic block note. */
1878 if (edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1879 before = BB_HEAD (edge_in->dest);
1880 else
1881 before = NULL;
1883 /* If this is a fall through edge to the exit block, the blocks might be
1884 not adjacent, and the right place is after the source. */
1885 if ((edge_in->flags & EDGE_FALLTHRU)
1886 && edge_in->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
1888 before = NEXT_INSN (BB_END (edge_in->src));
1889 bb = create_basic_block (before, NULL, edge_in->src);
1890 BB_COPY_PARTITION (bb, edge_in->src);
1892 else
1894 if (edge_in->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1896 bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
1897 BB_COPY_PARTITION (bb, edge_in->dest);
1899 else
1901 basic_block after = edge_in->dest->prev_bb;
1902 /* If this is post-bb reordering, and the edge crosses a partition
1903 boundary, the new block needs to be inserted in the bb chain
1904 at the end of the src partition (since we put the new bb into
1905 that partition, see below). Otherwise we may end up creating
1906 an extra partition crossing in the chain, which is illegal.
1907 It can't go after the src, because src may have a fall-through
1908 to a different block. */
1909 if (crtl->bb_reorder_complete
1910 && (edge_in->flags & EDGE_CROSSING))
1912 after = last_bb_in_partition (edge_in->src);
1913 before = get_last_bb_insn (after);
1914 /* The instruction following the last bb in partition should
1915 be a barrier, since it cannot end in a fall-through. */
1916 gcc_checking_assert (BARRIER_P (before));
1917 before = NEXT_INSN (before);
1919 bb = create_basic_block (before, NULL, after);
1920 /* Put the split bb into the src partition, to avoid creating
1921 a situation where a cold bb dominates a hot bb, in the case
1922 where src is cold and dest is hot. The src will dominate
1923 the new bb (whereas it might not have dominated dest). */
1924 BB_COPY_PARTITION (bb, edge_in->src);
1928 make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
1930 /* Can't allow a region crossing edge to be fallthrough. */
1931 if (BB_PARTITION (bb) != BB_PARTITION (edge_in->dest)
1932 && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1934 new_bb = force_nonfallthru (single_succ_edge (bb));
1935 gcc_assert (!new_bb);
1938 /* For non-fallthru edges, we must adjust the predecessor's
1939 jump instruction to target our new block. */
1940 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1942 edge redirected = redirect_edge_and_branch (edge_in, bb);
1943 gcc_assert (redirected);
1945 else
1947 if (edge_in->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
1949 /* For an asm goto, even splitting of the fallthru edge might
1950 need insn patching, as other labels might point to the
1951 old label. */
1952 rtx_insn *last = BB_END (edge_in->src);
1953 if (last
1954 && JUMP_P (last)
1955 && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
1956 && (extract_asm_operands (PATTERN (last))
1957 || JUMP_LABEL (last) == before)
1958 && patch_jump_insn (last, before, bb))
1959 df_set_bb_dirty (edge_in->src);
1961 redirect_edge_succ (edge_in, bb);
1964 return bb;
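/* A minimal usage sketch, not part of the original file: split_edge is
   the cfghooks wrapper that dispatches to rtl_split_edge when the RTL
   hooks are installed.  The helper below is hypothetical and simply
   splits every splittable critical successor edge of BB.  */

static void
example_split_critical_succs (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
      /* Returns the new block; a real caller would typically use it.  */
      split_edge (e);
}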
1967 /* Queue instructions for insertion on an edge between two basic blocks.
1968 The new instructions and basic blocks (if any) will not appear in the
1969 CFG until commit_edge_insertions is called. */
1971 void
1972 insert_insn_on_edge (rtx pattern, edge e)
1974 /* We cannot insert instructions on an abnormal critical edge.
1975 It will be easier to find the culprit if we die now. */
1976 gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
1978 if (e->insns.r == NULL_RTX)
1979 start_sequence ();
1980 else
1981 push_to_sequence (e->insns.r);
1983 emit_insn (pattern);
1985 e->insns.r = get_insns ();
1986 end_sequence ();
1989 /* Update the CFG for the instructions queued on edge E. */
1991 void
1992 commit_one_edge_insertion (edge e)
1994 rtx_insn *before = NULL, *after = NULL, *insns, *tmp, *last;
1995 basic_block bb;
1997 /* Pull the insns off the edge now since the edge might go away. */
1998 insns = e->insns.r;
1999 e->insns.r = NULL;
2001 /* Figure out where to put these insns. If the destination has
2002 one predecessor, insert there. Except for the exit block. */
2003 if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
2005 bb = e->dest;
2007 /* Get the location correct wrt a code label, and "nice" wrt
2008 the basic block note: after both, but before everything else. */
2009 tmp = BB_HEAD (bb);
2010 if (LABEL_P (tmp))
2011 tmp = NEXT_INSN (tmp);
2012 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
2013 tmp = NEXT_INSN (tmp);
2014 if (tmp == BB_HEAD (bb))
2015 before = tmp;
2016 else if (tmp)
2017 after = PREV_INSN (tmp);
2018 else
2019 after = get_last_insn ();
2022 /* If the source has one successor and the edge is not abnormal,
2023 insert there. Except for the entry block.
2024 Don't do this if the predecessor ends in a jump other than an
2025 unconditional simple jump. E.g. for an asm goto that points all
2026 its labels at the fallthru basic block, we can't insert instructions
2027 before the asm goto, as the asm goto can have various side effects,
2028 and we can't emit instructions after the asm goto, as it must end
2029 the basic block. */
2030 else if ((e->flags & EDGE_ABNORMAL) == 0
2031 && single_succ_p (e->src)
2032 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
2033 && (!JUMP_P (BB_END (e->src))
2034 || simplejump_p (BB_END (e->src))))
2036 bb = e->src;
2038 /* It is possible to have a non-simple jump here. Consider a target
2039 where some forms of unconditional jumps clobber a register. This
2040 happens on the fr30 for example.
2042 We know this block has a single successor, so we can just emit
2043 the queued insns before the jump. */
2044 if (JUMP_P (BB_END (bb)))
2045 before = BB_END (bb);
2046 else
2048 /* We'd better be fallthru, or we've lost track of what's what. */
2049 gcc_assert (e->flags & EDGE_FALLTHRU);
2051 after = BB_END (bb);
2055 /* Otherwise we must split the edge. */
2056 else
2058 bb = split_edge (e);
2060 /* If E crossed a partition boundary, we needed to make bb end in
2061 a region-crossing jump, even though it was originally fallthru. */
2062 if (JUMP_P (BB_END (bb)))
2063 before = BB_END (bb);
2064 else
2065 after = BB_END (bb);
2068 /* Now that we've found the spot, do the insertion. */
2069 if (before)
2071 emit_insn_before_noloc (insns, before, bb);
2072 last = prev_nonnote_insn (before);
2074 else
2075 last = emit_insn_after_noloc (insns, after, bb);
2077 if (returnjump_p (last))
2079 /* ??? Remove all outgoing edges from BB and add one for EXIT.
2080 This is not currently a problem because this only happens
2081 for the (single) epilogue, which already has a fallthru edge
2082 to EXIT. */
2084 e = single_succ_edge (bb);
2085 gcc_assert (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
2086 && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));
2088 e->flags &= ~EDGE_FALLTHRU;
2089 emit_barrier_after (last);
2091 if (before)
2092 delete_insn (before);
2094 else
2095 gcc_assert (!JUMP_P (last));
2098 /* Update the CFG for all queued instructions. */
2100 void
2101 commit_edge_insertions (void)
2103 basic_block bb;
2105 /* Optimization passes that invoke this routine can cause hot blocks
2106 previously reached by both hot and cold blocks to become dominated only
2107 by cold blocks. This will cause the verification below to fail,
2108 and leave now-cold code in the hot section. In some cases this
2109 may only be visible after newly unreachable blocks are deleted,
2110 which will be done by fixup_partitions. */
2111 fixup_partitions ();
2113 if (!currently_expanding_to_rtl)
2114 checking_verify_flow_info ();
2116 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
2117 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
2119 edge e;
2120 edge_iterator ei;
2122 FOR_EACH_EDGE (e, ei, bb->succs)
2123 if (e->insns.r)
2125 if (currently_expanding_to_rtl)
2126 rebuild_jump_labels_chain (e->insns.r);
2127 commit_one_edge_insertion (e);
2133 /* Print out RTL-specific basic block information (live information
2134 at start and end with TDF_DETAILS). FLAGS are the TDF_* masks
2135 documented in dumpfile.h. */
2137 static void
2138 rtl_dump_bb (FILE *outf, basic_block bb, int indent, dump_flags_t flags)
2140 char *s_indent;
2142 s_indent = (char *) alloca ((size_t) indent + 1);
2143 memset (s_indent, ' ', (size_t) indent);
2144 s_indent[indent] = '\0';
2146 if (df && (flags & TDF_DETAILS))
2148 df_dump_top (bb, outf);
2149 putc ('\n', outf);
2152 if (bb->index != ENTRY_BLOCK && bb->index != EXIT_BLOCK
2153 && rtl_bb_info_initialized_p (bb))
2155 rtx_insn *last = BB_END (bb);
2156 if (last)
2157 last = NEXT_INSN (last);
2158 for (rtx_insn *insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
2160 if (flags & TDF_DETAILS)
2161 df_dump_insn_top (insn, outf);
2162 if (! (flags & TDF_SLIM))
2163 print_rtl_single (outf, insn);
2164 else
2165 dump_insn_slim (outf, insn);
2166 if (flags & TDF_DETAILS)
2167 df_dump_insn_bottom (insn, outf);
2171 if (df && (flags & TDF_DETAILS))
2173 df_dump_bottom (bb, outf);
2174 putc ('\n', outf);
2179 /* Like dump_function_to_file, but for RTL. Print out dataflow information
2180 for the start of each basic block. FLAGS are the TDF_* masks documented
2181 in dumpfile.h. */
2183 void
2184 print_rtl_with_bb (FILE *outf, const rtx_insn *rtx_first, dump_flags_t flags)
2186 const rtx_insn *tmp_rtx;
2187 if (rtx_first == 0)
2188 fprintf (outf, "(nil)\n");
2189 else
2191 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
2192 int max_uid = get_max_uid ();
2193 basic_block *start = XCNEWVEC (basic_block, max_uid);
2194 basic_block *end = XCNEWVEC (basic_block, max_uid);
2195 enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
2196 basic_block bb;
2198 /* After freeing the CFG, we still have BLOCK_FOR_INSN set on most
2199 insns, but the CFG is not maintained so the basic block info
2200 is not reliable. Therefore it's omitted from the dumps. */
2201 if (! (cfun->curr_properties & PROP_cfg))
2202 flags &= ~TDF_BLOCKS;
2204 if (df)
2205 df_dump_start (outf);
2207 if (cfun->curr_properties & PROP_cfg)
2209 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2211 rtx_insn *x;
2213 start[INSN_UID (BB_HEAD (bb))] = bb;
2214 end[INSN_UID (BB_END (bb))] = bb;
2215 if (flags & TDF_BLOCKS)
2217 for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
2219 enum bb_state state = IN_MULTIPLE_BB;
2221 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
2222 state = IN_ONE_BB;
2223 in_bb_p[INSN_UID (x)] = state;
2225 if (x == BB_END (bb))
2226 break;
2232 for (tmp_rtx = rtx_first; tmp_rtx != NULL; tmp_rtx = NEXT_INSN (tmp_rtx))
2234 if (flags & TDF_BLOCKS)
2236 bb = start[INSN_UID (tmp_rtx)];
2237 if (bb != NULL)
2239 dump_bb_info (outf, bb, 0, dump_flags, true, false);
2240 if (df && (flags & TDF_DETAILS))
2241 df_dump_top (bb, outf);
2244 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
2245 && !NOTE_P (tmp_rtx)
2246 && !BARRIER_P (tmp_rtx))
2247 fprintf (outf, ";; Insn is not within a basic block\n");
2248 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
2249 fprintf (outf, ";; Insn is in multiple basic blocks\n");
2252 if (flags & TDF_DETAILS)
2253 df_dump_insn_top (tmp_rtx, outf);
2254 if (! (flags & TDF_SLIM))
2255 print_rtl_single (outf, tmp_rtx);
2256 else
2257 dump_insn_slim (outf, tmp_rtx);
2258 if (flags & TDF_DETAILS)
2259 df_dump_insn_bottom (tmp_rtx, outf);
2261 bb = end[INSN_UID (tmp_rtx)];
2262 if (bb != NULL)
2264 if (flags & TDF_BLOCKS)
2266 dump_bb_info (outf, bb, 0, dump_flags, false, true);
2267 if (df && (flags & TDF_DETAILS))
2268 df_dump_bottom (bb, outf);
2269 putc ('\n', outf);
2271 /* Emit a hint if the fallthrough target of the current basic block
2272 isn't the one placed right after it. */
2273 else if (EDGE_COUNT (bb->succs) > 0)
2275 gcc_assert (BB_END (bb) == tmp_rtx);
2276 const rtx_insn *ninsn = NEXT_INSN (tmp_rtx);
2277 /* Bypass intervening deleted-insn notes and debug insns. */
2278 while (ninsn
2279 && !NONDEBUG_INSN_P (ninsn)
2280 && !start[INSN_UID (ninsn)])
2281 ninsn = NEXT_INSN (ninsn);
2282 edge e = find_fallthru_edge (bb->succs);
2283 if (e && ninsn)
2285 basic_block dest = e->dest;
2286 if (start[INSN_UID (ninsn)] != dest)
2287 fprintf (outf, "%s ; pc falls through to BB %d\n",
2288 print_rtx_head, dest->index);
2294 free (start);
2295 free (end);
2296 free (in_bb_p);
2300 /* Update the branch probability of BB if a REG_BR_PROB note is present. */
2302 void
2303 update_br_prob_note (basic_block bb)
2305 rtx note;
2306 note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
2307 if (!JUMP_P (BB_END (bb)) || !BRANCH_EDGE (bb)->probability.initialized_p ())
2309 if (note)
2311 rtx *note_link, this_rtx;
2313 note_link = &REG_NOTES (BB_END (bb));
2314 for (this_rtx = *note_link; this_rtx; this_rtx = XEXP (this_rtx, 1))
2315 if (this_rtx == note)
2317 *note_link = XEXP (this_rtx, 1);
2318 break;
2321 return;
2323 if (!note
2324 || XINT (note, 0) == BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ())
2325 return;
2326 XINT (note, 0) = BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ();
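/* Sketch of a typical call site (hypothetical, not from this file):
   after rewriting the out-edge probabilities of a two-successor BB,
   resynchronize any REG_BR_PROB note on its final jump.  */

static void
example_even_out_probabilities (basic_block bb)
{
  if (EDGE_COUNT (bb->succs) == 2)
    {
      BRANCH_EDGE (bb)->probability = profile_probability::even ();
      FALLTHRU_EDGE (bb)->probability
	= BRANCH_EDGE (bb)->probability.invert ();
      update_br_prob_note (bb);
    }
}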
2329 /* Get the last insn associated with block BB (that includes barriers and
2330 tablejumps after BB). */
2331 rtx_insn *
2332 get_last_bb_insn (basic_block bb)
2334 rtx_jump_table_data *table;
2335 rtx_insn *tmp;
2336 rtx_insn *end = BB_END (bb);
2338 /* Include any jump table following the basic block. */
2339 if (tablejump_p (end, NULL, &table))
2340 end = table;
2342 /* Include any barriers that may follow the basic block. */
2343 tmp = next_nonnote_nondebug_insn_bb (end);
2344 while (tmp && BARRIER_P (tmp))
2346 end = tmp;
2347 tmp = next_nonnote_nondebug_insn_bb (end);
2350 return end;
2353 /* Add all BBs reachable from entry via hot paths into the SET. */
2355 void
2356 find_bbs_reachable_by_hot_paths (hash_set<basic_block> *set)
2358 auto_vec<basic_block, 64> worklist;
2360 set->add (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2361 worklist.safe_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2363 while (worklist.length () > 0)
2365 basic_block bb = worklist.pop ();
2366 edge_iterator ei;
2367 edge e;
2369 FOR_EACH_EDGE (e, ei, bb->succs)
2370 if (BB_PARTITION (e->dest) != BB_COLD_PARTITION
2371 && !set->add (e->dest))
2372 worklist.safe_push (e->dest);
2376 /* Sanity check partition hotness to ensure that basic blocks in
2377 the cold partition don't dominate basic blocks in the hot partition.
2378 If FLAG_ONLY is true, report violations as errors. Otherwise
2379 re-mark the dominated blocks as cold, since this is run after
2380 cfg optimizations that may make hot blocks previously reached
2381 by both hot and cold blocks now only reachable along cold paths. */
2383 static auto_vec<basic_block>
2384 find_partition_fixes (bool flag_only)
2386 basic_block bb;
2387 auto_vec<basic_block> bbs_to_fix;
2388 hash_set<basic_block> set;
2390 /* Callers check this. */
2391 gcc_checking_assert (crtl->has_bb_partition);
2393 find_bbs_reachable_by_hot_paths (&set);
2395 FOR_EACH_BB_FN (bb, cfun)
2396 if (!set.contains (bb)
2397 && BB_PARTITION (bb) != BB_COLD_PARTITION)
2399 if (flag_only)
2400 error ("non-cold basic block %d reachable only "
2401 "by paths crossing the cold partition", bb->index);
2402 else
2403 BB_SET_PARTITION (bb, BB_COLD_PARTITION);
2404 bbs_to_fix.safe_push (bb);
2407 return bbs_to_fix;
2410 /* Perform cleanup on the hot/cold bb partitioning after optimization
2411 passes that modify the cfg. */
2413 void
2414 fixup_partitions (void)
2416 if (!crtl->has_bb_partition)
2417 return;
2419 /* Delete any blocks that became unreachable and weren't
2420 already cleaned up, for example during edge forwarding
2421 and convert_jumps_to_returns. This will expose more
2422 opportunities for fixing the partition boundaries here.
2423 Also, the calculation of the dominance graph during verification
2424 will assert if there are unreachable nodes. */
2425 delete_unreachable_blocks ();
2427 /* If there are partitions, do a sanity check on them: A basic block in
2428 a cold partition cannot dominate a basic block in a hot partition.
2429 Fix up any that now violate this requirement, as a result of edge
2430 forwarding and unreachable block deletion. */
2431 auto_vec<basic_block> bbs_to_fix = find_partition_fixes (false);
2433 /* Do the partition fixup after all necessary blocks have been converted to
2434 cold, so that we update the region crossings in the minimum number of
2435 places, which can require forcing edges to be non-fallthru. */
2436 if (! bbs_to_fix.is_empty ())
2440 basic_block bb = bbs_to_fix.pop ();
2441 fixup_new_cold_bb (bb);
2443 while (! bbs_to_fix.is_empty ());
2445 /* Fix up the hot/cold block grouping if needed. */
2446 if (crtl->bb_reorder_complete && current_ir_type () == IR_RTL_CFGRTL)
2448 basic_block bb, first = NULL, second = NULL;
2449 int current_partition = BB_UNPARTITIONED;
2451 FOR_EACH_BB_FN (bb, cfun)
2453 if (current_partition != BB_UNPARTITIONED
2454 && BB_PARTITION (bb) != current_partition)
2456 if (first == NULL)
2457 first = bb;
2458 else if (second == NULL)
2459 second = bb;
2460 else
2462 /* If we switch partitions for the 3rd, 5th etc. time,
2463 move bbs first (inclusive) .. second (exclusive) right
2464 before bb. */
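/* Illustrative example, not in the original comment: with
   the chain H1 C1 H2 C2 the partition switches at C1
   (-> first), H2 (-> second) and C2 (-> bb); moving
   [C1, H2), i.e. just C1, right before C2 yields
   H1 H2 C1 C2, which has a single hot/cold transition.  */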
2465 basic_block prev_first = first->prev_bb;
2466 basic_block prev_second = second->prev_bb;
2467 basic_block prev_bb = bb->prev_bb;
2468 prev_first->next_bb = second;
2469 second->prev_bb = prev_first;
2470 prev_second->next_bb = bb;
2471 bb->prev_bb = prev_second;
2472 prev_bb->next_bb = first;
2473 first->prev_bb = prev_bb;
2474 rtx_insn *prev_first_insn = PREV_INSN (BB_HEAD (first));
2475 rtx_insn *prev_second_insn
2476 = PREV_INSN (BB_HEAD (second));
2477 rtx_insn *prev_bb_insn = PREV_INSN (BB_HEAD (bb));
2478 SET_NEXT_INSN (prev_first_insn) = BB_HEAD (second);
2479 SET_PREV_INSN (BB_HEAD (second)) = prev_first_insn;
2480 SET_NEXT_INSN (prev_second_insn) = BB_HEAD (bb);
2481 SET_PREV_INSN (BB_HEAD (bb)) = prev_second_insn;
2482 SET_NEXT_INSN (prev_bb_insn) = BB_HEAD (first);
2483 SET_PREV_INSN (BB_HEAD (first)) = prev_bb_insn;
2484 second = NULL;
2487 current_partition = BB_PARTITION (bb);
2489 gcc_assert (!second);
2494 /* Verify, in the basic block chain, that there is at most one switch
2495 between hot/cold partitions. This condition will not be true until
2496 after reorder_basic_blocks is called. */
2498 static bool
2499 verify_hot_cold_block_grouping (void)
2501 basic_block bb;
2502 bool err = false;
2503 bool switched_sections = false;
2504 int current_partition = BB_UNPARTITIONED;
2506 /* Even after bb reordering is complete, we go into cfglayout mode
2507 again (in compgoto). Ensure we don't call this before going back
2508 into linearized RTL, at which point any layout fixes will have been committed.
2509 if (!crtl->bb_reorder_complete
2510 || current_ir_type () != IR_RTL_CFGRTL)
2511 return err;
2513 FOR_EACH_BB_FN (bb, cfun)
2515 if (current_partition != BB_UNPARTITIONED
2516 && BB_PARTITION (bb) != current_partition)
2518 if (switched_sections)
2520 error ("multiple hot/cold transitions found (bb %i)",
2521 bb->index);
2522 err = true;
2524 else
2525 switched_sections = true;
2527 if (!crtl->has_bb_partition)
2528 error ("partition found but function partition flag not set");
2530 current_partition = BB_PARTITION (bb);
2533 return err;
2537 /* Perform several checks on the edges out of each block, such as
2538 the consistency of the branch probabilities, the correctness
2539 of hot/cold partition crossing edges, and the number of expected
2540 successor edges. Also verify that the dominance relationship
2541 between hot/cold blocks is sane. */
2543 static bool
2544 rtl_verify_edges (void)
2546 bool err = false;
2547 basic_block bb;
2549 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2551 int n_fallthru = 0, n_branch = 0, n_abnormal_call = 0, n_sibcall = 0;
2552 int n_eh = 0, n_abnormal = 0;
2553 edge e, fallthru = NULL;
2554 edge_iterator ei;
2555 rtx note;
2556 bool has_crossing_edge = false;
2558 if (JUMP_P (BB_END (bb))
2559 && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
2560 && EDGE_COUNT (bb->succs) >= 2
2561 && any_condjump_p (BB_END (bb)))
2563 if (!BRANCH_EDGE (bb)->probability.initialized_p ())
2565 if (profile_status_for_fn (cfun) != PROFILE_ABSENT)
2567 error ("verify_flow_info: "
2568 "REG_BR_PROB is set but cfg probability is not");
2569 err = true;
2572 else if (XINT (note, 0)
2573 != BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ()
2574 && profile_status_for_fn (cfun) != PROFILE_ABSENT)
2576 error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
2577 XINT (note, 0),
2578 BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ());
2579 err = true;
2583 FOR_EACH_EDGE (e, ei, bb->succs)
2585 bool is_crossing;
2587 if (e->flags & EDGE_FALLTHRU)
2588 n_fallthru++, fallthru = e;
2590 is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
2591 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
2592 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun));
2593 has_crossing_edge |= is_crossing;
2594 if (e->flags & EDGE_CROSSING)
2596 if (!is_crossing)
2598 error ("EDGE_CROSSING incorrectly set across same section");
2599 err = true;
2601 if (e->flags & EDGE_FALLTHRU)
2603 error ("fallthru edge crosses section boundary in bb %i",
2604 e->src->index);
2605 err = true;
2607 if (e->flags & EDGE_EH)
2609 error ("EH edge crosses section boundary in bb %i",
2610 e->src->index);
2611 err = true;
2613 if (JUMP_P (BB_END (bb)) && !CROSSING_JUMP_P (BB_END (bb)))
2615 error ("No region crossing jump at section boundary in bb %i",
2616 bb->index);
2617 err = true;
2620 else if (is_crossing)
2622 error ("EDGE_CROSSING missing across section boundary");
2623 err = true;
2626 if ((e->flags & ~(EDGE_DFS_BACK
2627 | EDGE_CAN_FALLTHRU
2628 | EDGE_IRREDUCIBLE_LOOP
2629 | EDGE_LOOP_EXIT
2630 | EDGE_CROSSING
2631 | EDGE_PRESERVE)) == 0)
2632 n_branch++;
2634 if (e->flags & EDGE_ABNORMAL_CALL)
2635 n_abnormal_call++;
2637 if (e->flags & EDGE_SIBCALL)
2638 n_sibcall++;
2640 if (e->flags & EDGE_EH)
2641 n_eh++;
2643 if (e->flags & EDGE_ABNORMAL)
2644 n_abnormal++;
2647 if (!has_crossing_edge
2648 && JUMP_P (BB_END (bb))
2649 && CROSSING_JUMP_P (BB_END (bb)))
2651 print_rtl_with_bb (stderr, get_insns (), TDF_BLOCKS | TDF_DETAILS);
2652 error ("Region crossing jump across same section in bb %i",
2653 bb->index);
2654 err = true;
2657 if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
2659 error ("missing REG_EH_REGION note at the end of bb %i", bb->index);
2660 err = true;
2662 if (n_eh > 1)
2664 error ("too many exception handling edges in bb %i", bb->index);
2665 err = true;
2667 if (n_branch
2668 && (!JUMP_P (BB_END (bb))
2669 || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
2670 || any_condjump_p (BB_END (bb))))))
2672 error ("too many outgoing branch edges from bb %i", bb->index);
2673 err = true;
2675 if (n_fallthru && any_uncondjump_p (BB_END (bb)))
2677 error ("fallthru edge after unconditional jump in bb %i", bb->index);
2678 err = true;
2680 if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
2682 error ("wrong number of branch edges after unconditional jump"
2683 " in bb %i", bb->index);
2684 err = true;
2686 if (n_branch != 1 && any_condjump_p (BB_END (bb))
2687 && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
2689 error ("wrong number of branch edges after conditional jump"
2690 " in bb %i", bb->index);
2691 err = true;
2693 if (n_abnormal_call && !CALL_P (BB_END (bb)))
2695 error ("abnormal call edges for non-call insn in bb %i", bb->index);
2696 err = true;
2698 if (n_sibcall && !CALL_P (BB_END (bb)))
2700 error ("sibcall edges for non-call insn in bb %i", bb->index);
2701 err = true;
2703 if (n_abnormal > n_eh
2704 && !(CALL_P (BB_END (bb))
2705 && n_abnormal == n_abnormal_call + n_sibcall)
2706 && (!JUMP_P (BB_END (bb))
2707 || any_condjump_p (BB_END (bb))
2708 || any_uncondjump_p (BB_END (bb))))
2710 error ("abnormal edges for no purpose in bb %i", bb->index);
2711 err = true;
2714 int has_eh = -1;
2715 FOR_EACH_EDGE (e, ei, bb->preds)
2717 if (has_eh == -1)
2718 has_eh = (e->flags & EDGE_EH);
2719 if ((e->flags & EDGE_EH) == has_eh)
2720 continue;
2721 error ("EH incoming edge mixed with non-EH incoming edges "
2722 "in bb %i", bb->index);
2723 err = true;
2724 break;
2728 /* If there are partitions, do a sanity check on them: A basic block in
2729 a cold partition cannot dominate a basic block in a hot partition. */
2730 if (crtl->has_bb_partition && !err
2731 && current_ir_type () == IR_RTL_CFGLAYOUT)
2733 auto_vec<basic_block> bbs_to_fix = find_partition_fixes (true);
2734 err = !bbs_to_fix.is_empty ();
2737 /* Clean up. */
2738 return err;
2741 /* Checks on the instructions within blocks. Currently checks that each
2742 block starts with a basic block note, and that basic block notes and
2743 control flow jumps are not found in the middle of the block. */
2745 static bool
2746 rtl_verify_bb_insns (void)
2748 rtx_insn *x;
2749 bool err = false;
2750 basic_block bb;
2752 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2754 /* Now check the header of the basic
2755 block. It ought to contain an optional CODE_LABEL followed
2756 by a NOTE_BASIC_BLOCK. */
2757 x = BB_HEAD (bb);
2758 if (LABEL_P (x))
2760 if (BB_END (bb) == x)
2762 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2763 bb->index);
2764 err = true;
2767 x = NEXT_INSN (x);
2770 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
2772 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2773 bb->index);
2774 err = true;
2777 if (BB_END (bb) == x)
2778 /* Do checks for empty blocks here. */
2780 else
2781 for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
2783 if (NOTE_INSN_BASIC_BLOCK_P (x))
2785 error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
2786 INSN_UID (x), bb->index);
2787 err = true;
2790 if (x == BB_END (bb))
2791 break;
2793 if (control_flow_insn_p (x))
2795 error ("in basic block %d:", bb->index);
2796 fatal_insn ("flow control insn inside a basic block", x);
2801 /* Clean up. */
2802 return err;
2805 /* Verify that block pointers for instructions in basic blocks, headers and
2806 footers are set appropriately. */
2808 static bool
2809 rtl_verify_bb_pointers (void)
2811 bool err = false;
2812 basic_block bb;
2814 /* Check the general integrity of the basic blocks. */
2815 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2817 rtx_insn *insn;
2819 if (!(bb->flags & BB_RTL))
2821 error ("BB_RTL flag not set for block %d", bb->index);
2822 err = true;
2825 FOR_BB_INSNS (bb, insn)
2826 if (BLOCK_FOR_INSN (insn) != bb)
2828 error ("insn %d basic block pointer is %d, should be %d",
2829 INSN_UID (insn),
2830 BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
2831 bb->index);
2832 err = true;
2835 for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
2836 if (!BARRIER_P (insn)
2837 && BLOCK_FOR_INSN (insn) != NULL)
2839 error ("insn %d in header of bb %d has non-NULL basic block",
2840 INSN_UID (insn), bb->index);
2841 err = true;
2843 for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
2844 if (!BARRIER_P (insn)
2845 && BLOCK_FOR_INSN (insn) != NULL)
2847 error ("insn %d in footer of bb %d has non-NULL basic block",
2848 INSN_UID (insn), bb->index);
2849 err = true;
2853 /* Clean up. */
2854 return err;
2857 /* Verify the CFG and RTL consistency common for both underlying RTL and
2858 cfglayout RTL.
2860 Currently it does the following checks:
2862 - overlapping of basic blocks
2863 - insns with wrong BLOCK_FOR_INSN pointers
2864 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
2865 - tails of basic blocks (ensure that boundary is necessary)
2866 - scans the body of the basic block for JUMP_INSN, CODE_LABEL
2867 and NOTE_INSN_BASIC_BLOCK
2868 - verify that no fall_thru edge crosses hot/cold partition boundaries
2869 - verify that there are no pending RTL branch predictions
2870 - verify that hot blocks are not dominated by cold blocks
2872 In the future it can be extended to check a lot of other things as well
2873 (reachability of basic blocks, liveness information, etc.). */
2875 static bool
2876 rtl_verify_flow_info_1 (void)
2878 bool err = false;
2880 if (rtl_verify_bb_pointers ())
2881 err = true;
2883 if (rtl_verify_bb_insns ())
2884 err = true;
2886 if (rtl_verify_edges ())
2887 err = true;
2889 return err;
2892 /* Walk the instruction chain and verify that bb head/end pointers
2893 are correct, and that instructions are in exactly one bb and have
2894 correct block pointers. */
2896 static bool
2897 rtl_verify_bb_insn_chain (void)
2899 basic_block bb;
2900 bool err = false;
2901 rtx_insn *x;
2902 rtx_insn *last_head = get_last_insn ();
2903 basic_block *bb_info;
2904 const int max_uid = get_max_uid ();
2906 bb_info = XCNEWVEC (basic_block, max_uid);
2908 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2910 rtx_insn *head = BB_HEAD (bb);
2911 rtx_insn *end = BB_END (bb);
2913 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2915 /* Verify the end of the basic block is in the INSN chain. */
2916 if (x == end)
2917 break;
2919 /* And that the code outside of basic blocks has NULL bb field. */
2920 if (!BARRIER_P (x)
2921 && BLOCK_FOR_INSN (x) != NULL)
2923 error ("insn %d outside of basic blocks has non-NULL bb field",
2924 INSN_UID (x));
2925 err = true;
2929 if (!x)
2931 error ("end insn %d for block %d not found in the insn stream",
2932 INSN_UID (end), bb->index);
2933 err = true;
2936 /* Work backwards from the end to the head of the basic block
2937 to verify the head is in the RTL chain. */
2938 for (; x != NULL_RTX; x = PREV_INSN (x))
2940 /* While walking over the insn chain, verify insns appear
2941 in only one basic block. */
2942 if (bb_info[INSN_UID (x)] != NULL)
2944 error ("insn %d is in multiple basic blocks (%d and %d)",
2945 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
2946 err = true;
2949 bb_info[INSN_UID (x)] = bb;
2951 if (x == head)
2952 break;
2954 if (!x)
2956 error ("head insn %d for block %d not found in the insn stream",
2957 INSN_UID (head), bb->index);
2958 err = true;
2961 last_head = PREV_INSN (x);
2964 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2966 /* Check that the code before the first basic block has NULL
2967 bb field. */
2968 if (!BARRIER_P (x)
2969 && BLOCK_FOR_INSN (x) != NULL)
2971 error ("insn %d outside of basic blocks has non-NULL bb field",
2972 INSN_UID (x));
2973 err = true;
2976 free (bb_info);
2978 return err;
2981 /* Verify that fallthru edges point to adjacent blocks in layout order and
2982 that barriers exist after non-fallthru blocks. */
2984 static bool
2985 rtl_verify_fallthru (void)
2987 basic_block bb;
2988 bool err = false;
2990 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2992 edge e;
2994 e = find_fallthru_edge (bb->succs);
2995 if (!e)
2997 rtx_insn *insn;
2999 /* Ensure the existence of a barrier after a BB with no fallthru edges. */
3000 for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
3002 if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
3004 error ("missing barrier after block %i", bb->index);
3005 err = true;
3006 break;
3008 if (BARRIER_P (insn))
3009 break;
3012 else if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
3013 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3015 rtx_insn *insn;
3017 if (e->src->next_bb != e->dest)
3019 error
3020 ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
3021 e->src->index, e->dest->index);
3022 err = true;
3024 else
3025 for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
3026 insn = NEXT_INSN (insn))
3027 if (BARRIER_P (insn) || NONDEBUG_INSN_P (insn))
3029 error ("verify_flow_info: Incorrect fallthru %i->%i",
3030 e->src->index, e->dest->index);
3031 error ("wrong insn in the fallthru edge");
3032 debug_rtx (insn);
3033 err = true;
3038 return err;
3041 /* Verify that blocks are laid out in consecutive order. While walking the
3042 instructions, verify that all expected instructions are inside the basic
3043 blocks, and that all returns are followed by barriers. */
3045 static bool
3046 rtl_verify_bb_layout (void)
3048 basic_block bb;
3049 bool err = false;
3050 rtx_insn *x, *y;
3051 int num_bb_notes;
3052 rtx_insn * const rtx_first = get_insns ();
3053 basic_block last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun), curr_bb = NULL;
3055 num_bb_notes = 0;
3057 for (x = rtx_first; x; x = NEXT_INSN (x))
3059 if (NOTE_INSN_BASIC_BLOCK_P (x))
3061 bb = NOTE_BASIC_BLOCK (x);
3063 num_bb_notes++;
3064 if (bb != last_bb_seen->next_bb)
3065 internal_error ("basic blocks not laid down consecutively");
3067 curr_bb = last_bb_seen = bb;
3070 if (!curr_bb)
3072 switch (GET_CODE (x))
3074 case BARRIER:
3075 case NOTE:
3076 break;
3078 case CODE_LABEL:
3079 /* An ADDR_VEC is placed outside any basic block. */
3080 if (NEXT_INSN (x)
3081 && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
3082 x = NEXT_INSN (x);
3084 /* But in any case, non-deletable labels can appear anywhere. */
3085 break;
3087 default:
3088 fatal_insn ("insn outside basic block", x);
3092 if (JUMP_P (x)
3093 && returnjump_p (x) && ! condjump_p (x)
3094 && ! ((y = next_nonnote_nondebug_insn (x))
3095 && BARRIER_P (y)))
3096 fatal_insn ("return not followed by barrier", x);
3098 if (curr_bb && x == BB_END (curr_bb))
3099 curr_bb = NULL;
3102 if (num_bb_notes != n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS)
3103 internal_error
3104 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
3105 num_bb_notes, n_basic_blocks_for_fn (cfun));
3107 return err;
3110 /* Verify the CFG and RTL consistency common for both underlying RTL and
3111 cfglayout RTL, plus consistency checks specific to linearized RTL mode.
3113 Currently it does the following checks:
3114 - all checks of rtl_verify_flow_info_1
3115 - test head/end pointers
3116 - check that blocks are laid out in consecutive order
3117 - check that all insns are in the basic blocks
3118 (except the switch handling code, barriers and notes)
3119 - check that all returns are followed by barriers
3120 - check that all fallthru edges point to the adjacent blocks
3121 - verify that there is a single hot/cold partition boundary after bbro */
3123 static bool
3124 rtl_verify_flow_info (void)
3126 bool err = false;
3128 if (rtl_verify_flow_info_1 ())
3129 err = true;
3131 if (rtl_verify_bb_insn_chain ())
3132 err = true;
3134 if (rtl_verify_fallthru ())
3135 err = true;
3137 if (rtl_verify_bb_layout ())
3138 err = true;
3140 if (verify_hot_cold_block_grouping ())
3141 err = true;
3143 return err;
3146 /* Assume that the preceding pass has possibly eliminated jump instructions
3147 or converted the unconditional jumps. Eliminate the corresponding edges
3148 from the CFG. Return true if any edges are eliminated. */
3150 bool
3151 purge_dead_edges (basic_block bb)
3153 edge e;
3154 rtx_insn *insn = BB_END (bb);
3155 rtx note;
3156 bool purged = false;
3157 bool found;
3158 edge_iterator ei;
3160 if ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb))
3162 insn = PREV_INSN (insn);
3163 while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));
3165 /* If this instruction cannot trap, remove REG_EH_REGION notes. */
3166 if (NONJUMP_INSN_P (insn)
3167 && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
3169 rtx eqnote;
3171 if (! may_trap_p (PATTERN (insn))
3172 || ((eqnote = find_reg_equal_equiv_note (insn))
3173 && ! may_trap_p (XEXP (eqnote, 0))))
3174 remove_note (insn, note);
3177 /* Cleanup abnormal edges caused by exceptions or non-local gotos. */
3178 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3180 bool remove = false;
3182 /* There are three types of edges we need to handle correctly here: EH
3183 edges, abnormal call EH edges, and abnormal call non-EH edges. The
3184 latter can appear when nonlocal gotos are used. */
3185 if (e->flags & EDGE_ABNORMAL_CALL)
3187 if (!CALL_P (insn))
3188 remove = true;
3189 else if (can_nonlocal_goto (insn))
3191 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
3193 else if (flag_tm && find_reg_note (insn, REG_TM, NULL))
3195 else
3196 remove = true;
3198 else if (e->flags & EDGE_EH)
3199 remove = !can_throw_internal (insn);
3201 if (remove)
3203 remove_edge (e);
3204 df_set_bb_dirty (bb);
3205 purged = true;
3207 else
3208 ei_next (&ei);
3211 if (JUMP_P (insn))
3213 rtx note;
3214 edge b,f;
3215 edge_iterator ei;
3217 /* We care only about conditional jumps and simplejumps. */
3218 if (!any_condjump_p (insn)
3219 && !returnjump_p (insn)
3220 && !simplejump_p (insn))
3221 return purged;
3223 /* Branch probability/prediction notes are defined only for
3224 condjumps. We've possibly turned a condjump into a simplejump. */
3225 if (simplejump_p (insn))
3227 note = find_reg_note (insn, REG_BR_PROB, NULL);
3228 if (note)
3229 remove_note (insn, note);
3230 while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
3231 remove_note (insn, note);
3234 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3236 /* Don't let abnormal flags leak from computed jumps turned
3237 into simplejumps. */
3239 e->flags &= ~EDGE_ABNORMAL;
3241 /* See if this edge is one we should keep. */
3242 if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
3243 /* A conditional jump can fall through into the next
3244 block, so we should keep the edge. */
3246 ei_next (&ei);
3247 continue;
3249 else if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
3250 && BB_HEAD (e->dest) == JUMP_LABEL (insn))
3251 /* If the destination block is the target of the jump,
3252 keep the edge. */
3254 ei_next (&ei);
3255 continue;
3257 else if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
3258 && returnjump_p (insn))
3259 /* If the destination block is the exit block, and this
3260 instruction is a return, then keep the edge. */
3262 ei_next (&ei);
3263 continue;
3265 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
3266 /* Keep the edges that correspond to exceptions thrown by
3267 this instruction and rematerialize the EDGE_ABNORMAL
3268 flag we just cleared above. */
3270 e->flags |= EDGE_ABNORMAL;
3271 ei_next (&ei);
3272 continue;
3275 /* We do not need this edge. */
3276 df_set_bb_dirty (bb);
3277 purged = true;
3278 remove_edge (e);
3281 if (EDGE_COUNT (bb->succs) == 0 || !purged)
3282 return purged;
3284 if (dump_file)
3285 fprintf (dump_file, "Purged edges from bb %i\n", bb->index);
3287 if (!optimize)
3288 return purged;
3290 /* Redistribute probabilities. */
3291 if (single_succ_p (bb))
3293 single_succ_edge (bb)->probability = profile_probability::always ();
3295 else
3297 note = find_reg_note (insn, REG_BR_PROB, NULL);
3298 if (!note)
3299 return purged;
3301 b = BRANCH_EDGE (bb);
3302 f = FALLTHRU_EDGE (bb);
3303 b->probability = profile_probability::from_reg_br_prob_note
3304 (XINT (note, 0));
3305 f->probability = b->probability.invert ();
3308 return purged;
3310 else if (CALL_P (insn) && SIBLING_CALL_P (insn))
3312 /* First, there should not be any EH or ABCALL edges resulting
3313 from non-local gotos and the like. If there were, we shouldn't
3314 have created the sibcall in the first place. Second, there
3315 should of course never have been a fallthru edge. */
3316 gcc_assert (single_succ_p (bb));
3317 gcc_assert (single_succ_edge (bb)->flags
3318 == (EDGE_SIBCALL | EDGE_ABNORMAL));
3320 return false;
3323 /* If we don't see a jump insn, we don't know exactly why the block would
3324 have been broken at this point. Look for a simple, non-fallthru edge,
3325 as these are only created by conditional branches. If we find such an
3326 edge we know that there used to be a jump here and can then safely
3327 remove all non-fallthru edges. */
3328 found = false;
3329 FOR_EACH_EDGE (e, ei, bb->succs)
3330 if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
3332 found = true;
3333 break;
3336 if (!found)
3337 return purged;
3339 /* Remove all but the fake and fallthru edges. The fake edge may be
3340 the only successor for this block in the case of noreturn
3341 calls. */
3342 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3344 if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
3346 df_set_bb_dirty (bb);
3347 remove_edge (e);
3348 purged = true;
3350 else
3351 ei_next (&ei);
3354 gcc_assert (single_succ_p (bb));
3356 single_succ_edge (bb)->probability = profile_probability::always ();
3358 if (dump_file)
3359 fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
3360 bb->index);
3361 return purged;
3364 /* Search all basic blocks for potentially dead edges and purge them. Return
3365 true if some edge has been eliminated. */
3367 bool
3368 purge_all_dead_edges (void)
3370 bool purged = false;
3371 basic_block bb;
3373 FOR_EACH_BB_FN (bb, cfun)
3374 if (purge_dead_edges (bb))
3375 purged = true;
3377 return purged;
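/* A minimal sketch of the intended usage (hypothetical pass body, not
   from this file): simplify or delete jumps first, then let the CFG
   catch up, and run the heavier cleanup only when an edge actually
   went away.  */

static unsigned int
example_pass_execute (void)
{
  /* ... rewrite or delete jump insns here ...  */
  if (purge_all_dead_edges ())
    cleanup_cfg (0);
  return 0;
}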
3380 /* This is used by a few passes that emit some instructions after abnormal
3381 calls, moving the basic block's end, while they in fact do want to emit
3382 them on the fallthru edge. Look for abnormal call edges, search backward
3383 for the call in the block, and insert the instructions on the edge instead.
3385 Similarly, handle instructions throwing exceptions internally.
3387 Return true when instructions have been found and inserted on edges. */
3389 bool
3390 fixup_abnormal_edges (void)
3392 bool inserted = false;
3393 basic_block bb;
3395 FOR_EACH_BB_FN (bb, cfun)
3397 edge e;
3398 edge_iterator ei;
3400 /* Look for cases we are interested in - calls or instructions causing
3401 exceptions. */
3402 FOR_EACH_EDGE (e, ei, bb->succs)
3403 if ((e->flags & EDGE_ABNORMAL_CALL)
3404 || ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
3405 == (EDGE_ABNORMAL | EDGE_EH)))
3406 break;
3408 if (e && !CALL_P (BB_END (bb)) && !can_throw_internal (BB_END (bb)))
3410 rtx_insn *insn;
3412 /* Get past the new insns generated. Allow notes, as the insns
3413 may already have been deleted. */
3414 insn = BB_END (bb);
3415 while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
3416 && !can_throw_internal (insn)
3417 && insn != BB_HEAD (bb))
3418 insn = PREV_INSN (insn);
3420 if (CALL_P (insn) || can_throw_internal (insn))
3422 rtx_insn *stop, *next;
3424 e = find_fallthru_edge (bb->succs);
3426 stop = NEXT_INSN (BB_END (bb));
3427 BB_END (bb) = insn;
3429 for (insn = NEXT_INSN (insn); insn != stop; insn = next)
3431 next = NEXT_INSN (insn);
3432 if (INSN_P (insn))
3434 delete_insn (insn);
3436 /* Sometimes there's still the return value USE.
3437 If it's placed after a trapping call (i.e. that
3438 call is the last insn anyway), we have no fallthru
3439 edge. Simply delete this use and don't try to insert
3440 on the non-existent edge.
3441 Similarly, sometimes a call that can throw is
3442 followed in the source with __builtin_unreachable (),
3443 meaning that there is UB if the call returns rather
3444 than throws. If there weren't any instructions
3445 following such calls before, supposedly even the ones
3446 we've deleted aren't significant and can be
3447 removed. */
3448 if (e)
3450 /* We're not deleting it, we're moving it. */
3451 insn->set_undeleted ();
3452 SET_PREV_INSN (insn) = NULL_RTX;
3453 SET_NEXT_INSN (insn) = NULL_RTX;
3455 insert_insn_on_edge (insn, e);
3456 inserted = true;
3459 else if (!BARRIER_P (insn))
3460 set_block_for_insn (insn, NULL);
3464 /* It may be that we don't find any trapping insn. In this
3465 case we discovered quite late that the insn that had been
3466 marked as can_throw_internal in fact couldn't trap at all.
3467 So we should in fact delete the EH edges out of the block. */
3468 else
3469 purge_dead_edges (bb);
3473 return inserted;
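/* Sketch of the usual calling convention, hedged (see the reload/LRA
   call sites for the authoritative version): the moved insns are only
   queued on edges, so the caller must materialize them afterwards:

     if (fixup_abnormal_edges ())
       commit_edge_insertions ();  */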
3476 /* Delete the unconditional jump INSN and adjust the CFG correspondingly.
3477 Note that the INSN should be deleted *after* removing dead edges, so
3478 that the kept edge is the fallthrough edge for a (set (pc) (pc))
3479 but not for a (set (pc) (label_ref FOO)). */
3481 void
3482 update_cfg_for_uncondjump (rtx_insn *insn)
3484 basic_block bb = BLOCK_FOR_INSN (insn);
3485 gcc_assert (BB_END (bb) == insn);
3487 purge_dead_edges (bb);
3489 if (current_ir_type () != IR_RTL_CFGLAYOUT)
3491 if (!find_fallthru_edge (bb->succs))
3493 auto barrier = next_nonnote_nondebug_insn (insn);
3494 if (!barrier || !BARRIER_P (barrier))
3495 emit_barrier_after (insn);
3497 return;
3500 delete_insn (insn);
3501 if (EDGE_COUNT (bb->succs) == 1)
3503 rtx_insn *insn;
3505 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3507 /* Remove barriers from the footer if there are any. */
3508 for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
3509 if (BARRIER_P (insn))
3511 if (PREV_INSN (insn))
3512 SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
3513 else
3514 BB_FOOTER (bb) = NEXT_INSN (insn);
3515 if (NEXT_INSN (insn))
3516 SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
3518 else if (LABEL_P (insn))
3519 break;
3523 /* Cut the insns from FIRST to LAST out of the insn stream. */
3525 rtx_insn *
3526 unlink_insn_chain (rtx_insn *first, rtx_insn *last)
3528 rtx_insn *prevfirst = PREV_INSN (first);
3529 rtx_insn *nextlast = NEXT_INSN (last);
3531 SET_PREV_INSN (first) = NULL;
3532 SET_NEXT_INSN (last) = NULL;
3533 if (prevfirst)
3534 SET_NEXT_INSN (prevfirst) = nextlast;
3535 if (nextlast)
3536 SET_PREV_INSN (nextlast) = prevfirst;
3537 else
3538 set_last_insn (prevfirst);
3539 if (!prevfirst)
3540 set_first_insn (nextlast);
3541 return first;
3544 /* Skip over inter-block insns occurring after BB which are typically
3545 associated with BB (e.g., barriers). If there are any such insns,
3546 we return the last one. Otherwise, we return the end of BB. */
3548 static rtx_insn *
3549 skip_insns_after_block (basic_block bb)
3551 rtx_insn *insn, *last_insn, *next_head, *prev;
3553 next_head = NULL;
3554 if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3555 next_head = BB_HEAD (bb->next_bb);
3557 for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
3559 if (insn == next_head)
3560 break;
3562 switch (GET_CODE (insn))
3564 case BARRIER:
3565 last_insn = insn;
3566 continue;
3568 case NOTE:
3569 gcc_assert (NOTE_KIND (insn) != NOTE_INSN_BLOCK_END);
3570 continue;
3572 case CODE_LABEL:
3573 if (NEXT_INSN (insn)
3574 && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
3576 insn = NEXT_INSN (insn);
3577 last_insn = insn;
3578 continue;
3580 break;
3582 default:
3583 break;
3586 break;
3589 /* It is possible to hit a contradictory sequence. For instance:
3591 jump_insn
3592 NOTE_INSN_BLOCK_BEG
3593 barrier
3595 where the barrier belongs to jump_insn, but the note does not. This can be
3596 created by removing the basic block originally following
3597 NOTE_INSN_BLOCK_BEG. In such a case, reorder the notes. */
3599 for (insn = last_insn; insn != BB_END (bb); insn = prev)
3601 prev = PREV_INSN (insn);
3602 if (NOTE_P (insn))
3603 switch (NOTE_KIND (insn))
3605 case NOTE_INSN_BLOCK_END:
3606 gcc_unreachable ();
3607 break;
3608 case NOTE_INSN_DELETED:
3609 case NOTE_INSN_DELETED_LABEL:
3610 case NOTE_INSN_DELETED_DEBUG_LABEL:
3611 continue;
3612 default:
3613 reorder_insns (insn, insn, last_insn);
3617 return last_insn;
3620 /* Locate or create a label for a given basic block. */
3622 static rtx_insn *
3623 label_for_bb (basic_block bb)
3625 rtx_insn *label = BB_HEAD (bb);
3627 if (!LABEL_P (label))
3629 if (dump_file)
3630 fprintf (dump_file, "Emitting label for block %d\n", bb->index);
3632 label = block_label (bb);
3635 return label;
3638 /* Locate the effective beginning and end of the insn chain for each
3639 block, as defined by skip_insns_after_block above. */
3641 static void
3642 record_effective_endpoints (void)
3644 rtx_insn *next_insn;
3645 basic_block bb;
3646 rtx_insn *insn;
3648 for (insn = get_insns ();
3649 insn
3650 && NOTE_P (insn)
3651 && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
3652 insn = NEXT_INSN (insn))
3653 continue;
3654 /* No basic blocks at all? */
3655 gcc_assert (insn);
3657 if (PREV_INSN (insn))
3658 cfg_layout_function_header =
3659 unlink_insn_chain (get_insns (), PREV_INSN (insn));
3660 else
3661 cfg_layout_function_header = NULL;
3663 next_insn = get_insns ();
3664 FOR_EACH_BB_FN (bb, cfun)
3666 rtx_insn *end;
3668 if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
3669 BB_HEADER (bb) = unlink_insn_chain (next_insn,
3670 PREV_INSN (BB_HEAD (bb)));
3671 end = skip_insns_after_block (bb);
3672 if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
3673 BB_FOOTER (bb) = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
3674 next_insn = NEXT_INSN (BB_END (bb));
3677 cfg_layout_function_footer = next_insn;
3678 if (cfg_layout_function_footer)
3679 cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
3682 namespace {
3684 const pass_data pass_data_into_cfg_layout_mode =
3686 RTL_PASS, /* type */
3687 "into_cfglayout", /* name */
3688 OPTGROUP_NONE, /* optinfo_flags */
3689 TV_CFG, /* tv_id */
3690 0, /* properties_required */
3691 PROP_cfglayout, /* properties_provided */
3692 0, /* properties_destroyed */
3693 0, /* todo_flags_start */
3694 0, /* todo_flags_finish */
3697 class pass_into_cfg_layout_mode : public rtl_opt_pass
3699 public:
3700 pass_into_cfg_layout_mode (gcc::context *ctxt)
3701 : rtl_opt_pass (pass_data_into_cfg_layout_mode, ctxt)
3704 /* opt_pass methods: */
3705 unsigned int execute (function *) final override
3707 cfg_layout_initialize (0);
3708 return 0;
3711 }; // class pass_into_cfg_layout_mode
3713 } // anon namespace
3715 rtl_opt_pass *
3716 make_pass_into_cfg_layout_mode (gcc::context *ctxt)
3718 return new pass_into_cfg_layout_mode (ctxt);
3721 namespace {
3723 const pass_data pass_data_outof_cfg_layout_mode =
3725 RTL_PASS, /* type */
3726 "outof_cfglayout", /* name */
3727 OPTGROUP_NONE, /* optinfo_flags */
3728 TV_CFG, /* tv_id */
3729 0, /* properties_required */
3730 0, /* properties_provided */
3731 PROP_cfglayout, /* properties_destroyed */
3732 0, /* todo_flags_start */
3733 0, /* todo_flags_finish */
3736 class pass_outof_cfg_layout_mode : public rtl_opt_pass
3738 public:
3739 pass_outof_cfg_layout_mode (gcc::context *ctxt)
3740 : rtl_opt_pass (pass_data_outof_cfg_layout_mode, ctxt)
3743 /* opt_pass methods: */
3744 unsigned int execute (function *) final override;
3746 }; // class pass_outof_cfg_layout_mode
3748 unsigned int
3749 pass_outof_cfg_layout_mode::execute (function *fun)
3751 basic_block bb;
3753 FOR_EACH_BB_FN (bb, fun)
3754 if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (fun))
3755 bb->aux = bb->next_bb;
3757 cfg_layout_finalize ();
3759 return 0;
3762 } // anon namespace
3764 rtl_opt_pass *
3765 make_pass_outof_cfg_layout_mode (gcc::context *ctxt)
3767 return new pass_outof_cfg_layout_mode (ctxt);
3771 /* Link the basic blocks in the correct order, compacting the basic
3772 block queue while at it. If STAY_IN_CFGLAYOUT_MODE is false, this
3773 function also clears the basic block header and footer fields.
3775 This function is usually called after a pass (e.g. tracer) finishes
3776 some transformations while in cfglayout mode. The required sequence
3777 of the basic blocks is in a linked list along the bb->aux field.
3778 This function re-links the basic block prev_bb and next_bb pointers
3779 accordingly, and it compacts and renumbers the blocks.
3781 FIXME: This currently works only for RTL, but the only RTL-specific
3782 bits are the STAY_IN_CFGLAYOUT_MODE bits. The tracer pass was moved
3783 to GIMPLE a long time ago, but it doesn't relink the basic block
3784 chain. It could do that (to give better initial RTL) if this function
3785 is made IR-agnostic (and moved to cfganal.cc or cfg.cc while at it). */
3787 void
3788 relink_block_chain (bool stay_in_cfglayout_mode)
3790 basic_block bb, prev_bb;
3791 int index;
3793 /* Maybe dump the re-ordered sequence. */
3794 if (dump_file)
3796 fprintf (dump_file, "Reordered sequence:\n");
3797 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, index =
3798 NUM_FIXED_BLOCKS;
3800 bb = (basic_block) bb->aux, index++)
3802 fprintf (dump_file, " %i ", index);
3803 if (get_bb_original (bb))
3804 fprintf (dump_file, "duplicate of %i\n",
3805 get_bb_original (bb)->index);
3806 else if (forwarder_block_p (bb)
3807 && !LABEL_P (BB_HEAD (bb)))
3808 fprintf (dump_file, "compensation\n");
3809 else
3810 fprintf (dump_file, "bb %i\n", bb->index);
3814 /* Now reorder the blocks. */
3815 prev_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
3816 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
3817 for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
3819 bb->prev_bb = prev_bb;
3820 prev_bb->next_bb = bb;
3822 prev_bb->next_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
3823 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb = prev_bb;
3825 /* Then, clean up the aux fields. */
3826 FOR_ALL_BB_FN (bb, cfun)
3828 bb->aux = NULL;
3829 if (!stay_in_cfglayout_mode)
3830 BB_HEADER (bb) = BB_FOOTER (bb) = NULL;
3833 /* Maybe reset the original copy tables; they are not valid anymore
3834 when we renumber the basic blocks in compact_blocks. If we are
3835 going out of cfglayout mode, don't re-allocate the tables. */
3836 if (original_copy_tables_initialized_p ())
3837 free_original_copy_tables ();
3838 if (stay_in_cfglayout_mode)
3839 initialize_original_copy_tables ();
3841 /* Finally, put basic_block_info in the new order. */
3842 compact_blocks ();
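/* A minimal usage sketch (hypothetical helper, not from this file): a
   cfglayout-mode pass records its desired block order through the
   bb->aux chain and then commits it.  ORDER and N are illustrative;
   ORDER[0] must be the block currently following the entry block.  */

static void
example_commit_order (basic_block *order, unsigned n)
{
  gcc_assert (n > 0
	      && ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb == order[0]);
  for (unsigned i = 0; i + 1 < n; i++)
    order[i]->aux = order[i + 1];
  order[n - 1]->aux = NULL;
  relink_block_chain (/*stay_in_cfglayout_mode=*/true);
}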
3846 /* Given a reorder chain, rearrange the code to match. */
3848 static void
3849 fixup_reorder_chain (void)
3851 basic_block bb;
3852 rtx_insn *insn = NULL;
3854 if (cfg_layout_function_header)
3856 set_first_insn (cfg_layout_function_header);
3857 insn = cfg_layout_function_header;
3858 while (NEXT_INSN (insn))
3859 insn = NEXT_INSN (insn);
3862 /* First do the bulk reordering -- rechain the blocks without regard to
3863 the needed changes to jumps and labels. */
3865 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = (basic_block)
3866 bb->aux)
3868 if (BB_HEADER (bb))
3870 if (insn)
3871 SET_NEXT_INSN (insn) = BB_HEADER (bb);
3872 else
3873 set_first_insn (BB_HEADER (bb));
3874 SET_PREV_INSN (BB_HEADER (bb)) = insn;
3875 insn = BB_HEADER (bb);
3876 while (NEXT_INSN (insn))
3877 insn = NEXT_INSN (insn);
3879 if (insn)
3880 SET_NEXT_INSN (insn) = BB_HEAD (bb);
3881 else
3882 set_first_insn (BB_HEAD (bb));
3883 SET_PREV_INSN (BB_HEAD (bb)) = insn;
3884 insn = BB_END (bb);
3885 if (BB_FOOTER (bb))
3887 SET_NEXT_INSN (insn) = BB_FOOTER (bb);
3888 SET_PREV_INSN (BB_FOOTER (bb)) = insn;
3889 while (NEXT_INSN (insn))
3890 insn = NEXT_INSN (insn);
3894 SET_NEXT_INSN (insn) = cfg_layout_function_footer;
3895 if (cfg_layout_function_footer)
3896 SET_PREV_INSN (cfg_layout_function_footer) = insn;
3898 while (NEXT_INSN (insn))
3899 insn = NEXT_INSN (insn);
3901 set_last_insn (insn);
3902 if (flag_checking)
3903 verify_insn_chain ();
3905 /* Now add jumps and labels as needed to match the blocks' new
3906 outgoing edges. */
3908 bool remove_unreachable_blocks = false;
3909 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb ; bb = (basic_block)
3910 bb->aux)
3912 edge e_fall, e_taken, e;
3913 rtx_insn *bb_end_insn;
3914 rtx ret_label = NULL_RTX;
3915 basic_block nb;
3916 edge_iterator ei;
3917 bool asm_goto = false;
3919 if (EDGE_COUNT (bb->succs) == 0)
3920 continue;
3922 /* Find the old fallthru edge, and another non-EH edge for
3923 a taken jump. */
3924 e_taken = e_fall = NULL;
3926 FOR_EACH_EDGE (e, ei, bb->succs)
3927 if (e->flags & EDGE_FALLTHRU)
3928 e_fall = e;
3929 else if (! (e->flags & EDGE_EH))
3930 e_taken = e;
3932 bb_end_insn = BB_END (bb);
3933 if (rtx_jump_insn *bb_end_jump = dyn_cast <rtx_jump_insn *> (bb_end_insn))
3935 ret_label = JUMP_LABEL (bb_end_jump);
3936 if (any_condjump_p (bb_end_jump))
3938 /* This might happen if the conditional jump has side
3939 effects and could therefore not be optimized away.
3940 Make the basic block end with a barrier in order
3941 to prevent rtl_verify_flow_info from complaining. */
3942 if (!e_fall)
3944 gcc_assert (!onlyjump_p (bb_end_jump)
3945 || returnjump_p (bb_end_jump)
3946 || (e_taken->flags & EDGE_CROSSING));
3947 emit_barrier_after (bb_end_jump);
3948 continue;
3951 /* If the old fallthru is still next, nothing to do. */
3952 if (bb->aux == e_fall->dest
3953 || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3954 continue;
3956 /* The degenerate case of a conditional jump jumping to the next
3957 instruction can happen for jumps with side effects. We need
3958 to construct a forwarder block, and this will be done just
3959 fine by force_nonfallthru below. */
3960 if (!e_taken)
3963 /* There is another special case: if *neither* block is next,
3964 such as happens at the very end of a function, then we'll
3965 need to add a new unconditional jump. Choose the taken
3966 edge based on known or assumed probability. */
3967 else if (bb->aux != e_taken->dest)
3969 rtx note = find_reg_note (bb_end_jump, REG_BR_PROB, 0);
3971 if (note
3972 && profile_probability::from_reg_br_prob_note
3973 (XINT (note, 0)) < profile_probability::even ()
3974 && invert_jump (bb_end_jump,
3975 (e_fall->dest
3976 == EXIT_BLOCK_PTR_FOR_FN (cfun)
3977 ? NULL_RTX
3978 : label_for_bb (e_fall->dest)), 0))
3980 e_fall->flags &= ~EDGE_FALLTHRU;
3981 gcc_checking_assert (could_fall_through
3982 (e_taken->src, e_taken->dest));
3983 e_taken->flags |= EDGE_FALLTHRU;
3984 update_br_prob_note (bb);
3985 e = e_fall, e_fall = e_taken, e_taken = e;
3989 /* If the "jumping" edge is a crossing edge, and the fall
3990 through edge is non-crossing, leave things as they are. */
3991 else if ((e_taken->flags & EDGE_CROSSING)
3992 && !(e_fall->flags & EDGE_CROSSING))
3993 continue;
3995 /* Otherwise we can try to invert the jump. This will
3996 basically never fail; however, keep up the pretense. */
3997 else if (invert_jump (bb_end_jump,
3998 (e_fall->dest
3999 == EXIT_BLOCK_PTR_FOR_FN (cfun)
4000 ? NULL_RTX
4001 : label_for_bb (e_fall->dest)), 0))
4003 e_fall->flags &= ~EDGE_FALLTHRU;
4004 gcc_checking_assert (could_fall_through
4005 (e_taken->src, e_taken->dest));
4006 e_taken->flags |= EDGE_FALLTHRU;
4007 update_br_prob_note (bb);
4008 if (LABEL_NUSES (ret_label) == 0
4009 && single_pred_p (e_taken->dest))
4010 delete_insn (as_a<rtx_insn *> (ret_label));
4011 continue;
4014 else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
4016 /* If the old fallthru is still next, or if the
4017 asm goto doesn't have a fallthru (e.g. when followed by
4018 __builtin_unreachable ()), there is nothing to do. */
4019 if (! e_fall
4020 || bb->aux == e_fall->dest
4021 || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4022 continue;
4024 /* Otherwise we'll have to use the fallthru fixup below.
4025 But avoid redirecting asm goto to EXIT. */
4026 asm_goto = true;
4028 else
4030 /* Otherwise we have some return, switch or computed
4031 jump. In the 99% case, there should not have been a
4032 fallthru edge. */
4033 gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
4034 continue;
4037 else
4039 /* No fallthru implies a noreturn function with EH edges, or
4040 something similarly bizarre. In any case, we don't need to
4041 do anything. */
4042 if (! e_fall)
4043 continue;
4045 /* If the fallthru block is still next, nothing to do. */
4046 if (bb->aux == e_fall->dest)
4047 continue;
4049 /* A fallthru to exit block. */
4050 if (e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4051 continue;
4054 /* If E_FALL->dest is just a return block, then we can emit a
4055 return rather than a jump to the return block. */
4056 rtx_insn *ret, *use;
4057 basic_block dest;
4058 if (!asm_goto
4059 && bb_is_just_return (e_fall->dest, &ret, &use)
4060 && ((PATTERN (ret) == simple_return_rtx && targetm.have_simple_return ())
4061 || (PATTERN (ret) == ret_rtx && targetm.have_return ())))
4063 ret_label = PATTERN (ret);
4064 dest = EXIT_BLOCK_PTR_FOR_FN (cfun);
4066 e_fall->flags &= ~EDGE_CROSSING;
4067 /* E_FALL->dest might become unreachable as a result of
4068 replacing the jump with a return. So arrange to remove
4069 unreachable blocks. */
4070 remove_unreachable_blocks = true;
4072 else
4074 dest = e_fall->dest;
4077 /* We got here if we need to add a new jump insn.
4078 Note force_nonfallthru can delete E_FALL and thus we have to
4079 save E_FALL->src prior to the call to force_nonfallthru. */
4080 nb = force_nonfallthru_and_redirect (e_fall, dest, ret_label);
4081 if (nb)
4083 nb->aux = bb->aux;
4084 bb->aux = nb;
4085 /* Don't process this new block. */
4086 bb = nb;
4090 relink_block_chain (/*stay_in_cfglayout_mode=*/false);
4092 /* Annoying special case - jump around dead jumptables left in the code. */
4093 FOR_EACH_BB_FN (bb, cfun)
4095 edge e = find_fallthru_edge (bb->succs);
4097 if (e && !can_fallthru (e->src, e->dest))
4098 force_nonfallthru (e);
4101 /* When not optimizing, ensure that each edge's goto_locus is reflected
4102 by some instruction with that locus in the RTL. */
4103 if (!optimize && !DECL_IGNORED_P (current_function_decl))
4104 FOR_EACH_BB_FN (bb, cfun)
4106 edge e;
4107 edge_iterator ei;
4109 FOR_EACH_EDGE (e, ei, bb->succs)
4110 if (LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
4111 && !(e->flags & EDGE_ABNORMAL))
4113 edge e2;
4114 edge_iterator ei2;
4115 basic_block dest, nb;
4116 rtx_insn *end;
4118 insn = BB_END (e->src);
4119 end = PREV_INSN (BB_HEAD (e->src));
4120 while (insn != end
4121 && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
4122 insn = PREV_INSN (insn);
4123 if (insn != end
4124 && loc_equal (INSN_LOCATION (insn), e->goto_locus))
4125 continue;
4126 if (simplejump_p (BB_END (e->src))
4127 && !INSN_HAS_LOCATION (BB_END (e->src)))
4129 INSN_LOCATION (BB_END (e->src)) = e->goto_locus;
4130 continue;
4132 dest = e->dest;
4133 if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4135 /* Non-fallthru edges to the exit block cannot be split. */
4136 if (!(e->flags & EDGE_FALLTHRU))
4137 continue;
4139 else
4141 insn = BB_HEAD (dest);
4142 end = NEXT_INSN (BB_END (dest));
4143 while (insn != end && !NONDEBUG_INSN_P (insn))
4144 insn = NEXT_INSN (insn);
4145 if (insn != end && INSN_HAS_LOCATION (insn)
4146 && loc_equal (INSN_LOCATION (insn), e->goto_locus))
4147 continue;
4149 nb = split_edge (e);
4150 if (!INSN_P (BB_END (nb)))
4151 BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
4152 nb);
4153 INSN_LOCATION (BB_END (nb)) = e->goto_locus;
4155 /* If there are other incoming edges to the destination block
4156 with the same goto locus, redirect them to the new block as
4157 well; this can prevent other such blocks from being created
4158 in subsequent iterations of the loop. */
4159 for (ei2 = ei_start (dest->preds); (e2 = ei_safe_edge (ei2)); )
4160 if (LOCATION_LOCUS (e2->goto_locus) != UNKNOWN_LOCATION
4161 && !(e2->flags & (EDGE_ABNORMAL | EDGE_FALLTHRU))
4162 && e->goto_locus == e2->goto_locus)
4163 redirect_edge_and_branch (e2, nb);
4164 else
4165 ei_next (&ei2);
4169 /* Replacing a jump with a return may have exposed unreachable
4170 blocks. Remove them if such transformations were
4171 made. */
4172 if (remove_unreachable_blocks)
4173 delete_unreachable_blocks ();
4176 /* Perform sanity checks on the insn chain.
4177 1. Check that next/prev pointers are consistent in both the forward and
4178 reverse direction.
4179 2. Count insns in chain, going both directions, and check if equal.
4180 3. Check that get_last_insn () returns the actual end of chain. */
4182 DEBUG_FUNCTION void
4183 verify_insn_chain (void)
4185 rtx_insn *x, *prevx, *nextx;
4186 int insn_cnt1, insn_cnt2;
4188 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
4189 x != 0;
4190 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
4191 gcc_assert (PREV_INSN (x) == prevx);
4193 gcc_assert (prevx == get_last_insn ());
4195 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
4196 x != 0;
4197 nextx = x, insn_cnt2++, x = PREV_INSN (x))
4198 gcc_assert (NEXT_INSN (x) == nextx);
4200 gcc_assert (insn_cnt1 == insn_cnt2);
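/* Usage sketch (illustrative, not compiled here): the checker above is
   cheap enough to run after any pass that rewires the insn chain, but
   it is normally guarded by flag_checking, as cfg_layout_finalize does
   later in this file:

       if (flag_checking)
         verify_insn_chain ();  */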
4203 /* If we have assembler epilogues, the block falling through to exit must
4204 be the last one in the reordered chain when we reach final. Ensure
4205 that this condition is met. */
4206 static void
4207 fixup_fallthru_exit_predecessor (void)
4209 edge e;
4210 basic_block bb = NULL;
4212 /* This transformation is not valid before reload, because we might
4213 separate a call from the instruction that copies the return
4214 value. */
4215 gcc_assert (reload_completed);
4217 e = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
4218 if (e)
4219 bb = e->src;
4221 if (bb && bb->aux)
4223 basic_block c = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
4225 /* If the very first block is the one with the fall-through exit
4226 edge, we have to split that block. */
4227 if (c == bb)
4229 bb = split_block_after_labels (bb)->dest;
4230 bb->aux = c->aux;
4231 c->aux = bb;
4232 BB_FOOTER (bb) = BB_FOOTER (c);
4233 BB_FOOTER (c) = NULL;
4236 while (c->aux != bb)
4237 c = (basic_block) c->aux;
4239 c->aux = bb->aux;
4240 while (c->aux)
4241 c = (basic_block) c->aux;
4243 c->aux = bb;
4244 bb->aux = NULL;
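/* Illustrative sketch: in cfglayout mode the intended block order lives
   in the bb->aux chain rather than in next_bb, which is why the
   function above splices BB by editing aux pointers.  The reordered
   sequence can be walked like this (visit is a hypothetical callback):

       for (basic_block c = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
            c != NULL;
            c = (basic_block) c->aux)
         visit (c);  */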
4248 /* If the exit block has more than one fallthru predecessor, force
4249 it to have only one. */
4251 static void
4252 force_one_exit_fallthru (void)
4254 edge e, predecessor = NULL;
4255 bool more = false;
4256 edge_iterator ei;
4257 basic_block forwarder, bb;
4259 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
4260 if (e->flags & EDGE_FALLTHRU)
4262 if (predecessor == NULL)
4263 predecessor = e;
4264 else
4266 more = true;
4267 break;
4271 if (!more)
4272 return;
4274 /* Exit has several fallthru predecessors. Create a forwarder block for
4275 them. */
4276 forwarder = split_edge (predecessor);
4277 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
4278 (e = ei_safe_edge (ei)); )
4280 if (e->src == forwarder
4281 || !(e->flags & EDGE_FALLTHRU))
4282 ei_next (&ei);
4283 else
4284 redirect_edge_and_branch_force (e, forwarder);
4287 /* Fix up the chain of blocks -- make FORWARDER immediately precede the
4288 exit block. */
4289 FOR_EACH_BB_FN (bb, cfun)
4291 if (bb->aux == NULL && bb != forwarder)
4293 bb->aux = forwarder;
4294 break;
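/* Usage sketch: after this runs, a consumer may rely on the exit block
   having at most one fallthru predecessor, e.g.

       edge e = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
       if (e)
         ... e->src is the unique fallthru predecessor ...  */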
4299 /* Return true in case it is possible to duplicate the basic block BB. */
4301 static bool
4302 cfg_layout_can_duplicate_bb_p (const_basic_block bb)
4304 /* Do not attempt to duplicate tablejumps, as we need to unshare
4305 the dispatch table. This is difficult to do, as the instructions
4306 computing jump destination may be hoisted outside the basic block. */
4307 if (tablejump_p (BB_END (bb), NULL, NULL))
4308 return false;
4310 /* Do not duplicate blocks containing insns that can't be copied. */
4311 if (targetm.cannot_copy_insn_p)
4313 rtx_insn *insn = BB_HEAD (bb);
4314 while (1)
4316 if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
4317 return false;
4318 if (insn == BB_END (bb))
4319 break;
4320 insn = NEXT_INSN (insn);
4324 return true;
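/* Usage sketch (assuming the cfghooks wrappers of the same names):
   passes query and perform duplication through the generic interface
   rather than calling this predicate directly, roughly

       basic_block copy = NULL;
       if (can_duplicate_block_p (bb))
         copy = duplicate_block (bb, e, after);

   which dispatches here while the cfglayout hooks are installed.  */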
4327 rtx_insn *
4328 duplicate_insn_chain (rtx_insn *from, rtx_insn *to,
4329 class loop *loop, copy_bb_data *id)
4331 rtx_insn *insn, *next, *copy;
4332 rtx_note *last;
4334 /* Avoid updating the boundaries of the previous basic block. The
4335 note will get removed from the insn stream in fixup. */
4336 last = emit_note (NOTE_INSN_DELETED);
4338 /* Create copy at the end of INSN chain. The chain will
4339 be reordered later. */
4340 for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
4342 switch (GET_CODE (insn))
4344 case DEBUG_INSN:
4345 /* Don't duplicate label debug insns. */
4346 if (DEBUG_BIND_INSN_P (insn)
4347 && TREE_CODE (INSN_VAR_LOCATION_DECL (insn)) == LABEL_DECL)
4348 break;
4349 /* FALLTHRU */
4350 case INSN:
4351 case CALL_INSN:
4352 case JUMP_INSN:
4353 copy = emit_copy_of_insn_after (insn, get_last_insn ());
4354 if (JUMP_P (insn) && JUMP_LABEL (insn) != NULL_RTX
4355 && ANY_RETURN_P (JUMP_LABEL (insn)))
4356 JUMP_LABEL (copy) = JUMP_LABEL (insn);
4357 maybe_copy_prologue_epilogue_insn (insn, copy);
4358 /* If requested, remap dependence info of cliques brought in
4359 via inlining. */
4360 if (id)
4362 subrtx_iterator::array_type array;
4363 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
4364 if (MEM_P (*iter) && MEM_EXPR (*iter))
4366 tree op = MEM_EXPR (*iter);
4367 if (TREE_CODE (op) == WITH_SIZE_EXPR)
4368 op = TREE_OPERAND (op, 0);
4369 while (handled_component_p (op))
4370 op = TREE_OPERAND (op, 0);
4371 if ((TREE_CODE (op) == MEM_REF
4372 || TREE_CODE (op) == TARGET_MEM_REF)
4373 && MR_DEPENDENCE_CLIQUE (op) > 1
4374 && (!loop
4375 || (MR_DEPENDENCE_CLIQUE (op)
4376 != loop->owned_clique)))
4378 if (!id->dependence_map)
4379 id->dependence_map = new hash_map<dependence_hash,
4380 unsigned short>;
4381 bool existed;
4382 unsigned short &newc = id->dependence_map->get_or_insert
4383 (MR_DEPENDENCE_CLIQUE (op), &existed);
4384 if (!existed)
4386 gcc_assert
4387 (MR_DEPENDENCE_CLIQUE (op) <= cfun->last_clique);
4388 newc = ++cfun->last_clique;
4390 /* We cannot adjust MR_DEPENDENCE_CLIQUE in-place
4391 since MEM_EXPR is shared so make a copy and
4392 walk to the subtree again. */
4393 tree new_expr = unshare_expr (MEM_EXPR (*iter));
4394 if (TREE_CODE (new_expr) == WITH_SIZE_EXPR)
4395 new_expr = TREE_OPERAND (new_expr, 0);
4396 while (handled_component_p (new_expr))
4397 new_expr = TREE_OPERAND (new_expr, 0);
4398 MR_DEPENDENCE_CLIQUE (new_expr) = newc;
4399 set_mem_expr (const_cast <rtx> (*iter), new_expr);
4403 break;
4405 case JUMP_TABLE_DATA:
4406 /* Avoid copying dispatch tables. We never duplicate
4407 tablejumps, so this can only be hit if the table got
4408 moved far from the original jump.
4409 Avoid copying the following barrier as well, if any
4410 (and any debug insns in between). */
4411 for (next = NEXT_INSN (insn);
4412 next != NEXT_INSN (to);
4413 next = NEXT_INSN (next))
4414 if (!DEBUG_INSN_P (next))
4415 break;
4416 if (next != NEXT_INSN (to) && BARRIER_P (next))
4417 insn = next;
4418 break;
4420 case CODE_LABEL:
4421 break;
4423 case BARRIER:
4424 emit_barrier ();
4425 break;
4427 case NOTE:
4428 switch (NOTE_KIND (insn))
4430 /* In case the prologue is empty and the function contains a label
4431 in its first BB, we may want to copy the block. */
4432 case NOTE_INSN_PROLOGUE_END:
4434 case NOTE_INSN_DELETED:
4435 case NOTE_INSN_DELETED_LABEL:
4436 case NOTE_INSN_DELETED_DEBUG_LABEL:
4437 /* No problem to strip these. */
4438 case NOTE_INSN_FUNCTION_BEG:
4439 /* There is always just a single entry to the function. */
4440 case NOTE_INSN_BASIC_BLOCK:
4441 /* We should only switch text sections once. */
4442 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4443 break;
4445 case NOTE_INSN_EPILOGUE_BEG:
4446 case NOTE_INSN_UPDATE_SJLJ_CONTEXT:
4447 emit_note_copy (as_a <rtx_note *> (insn));
4448 break;
4450 default:
4451 /* All other notes should have already been eliminated. */
4452 gcc_unreachable ();
4454 break;
4455 default:
4456 gcc_unreachable ();
4459 insn = NEXT_INSN (last);
4460 delete_insn (last);
4461 return insn;
4464 /* Create a duplicate of the basic block BB. */
4466 static basic_block
4467 cfg_layout_duplicate_bb (basic_block bb, copy_bb_data *id)
4469 rtx_insn *insn;
4470 basic_block new_bb;
4472 class loop *loop = (id && current_loops) ? bb->loop_father : NULL;
4474 insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb), loop, id);
4475 new_bb = create_basic_block (insn,
4476 insn ? get_last_insn () : NULL,
4477 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
4479 BB_COPY_PARTITION (new_bb, bb);
4480 if (BB_HEADER (bb))
4482 insn = BB_HEADER (bb);
4483 while (NEXT_INSN (insn))
4484 insn = NEXT_INSN (insn);
4485 insn = duplicate_insn_chain (BB_HEADER (bb), insn, loop, id);
4486 if (insn)
4487 BB_HEADER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
4490 if (BB_FOOTER (bb))
4492 insn = BB_FOOTER (bb);
4493 while (NEXT_INSN (insn))
4494 insn = NEXT_INSN (insn);
4495 insn = duplicate_insn_chain (BB_FOOTER (bb), insn, loop, id);
4496 if (insn)
4497 BB_FOOTER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
4500 return new_bb;
4504 /* Main entry point to this module - initialize the datastructures for
4505 CFG layout changes. It keeps LOOPS up-to-date if not null.
4507 FLAGS is a set of additional flags to pass to cleanup_cfg(). */
4509 void
4510 cfg_layout_initialize (int flags)
4512 rtx_insn_list *x;
4513 basic_block bb;
4515 /* Once bb partitioning is complete, cfg layout mode should not be
4516 re-entered. Entering cfg layout mode may require fixups: for
4517 example, edge forwarding performed while optimizing the cfg
4518 layout might require moving a block from the hot to the cold
4519 section, which would create an illegal partitioning unless some
4520 manual fixup were performed. */
4521 gcc_assert (!crtl->bb_reorder_complete || !crtl->has_bb_partition);
4523 initialize_original_copy_tables ();
4525 cfg_layout_rtl_register_cfg_hooks ();
4527 record_effective_endpoints ();
4529 /* Make sure that the targets of non local gotos are marked. */
4530 for (x = nonlocal_goto_handler_labels; x; x = x->next ())
4532 bb = BLOCK_FOR_INSN (x->insn ());
4533 bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
4536 cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
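/* Usage sketch: a pass that wants to edit the CFG without maintaining
   the physical insn order brackets its work with this function and
   cfg_layout_finalize below:

       cfg_layout_initialize (0);
       ... edit blocks and edges; the block order lives in bb->aux ...
       cfg_layout_finalize ();  */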
4539 /* Splits superblocks. */
4540 void
4541 break_superblocks (void)
4543 bool need = false;
4544 basic_block bb;
4546 auto_sbitmap superblocks (last_basic_block_for_fn (cfun));
4547 bitmap_clear (superblocks);
4549 FOR_EACH_BB_FN (bb, cfun)
4550 if (bb->flags & BB_SUPERBLOCK)
4552 bb->flags &= ~BB_SUPERBLOCK;
4553 bitmap_set_bit (superblocks, bb->index);
4554 need = true;
4557 if (need)
4559 rebuild_jump_labels (get_insns ());
4560 find_many_sub_basic_blocks (superblocks);
4564 /* Finalize the changes: reorder insn list according to the sequence specified
4565 by aux pointers, enter compensation code, rebuild scope forest. */
4567 void
4568 cfg_layout_finalize (void)
4570 free_dominance_info (CDI_DOMINATORS);
4571 force_one_exit_fallthru ();
4572 rtl_register_cfg_hooks ();
4573 if (reload_completed && !targetm.have_epilogue ())
4574 fixup_fallthru_exit_predecessor ();
4575 fixup_reorder_chain ();
4577 rebuild_jump_labels (get_insns ());
4578 delete_dead_jumptables ();
4580 if (flag_checking)
4581 verify_insn_chain ();
4582 checking_verify_flow_info ();
4586 /* Same as split_block but update cfg_layout structures. */
4588 static basic_block
4589 cfg_layout_split_block (basic_block bb, void *insnp)
4591 rtx insn = (rtx) insnp;
4592 basic_block new_bb = rtl_split_block (bb, insn);
4594 BB_FOOTER (new_bb) = BB_FOOTER (bb);
4595 BB_FOOTER (bb) = NULL;
4597 return new_bb;
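/* Usage sketch: callers reach the function above through the generic
   split_block wrapper, e.g.

       basic_block rest = split_block (bb, insn)->dest;

   which leaves everything up to and including INSN in BB and returns
   the edge to the new block holding the remainder;
   rtl_split_block_before_cond_jump below uses exactly this idiom.  */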
4600 /* Redirect Edge to DEST. */
4601 static edge
4602 cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
4604 basic_block src = e->src;
4605 edge ret;
4607 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4608 return NULL;
4610 if (e->dest == dest)
4611 return e;
4613 if (e->flags & EDGE_CROSSING
4614 && BB_PARTITION (e->src) == BB_PARTITION (dest)
4615 && simplejump_p (BB_END (src)))
4617 if (dump_file)
4618 fprintf (dump_file,
4619 "Removing crossing jump while redirecting edge form %i to %i\n",
4620 e->src->index, dest->index);
4621 delete_insn (BB_END (src));
4622 remove_barriers_from_footer (src);
4623 e->flags |= EDGE_FALLTHRU;
4626 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4627 && (ret = try_redirect_by_replacing_jump (e, dest, true)))
4629 df_set_bb_dirty (src);
4630 return ret;
4633 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
4634 && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
4636 if (dump_file)
4637 fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
4638 e->src->index, dest->index);
4640 df_set_bb_dirty (e->src);
4641 redirect_edge_succ (e, dest);
4642 return e;
4645 /* redirect_edge_and_branch may decide to turn the branch into a
4646 fallthru edge when the basic blocks appear to be in sequence.
4647 Avoid this transformation. */
4649 if (e->flags & EDGE_FALLTHRU)
4651 /* Redirect any branch edges unified with the fallthru one. */
4652 if (JUMP_P (BB_END (src))
4653 && label_is_jump_target_p (BB_HEAD (e->dest),
4654 BB_END (src)))
4656 edge redirected;
4658 if (dump_file)
4659 fprintf (dump_file, "Fallthru edge unified with branch "
4660 "%i->%i redirected to %i\n",
4661 e->src->index, e->dest->index, dest->index);
4662 e->flags &= ~EDGE_FALLTHRU;
4663 redirected = redirect_branch_edge (e, dest);
4664 gcc_assert (redirected);
4665 redirected->flags |= EDGE_FALLTHRU;
4666 df_set_bb_dirty (redirected->src);
4667 return redirected;
4669 /* In case we are redirecting a fallthru edge to the branch edge
4670 of a conditional jump, remove the jump. */
4671 if (EDGE_COUNT (src->succs) == 2)
4673 /* Find the edge that is different from E. */
4674 edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);
4676 if (s->dest == dest
4677 && any_condjump_p (BB_END (src))
4678 && onlyjump_p (BB_END (src)))
4679 delete_insn (BB_END (src));
4681 if (dump_file)
4682 fprintf (dump_file, "Redirecting fallthru edge %i->%i to %i\n",
4683 e->src->index, e->dest->index, dest->index);
4684 ret = redirect_edge_succ_nodup (e, dest);
4686 else
4687 ret = redirect_branch_edge (e, dest);
4689 if (!ret)
4690 return NULL;
4692 fixup_partition_crossing (ret);
4693 /* We don't want simplejumps in the insn stream during cfglayout. */
4694 gcc_assert (!simplejump_p (BB_END (src)) || CROSSING_JUMP_P (BB_END (src)));
4696 df_set_bb_dirty (src);
4697 return ret;
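/* Usage sketch: callers use the generic redirect_edge_and_branch
   wrapper and must be prepared for failure, e.g.

       edge redirected = redirect_edge_and_branch (e, dest);
       if (!redirected)
         ... give up, or fall back to the _force variant ...  */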
4700 /* Simple wrapper as we always can redirect fallthru edges. */
4701 static basic_block
4702 cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
4704 edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
4706 gcc_assert (redirected);
4707 return NULL;
4710 /* Same as delete_basic_block but update cfg_layout structures. */
4712 static void
4713 cfg_layout_delete_block (basic_block bb)
4715 rtx_insn *insn, *next, *prev = PREV_INSN (BB_HEAD (bb)), *remnants;
4716 rtx_insn **to;
4718 if (BB_HEADER (bb))
4720 next = BB_HEAD (bb);
4721 if (prev)
4722 SET_NEXT_INSN (prev) = BB_HEADER (bb);
4723 else
4724 set_first_insn (BB_HEADER (bb));
4725 SET_PREV_INSN (BB_HEADER (bb)) = prev;
4726 insn = BB_HEADER (bb);
4727 while (NEXT_INSN (insn))
4728 insn = NEXT_INSN (insn);
4729 SET_NEXT_INSN (insn) = next;
4730 SET_PREV_INSN (next) = insn;
4732 next = NEXT_INSN (BB_END (bb));
4733 if (BB_FOOTER (bb))
4735 insn = BB_FOOTER (bb);
4736 while (insn)
4738 if (BARRIER_P (insn))
4740 if (PREV_INSN (insn))
4741 SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
4742 else
4743 BB_FOOTER (bb) = NEXT_INSN (insn);
4744 if (NEXT_INSN (insn))
4745 SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
4747 if (LABEL_P (insn))
4748 break;
4749 insn = NEXT_INSN (insn);
4751 if (BB_FOOTER (bb))
4753 insn = BB_END (bb);
4754 SET_NEXT_INSN (insn) = BB_FOOTER (bb);
4755 SET_PREV_INSN (BB_FOOTER (bb)) = insn;
4756 while (NEXT_INSN (insn))
4757 insn = NEXT_INSN (insn);
4758 SET_NEXT_INSN (insn) = next;
4759 if (next)
4760 SET_PREV_INSN (next) = insn;
4761 else
4762 set_last_insn (insn);
4765 if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
4766 to = &BB_HEADER (bb->next_bb);
4767 else
4768 to = &cfg_layout_function_footer;
4770 rtl_delete_block (bb);
4772 if (prev)
4773 prev = NEXT_INSN (prev);
4774 else
4775 prev = get_insns ();
4776 if (next)
4777 next = PREV_INSN (next);
4778 else
4779 next = get_last_insn ();
4781 if (next && NEXT_INSN (next) != prev)
4783 remnants = unlink_insn_chain (prev, next);
4784 insn = remnants;
4785 while (NEXT_INSN (insn))
4786 insn = NEXT_INSN (insn);
4787 SET_NEXT_INSN (insn) = *to;
4788 if (*to)
4789 SET_PREV_INSN (*to) = insn;
4790 *to = remnants;
4794 /* Return true when blocks A and B can be safely merged. */
4796 static bool
4797 cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
4799 /* If we are partitioning hot/cold basic blocks, we don't want to
4800 mess up unconditional or indirect jumps that cross between hot
4801 and cold sections.
4803 Basic block partitioning may result in some jumps that appear to
4804 be optimizable (or blocks that appear to be mergeable), but which really
4805 must be left untouched (they are required to make it safely across
4806 partition boundaries). See the comments at the top of
4807 bb-reorder.cc:partition_hot_cold_basic_blocks for complete details. */
4809 if (BB_PARTITION (a) != BB_PARTITION (b))
4810 return false;
4812 /* Protect the loop latches. */
4813 if (current_loops && b->loop_father->latch == b)
4814 return false;
4816 /* If we would end up moving B's instructions, make sure it doesn't fall
4817 through into the exit block, since we cannot recover from a fallthrough
4818 edge into the exit block occurring in the middle of a function. */
4819 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4821 edge e = find_fallthru_edge (b->succs);
4822 if (e && e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4823 return false;
4826 /* There must be exactly one edge in between the blocks. */
4827 return (single_succ_p (a)
4828 && single_succ (a) == b
4829 && single_pred_p (b)
4830 && a != b
4831 /* Must be simple edge. */
4832 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
4833 && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4834 && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
4835 /* If the jump insn has side effects, we can't kill the edge.
4836 When not optimizing, try_redirect_by_replacing_jump will
4837 not allow us to redirect an edge by replacing a table jump. */
4838 && (!JUMP_P (BB_END (a))
4839 || ((!optimize || reload_completed)
4840 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
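/* Usage sketch (assuming the cfghooks wrappers of the same names):
   cfg cleanup pairs this predicate with the merge hook, roughly

       if (can_merge_blocks_p (a, b))
         merge_blocks (a, b);

   both of which dispatch to the cfglayout implementations while these
   hooks are installed.  */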
4843 /* Merge block A and B. The blocks must be mergeable. */
4845 static void
4846 cfg_layout_merge_blocks (basic_block a, basic_block b)
4848 /* If B is a forwarder block whose outgoing edge has no location, we'll
4849 propagate the locus of the edge between A and B onto it. */
4850 const bool forward_edge_locus
4851 = (b->flags & BB_FORWARDER_BLOCK) != 0
4852 && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION;
4853 rtx_insn *insn;
4855 gcc_checking_assert (cfg_layout_can_merge_blocks_p (a, b));
4857 if (dump_file)
4858 fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
4859 a->index);
4861 /* If there was a CODE_LABEL beginning B, delete it. */
4862 if (LABEL_P (BB_HEAD (b)))
4864 delete_insn (BB_HEAD (b));
4867 /* We should have a fallthru edge in A; if not, do a dummy redirection
4868 to get it cleaned up. */
4869 if (JUMP_P (BB_END (a)))
4870 try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
4871 gcc_assert (!JUMP_P (BB_END (a)));
4873 /* If not optimizing, preserve the locus of the single edge between
4874 blocks A and B if necessary by emitting a nop. */
4875 if (!optimize
4876 && !forward_edge_locus
4877 && !DECL_IGNORED_P (current_function_decl))
4878 emit_nop_for_unique_locus_between (a, b);
4880 /* Move things from b->footer after a->footer. */
4881 if (BB_FOOTER (b))
4883 if (!BB_FOOTER (a))
4884 BB_FOOTER (a) = BB_FOOTER (b);
4885 else
4887 rtx_insn *last = BB_FOOTER (a);
4889 while (NEXT_INSN (last))
4890 last = NEXT_INSN (last);
4891 SET_NEXT_INSN (last) = BB_FOOTER (b);
4892 SET_PREV_INSN (BB_FOOTER (b)) = last;
4894 BB_FOOTER (b) = NULL;
4897 /* Move things from b->header before a->footer.
4898 Note that this may include dead tablejump data, but we don't clean
4899 those up until we go out of cfglayout mode. */
4900 if (BB_HEADER (b))
4902 if (! BB_FOOTER (a))
4903 BB_FOOTER (a) = BB_HEADER (b);
4904 else
4906 rtx_insn *last = BB_HEADER (b);
4908 while (NEXT_INSN (last))
4909 last = NEXT_INSN (last);
4910 SET_NEXT_INSN (last) = BB_FOOTER (a);
4911 SET_PREV_INSN (BB_FOOTER (a)) = last;
4912 BB_FOOTER (a) = BB_HEADER (b);
4914 BB_HEADER (b) = NULL;
4917 /* If the basic blocks are not adjacent, move them around. */
4918 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4920 insn = unlink_insn_chain (BB_HEAD (b), BB_END (b));
4922 emit_insn_after_noloc (insn, BB_END (a), a);
4924 /* Otherwise just re-associate the instructions. */
4925 else
4927 insn = BB_HEAD (b);
4928 BB_END (a) = BB_END (b);
4931 /* emit_insn_after_noloc doesn't call df_insn_change_bb.
4932 We need to call it explicitly. */
4933 update_bb_for_insn_chain (insn, BB_END (b), a);
4935 /* Skip possible DELETED_LABEL insn. */
4936 if (!NOTE_INSN_BASIC_BLOCK_P (insn))
4937 insn = NEXT_INSN (insn);
4938 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
4939 BB_HEAD (b) = BB_END (b) = NULL;
4940 delete_insn (insn);
4942 df_bb_delete (b->index);
4944 if (forward_edge_locus)
4945 EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;
4947 if (dump_file)
4948 fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
4951 /* Split edge E. */
4953 static basic_block
4954 cfg_layout_split_edge (edge e)
4956 basic_block new_bb =
4957 create_basic_block (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4958 ? NEXT_INSN (BB_END (e->src)) : get_insns (),
4959 NULL_RTX, e->src);
4961 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4962 BB_COPY_PARTITION (new_bb, e->src);
4963 else
4964 BB_COPY_PARTITION (new_bb, e->dest);
4965 make_edge (new_bb, e->dest, EDGE_FALLTHRU);
4966 redirect_edge_and_branch_force (e, new_bb);
4968 return new_bb;
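/* Usage sketch: via the generic wrapper,

       basic_block nb = split_edge (e);

   the goto_locus fixup in fixup_reorder_chain above uses exactly this
   call to give an edge's location a block of its own.  */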
4971 /* Do postprocessing after making a forwarder block joined by edge FALLTHRU. */
4973 static void
4974 rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
4978 /* Return true if BB contains only labels or non-executable
4979 instructions. */
4981 static bool
4982 rtl_block_empty_p (basic_block bb)
4984 rtx_insn *insn;
4986 if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
4987 || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
4988 return true;
4990 FOR_BB_INSNS (bb, insn)
4991 if (NONDEBUG_INSN_P (insn)
4992 && (!any_uncondjump_p (insn) || !onlyjump_p (insn)))
4993 return false;
4995 return true;
4998 /* Split a basic block if it ends with a conditional branch and if
4999 the other part of the block is not empty. */
5001 static basic_block
5002 rtl_split_block_before_cond_jump (basic_block bb)
5004 rtx_insn *insn;
5005 rtx_insn *split_point = NULL;
5006 rtx_insn *last = NULL;
5007 bool found_code = false;
5009 FOR_BB_INSNS (bb, insn)
5011 if (any_condjump_p (insn))
5012 split_point = last;
5013 else if (NONDEBUG_INSN_P (insn))
5014 found_code = true;
5015 last = insn;
5018 /* Only split if we found both a condjump and other real code. */
5019 if (found_code && split_point)
5020 return split_block (bb, split_point)->dest;
5021 else
5022 return NULL;
5025 /* Return true if BB ends with a call, possibly followed by some
5026 instructions that must stay with the call, false otherwise. */
5028 static bool
5029 rtl_block_ends_with_call_p (basic_block bb)
5031 rtx_insn *insn = BB_END (bb);
5033 while (!CALL_P (insn)
5034 && insn != BB_HEAD (bb)
5035 && (keep_with_call_p (insn)
5036 || NOTE_P (insn)
5037 || DEBUG_INSN_P (insn)))
5038 insn = PREV_INSN (insn);
5039 return (CALL_P (insn));
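/* Usage sketch: reached through the generic block_ends_with_call_p
   wrapper, e.g.

       if (block_ends_with_call_p (bb))
         ... BB's last real insn is a call, or must stay with one ...  */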
5042 /* Return true if BB ends with a conditional branch, false otherwise. */
5044 static bool
5045 rtl_block_ends_with_condjump_p (const_basic_block bb)
5047 return any_condjump_p (BB_END (bb));
5050 /* Return true if we need to add a fake edge to the exit block.
5051 Helper function for rtl_flow_call_edges_add. */
5053 static bool
5054 need_fake_edge_p (const rtx_insn *insn)
5056 if (!INSN_P (insn))
5057 return false;
5059 if ((CALL_P (insn)
5060 && !SIBLING_CALL_P (insn)
5061 && !find_reg_note (insn, REG_NORETURN, NULL)
5062 && !(RTL_CONST_OR_PURE_CALL_P (insn))))
5063 return true;
5065 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5066 && MEM_VOLATILE_P (PATTERN (insn)))
5067 || (GET_CODE (PATTERN (insn)) == PARALLEL
5068 && asm_noperands (insn) != -1
5069 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
5070 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
5073 /* Add fake edges to the function exit for any non-constant and
5074 non-noreturn calls, and for volatile inline assembly, in the bitmap
5075 of blocks specified by BLOCKS, or in the whole CFG if BLOCKS is zero.
5076 Return the number of blocks that were split.
5078 The goal is to expose cases in which entering a basic block does not imply
5079 that all subsequent instructions must be executed. */
5081 static int
5082 rtl_flow_call_edges_add (sbitmap blocks)
5084 int i;
5085 int blocks_split = 0;
5086 int last_bb = last_basic_block_for_fn (cfun);
5087 bool check_last_block = false;
5089 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
5090 return 0;
5092 if (! blocks)
5093 check_last_block = true;
5094 else
5095 check_last_block = bitmap_bit_p (blocks,
5096 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
5098 /* In the last basic block, before epilogue generation, there will be
5099 a fallthru edge to EXIT. Special care is required if the last insn
5100 of the last basic block is a call because make_edge folds duplicate
5101 edges, which would result in the fallthru edge also being marked
5102 fake, which would result in the fallthru edge being removed by
5103 remove_fake_edges, which would result in an invalid CFG.
5105 Moreover, we can't elide the outgoing fake edge, since the block
5106 profiler needs to take this into account in order to solve the minimal
5107 spanning tree in the case that the call doesn't return.
5109 Handle this by adding a dummy instruction in a new last basic block. */
5110 if (check_last_block)
5112 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
5113 rtx_insn *insn = BB_END (bb);
5115 /* Back up past insns that must be kept in the same block as a call. */
5116 while (insn != BB_HEAD (bb)
5117 && keep_with_call_p (insn))
5118 insn = PREV_INSN (insn);
5120 if (need_fake_edge_p (insn))
5122 edge e;
5124 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
5125 if (e)
5127 insert_insn_on_edge (gen_use (const0_rtx), e);
5128 commit_edge_insertions ();
5133 /* Now add fake edges to the function exit for any non-constant
5134 calls, since there is no way we can determine whether they will
5135 return or not. */
5137 for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
5139 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
5140 rtx_insn *insn;
5141 rtx_insn *prev_insn;
5143 if (!bb)
5144 continue;
5146 if (blocks && !bitmap_bit_p (blocks, i))
5147 continue;
5149 for (insn = BB_END (bb); ; insn = prev_insn)
5151 prev_insn = PREV_INSN (insn);
5152 if (need_fake_edge_p (insn))
5154 edge e;
5155 rtx_insn *split_at_insn = insn;
5157 /* Don't split the block between a call and an insn that should
5158 remain in the same block as the call. */
5159 if (CALL_P (insn))
5160 while (split_at_insn != BB_END (bb)
5161 && keep_with_call_p (NEXT_INSN (split_at_insn)))
5162 split_at_insn = NEXT_INSN (split_at_insn);
5164 /* The handling above of the final block before the epilogue
5165 should be enough to verify that there is no edge to the exit
5166 block in CFG already. Calling make_edge in such case would
5167 cause us to mark that edge as fake and remove it later. */
5169 if (flag_checking && split_at_insn == BB_END (bb))
5171 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
5172 gcc_assert (e == NULL);
5175 /* Note that the following may create a new basic block
5176 and renumber the existing basic blocks. */
5177 if (split_at_insn != BB_END (bb))
5179 e = split_block (bb, split_at_insn);
5180 if (e)
5181 blocks_split++;
5184 edge ne = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
5185 ne->probability = profile_probability::guessed_never ();
5188 if (insn == BB_HEAD (bb))
5189 break;
5193 if (blocks_split)
5194 verify_flow_info ();
5196 return blocks_split;
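/* Usage sketch (hedged; assuming the generic flow_call_edges_add
   wrapper from cfghooks): the whole CFG can be processed by passing a
   null bitmap, roughly

       int n_split = flow_call_edges_add (NULL);

   after which every interesting call has a fake edge to exit.  */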
5199 /* Add COMP_RTX as a condition at the end of COND_BB. FIRST_HEAD is
5200 the conditional branch target, SECOND_HEAD should be the fall-thru;
5201 there is no need to handle the fall-thru here, as the loop versioning
5202 code handles it. SECOND_HEAD is still passed because the equivalent
5203 hook for trees needs it, and the two hooks must have the same type. */
5204 static void
5205 rtl_lv_add_condition_to_bb (basic_block first_head ,
5206 basic_block second_head ATTRIBUTE_UNUSED,
5207 basic_block cond_bb, void *comp_rtx)
5209 rtx_code_label *label;
5210 rtx_insn *seq, *jump;
5211 rtx op0 = XEXP ((rtx)comp_rtx, 0);
5212 rtx op1 = XEXP ((rtx)comp_rtx, 1);
5213 enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
5214 machine_mode mode;
5217 label = block_label (first_head);
5218 mode = GET_MODE (op0);
5219 if (mode == VOIDmode)
5220 mode = GET_MODE (op1);
5222 start_sequence ();
5223 op0 = force_operand (op0, NULL_RTX);
5224 op1 = force_operand (op1, NULL_RTX);
5225 do_compare_rtx_and_jump (op0, op1, comp, 0, mode, NULL_RTX, NULL, label,
5226 profile_probability::uninitialized ());
5227 jump = get_last_insn ();
5228 JUMP_LABEL (jump) = label;
5229 LABEL_NUSES (label)++;
5230 seq = get_insns ();
5231 end_sequence ();
5233 /* Add the new cond, in the new head. */
5234 emit_insn_after (seq, BB_END (cond_bb));
5238 /* Given a block B with a conditional branch at its end, store the
5239 branch edge and the fall-thru edge in BRANCH_EDGE and FALLTHRU_EDGE
5240 respectively. */
5241 static void
5242 rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
5243 edge *fallthru_edge)
5245 edge e = EDGE_SUCC (b, 0);
5247 if (e->flags & EDGE_FALLTHRU)
5249 *fallthru_edge = e;
5250 *branch_edge = EDGE_SUCC (b, 1);
5252 else
5254 *branch_edge = e;
5255 *fallthru_edge = EDGE_SUCC (b, 1);
5259 void
5260 init_rtl_bb_info (basic_block bb)
5262 gcc_assert (!bb->il.x.rtl);
5263 bb->il.x.head_ = NULL;
5264 bb->il.x.rtl = ggc_cleared_alloc<rtl_bb_info> ();
5267 static bool
5268 rtl_bb_info_initialized_p (basic_block bb)
5270 return bb->il.x.rtl;
5273 /* Returns true if it is possible to remove edge E by redirecting
5274 it to the destination of the other edge from E->src. */
5276 static bool
5277 rtl_can_remove_branch_p (const_edge e)
5279 const_basic_block src = e->src;
5280 const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
5281 const rtx_insn *insn = BB_END (src);
5282 rtx set;
5284 /* The conditions are taken from try_redirect_by_replacing_jump. */
5285 if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
5286 return false;
5288 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
5289 return false;
5291 if (BB_PARTITION (src) != BB_PARTITION (target))
5292 return false;
5294 if (!onlyjump_p (insn)
5295 || tablejump_p (insn, NULL, NULL))
5296 return false;
5298 set = single_set (insn);
5299 if (!set || side_effects_p (set))
5300 return false;
5302 return true;
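/* Usage sketch (assuming the cfghooks wrappers of the same names):

       if (can_remove_branch_p (e))
         remove_branch (e);

   removes E together with the branch insn, leaving E->src with the
   single remaining successor edge.  */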
5305 static basic_block
5306 rtl_duplicate_bb (basic_block bb, copy_bb_data *id)
5308 bb = cfg_layout_duplicate_bb (bb, id);
5309 bb->aux = NULL;
5310 return bb;
5313 /* Do book-keeping of basic block BB for the profile consistency checker.
5314 Store the counts in RECORD. */
5315 static void
5316 rtl_account_profile_record (basic_block bb, struct profile_record *record)
5318 rtx_insn *insn;
5319 FOR_BB_INSNS (bb, insn)
5320 if (INSN_P (insn))
5322 record->size += insn_cost (insn, false);
5323 if (profile_info)
5325 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa ().initialized_p ()
5326 && ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa ().nonzero_p ()
5327 && bb->count.ipa ().initialized_p ())
5328 record->time
5329 += insn_cost (insn, true) * bb->count.ipa ().to_gcov_type ();
5331 else if (bb->count.initialized_p ()
5332 && ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.initialized_p ())
5333 record->time
5334 += insn_cost (insn, true)
5335 * bb->count.to_sreal_scale
5336 (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count).to_double ();
5337 else
5338 record->time += insn_cost (insn, true);
5342 /* Implementation of CFG manipulation for linearized RTL. */
5343 struct cfg_hooks rtl_cfg_hooks = {
5344 "rtl",
5345 rtl_verify_flow_info,
5346 rtl_dump_bb,
5347 rtl_dump_bb_for_graph,
5348 rtl_create_basic_block,
5349 rtl_redirect_edge_and_branch,
5350 rtl_redirect_edge_and_branch_force,
5351 rtl_can_remove_branch_p,
5352 rtl_delete_block,
5353 rtl_split_block,
5354 rtl_move_block_after,
5355 rtl_can_merge_blocks, /* can_merge_blocks_p */
5356 rtl_merge_blocks,
5357 rtl_predict_edge,
5358 rtl_predicted_by_p,
5359 cfg_layout_can_duplicate_bb_p,
5360 rtl_duplicate_bb,
5361 rtl_split_edge,
5362 rtl_make_forwarder_block,
5363 rtl_tidy_fallthru_edge,
5364 rtl_force_nonfallthru,
5365 rtl_block_ends_with_call_p,
5366 rtl_block_ends_with_condjump_p,
5367 rtl_flow_call_edges_add,
5368 NULL, /* execute_on_growing_pred */
5369 NULL, /* execute_on_shrinking_pred */
5370 NULL, /* duplicate loop for trees */
5371 NULL, /* lv_add_condition_to_bb */
5372 NULL, /* lv_adjust_loop_header_phi*/
5373 NULL, /* extract_cond_bb_edges */
5374 NULL, /* flush_pending_stmts */
5375 rtl_block_empty_p, /* block_empty_p */
5376 rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
5377 rtl_account_profile_record,
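/* Usage sketch: passes never index these hook tables directly; they
   install one and then call the generic wrappers, e.g.

       rtl_register_cfg_hooks ();             (selects the table above)
       cfg_layout_rtl_register_cfg_hooks ();  (selects the table below)

   as cfg_layout_initialize and cfg_layout_finalize demonstrate earlier
   in this file.  */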
5380 /* Implementation of CFG manipulation for cfg layout RTL, where
5381 basic blocks connected via fallthru edges do not have to be adjacent.
5382 This representation will hopefully become the default one in a future
5383 version of the compiler. */
5385 struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
5386 "cfglayout mode",
5387 rtl_verify_flow_info_1,
5388 rtl_dump_bb,
5389 rtl_dump_bb_for_graph,
5390 cfg_layout_create_basic_block,
5391 cfg_layout_redirect_edge_and_branch,
5392 cfg_layout_redirect_edge_and_branch_force,
5393 rtl_can_remove_branch_p,
5394 cfg_layout_delete_block,
5395 cfg_layout_split_block,
5396 rtl_move_block_after,
5397 cfg_layout_can_merge_blocks_p,
5398 cfg_layout_merge_blocks,
5399 rtl_predict_edge,
5400 rtl_predicted_by_p,
5401 cfg_layout_can_duplicate_bb_p,
5402 cfg_layout_duplicate_bb,
5403 cfg_layout_split_edge,
5404 rtl_make_forwarder_block,
5405 NULL, /* tidy_fallthru_edge */
5406 rtl_force_nonfallthru,
5407 rtl_block_ends_with_call_p,
5408 rtl_block_ends_with_condjump_p,
5409 rtl_flow_call_edges_add,
5410 NULL, /* execute_on_growing_pred */
5411 NULL, /* execute_on_shrinking_pred */
5412 duplicate_loop_body_to_header_edge, /* duplicate loop for rtl */
5413 rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
5414 NULL, /* lv_adjust_loop_header_phi*/
5415 rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
5416 NULL, /* flush_pending_stmts */
5417 rtl_block_empty_p, /* block_empty_p */
5418 rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
5419 rtl_account_profile_record,
5422 #include "gt-cfgrtl.h"
5424 #if __GNUC__ >= 10
5425 # pragma GCC diagnostic pop
5426 #endif