/* Basic block reordering routines for the GNU compiler.
   Copyright (C) 2000, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* This (greedy) algorithm constructs traces in several rounds.
   The construction starts from "seeds".  The seed for the first round
   is the entry point of the function.  When there is more than one seed,
   the one with the lowest key in the heap is selected first (see the
   function bb_to_key).  Then the algorithm repeatedly adds the most
   probable successor to the end of a trace.  Finally it connects the traces.

   There are two parameters: Branch Threshold and Exec Threshold.
   If the probability of an edge to a successor of the current basic block is
   lower than Branch Threshold, or the frequency of the successor is lower
   than Exec Threshold, the successor will be the seed in one of the next
   rounds.  Each round has these parameters lower than the previous one.
   The last round has to have these parameters set to zero
   so that the remaining blocks are picked up.

   The algorithm selects the most probable successor from all unvisited
   successors and successors that have been added to this trace.
   The other successors (that have not been "sent" to the next round) will be
   other seeds for this round and the secondary traces will start from them.
   If the successor has not been visited in this trace, it is added to the
   trace (however, there is some heuristic for simple branches).
   If the successor has been visited in this trace, a loop has been found.
   If the loop has many iterations, the loop is rotated so that the
   source block of the most probable edge going out of the loop
   is the last block of the trace.
   If the loop has few iterations and there is no edge from the last block of
   the loop going out of the loop, the loop header is duplicated.
   Finally, the construction of the trace is terminated.

   When connecting traces, it first checks whether there is an edge from the
   last block of one trace to the first block of another trace.
   When there are still some unconnected traces, it checks whether there
   exists a basic block BB such that BB is a successor of the last bb of one
   trace and BB is a predecessor of the first block of another trace.  In
   this case, BB is duplicated and the traces are connected through this
   duplicate.
   The rest of the traces are simply connected, so there will be a jump to
   the beginning of each remaining trace.

   References:

   "Software Trace Cache"
   A. Ramirez, J. Larriba-Pey, C. Navarro, J. Torrellas and M. Valero; 1999
   http://citeseer.nj.nec.com/15361.html  */
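
/* An illustrative sketch (not taken from the paper) of the first round
   on a simple diamond CFG:

	A
       / \		A->B taken with probability 90%,
      B   C		A->C taken with probability 10%.
       \ /
	D

   The trace is seeded at A; the most probable successors are followed,
   growing the trace A B D.  C, whose incoming edge falls below the
   round's Branch Threshold, is pushed to a later round and eventually
   forms its own short trace.  */
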
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "basic-block.h"
#include "flags.h"
#include "timevar.h"
#include "output.h"
#include "cfglayout.h"
#include "fibheap.h"
#include "target.h"
#include "function.h"
#include "tm_p.h"
#include "obstack.h"
#include "expr.h"
#include "regs.h"
/* The number of rounds.  In most cases there will only be 4 rounds, but
   when partitioning hot and cold basic blocks into separate sections of
   the .o file there will be an extra round.  */
#define N_ROUNDS 5

/* Stubs in case we don't have a return insn.
   We have to check at runtime too, not only at compile time.  */

#ifndef HAVE_return
#define HAVE_return 0
#define gen_return() NULL_RTX
#endif
/* Branch thresholds in thousandths (per mille) of the REG_BR_PROB_BASE.  */
static int branch_threshold[N_ROUNDS] = {400, 200, 100, 0, 0};

/* Exec thresholds in thousandths (per mille) of the frequency of bb 0.  */
static int exec_threshold[N_ROUNDS] = {500, 200, 50, 0, 0};
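
/* For example, in the first round an edge qualifies only when its
   probability is at least 400/1000 (40%) of REG_BR_PROB_BASE and its
   destination's frequency is at least 500/1000 (50%) of the frequency
   of the hottest entry block; in the final rounds both thresholds drop
   to zero so that every remaining block gets collected.  */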
/* If edge frequency is lower than DUPLICATION_THRESHOLD per mille of entry
   block, the edge destination is not duplicated while connecting traces.  */
#define DUPLICATION_THRESHOLD 100

/* Length of unconditional jump instruction.  */
static int uncond_jump_length;
/* Structure to hold needed information for each basic block.  */
typedef struct bbro_basic_block_data_def
{
  /* Which trace is the bb start of (-1 means it is not a start of a trace).  */
  int start_of_trace;

  /* Which trace is the bb end of (-1 means it is not an end of a trace).  */
  int end_of_trace;

  /* Which heap is BB in (if any)?  */
  fibheap_t heap;

  /* Which heap node is BB in (if any)?  */
  fibnode_t node;
} bbro_basic_block_data;
/* The current size of the following dynamic array.  */
static int array_size;

/* The array which holds needed information for basic blocks.  */
static bbro_basic_block_data *bbd;

/* To avoid frequent reallocation the size of the arrays is greater than
   needed; the number of elements is (not less than) 1.25 * size_wanted.  */
#define GET_ARRAY_SIZE(X) ((((X) / 4) + 1) * 5)
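
/* E.g. GET_ARRAY_SIZE (10) == ((10 / 4) + 1) * 5 == 15: the integer
   division rounds down and the "+ 1" rounds back up, so the result is
   the next multiple of 5 and always at least 1.25 times the requested
   size.  */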
/* Free the memory and set the pointer to NULL.  */
#define FREE(P) \
  do { if (P) { free (P); P = 0; } else { abort (); } } while (0)

/* Structure for holding information about a trace.  */
struct trace
{
  /* First and last basic block of the trace.  */
  basic_block first, last;

  /* The round of the STC creation which this trace was found in.  */
  int round;

  /* The length (i.e. the number of basic blocks) of the trace.  */
  int length;
};

/* Maximum frequency and count of one of the entry blocks.  */
int max_entry_frequency;
gcov_type max_entry_count;
/* Local function prototypes.  */
static void find_traces (int *, struct trace *);
static basic_block rotate_loop (edge, struct trace *, int);
static void mark_bb_visited (basic_block, int);
static void find_traces_1_round (int, int, gcov_type, struct trace *, int *,
				 int, fibheap_t *, int);
static basic_block copy_bb (basic_block, edge, basic_block, int);
static fibheapkey_t bb_to_key (basic_block);
static bool better_edge_p (basic_block, edge, int, int, int, int, edge);
static void connect_traces (int, struct trace *);
static bool copy_bb_p (basic_block, int);
static int get_uncond_jump_length (void);
static bool push_to_next_round_p (basic_block, int, int, int, gcov_type);
static void add_unlikely_executed_notes (void);
static void find_rarely_executed_basic_blocks_and_crossing_edges (edge *,
								  int *,
								  int *);
static void mark_bb_for_unlikely_executed_section (basic_block);
static void add_labels_and_missing_jumps (edge *, int);
static void add_reg_crossing_jump_notes (void);
static void fix_up_fall_thru_edges (void);
static void fix_edges_for_rarely_executed_code (edge *, int);
static void fix_crossing_conditional_branches (void);
static void fix_crossing_unconditional_branches (void);
/* Check to see if bb should be pushed into the next round of trace
   collections or not.  Reasons for pushing the block forward are:
   1) the block is cold, we are doing partitioning, and there will be
   another round (cold partition blocks are not supposed to be collected
   into traces until the very last round); or 2) there will be another
   round, and the basic block is not "hot enough" for the current round
   of trace collection.  */

static bool
push_to_next_round_p (basic_block bb, int round, int number_of_rounds,
		      int exec_th, gcov_type count_th)
{
  bool there_exists_another_round;
  bool cold_block;
  bool block_not_hot_enough;

  there_exists_another_round = round < number_of_rounds - 1;

  cold_block = (flag_reorder_blocks_and_partition
		&& bb->partition == COLD_PARTITION);

  block_not_hot_enough = (bb->frequency < exec_th
			  || bb->count < count_th
			  || probably_never_executed_bb_p (bb));

  if (there_exists_another_round
      && (cold_block || block_not_hot_enough))
    return true;
  else
    return false;
}
/* Find the traces for Software Trace Cache.  Chain each trace through
   RBI()->next.  Store the number of traces in N_TRACES and the description
   of the traces in TRACES.  */

static void
find_traces (int *n_traces, struct trace *traces)
{
  int i;
  int number_of_rounds;
  edge e;
  fibheap_t heap;

  /* Add one extra round of trace collection when partitioning hot/cold
     basic blocks into separate sections.  The last round is for all the
     cold blocks (and ONLY the cold blocks).  */

  number_of_rounds = N_ROUNDS - 1;
  if (flag_reorder_blocks_and_partition)
    number_of_rounds = N_ROUNDS;

  /* Insert entry points of function into heap.  */
  heap = fibheap_new ();
  max_entry_frequency = 0;
  max_entry_count = 0;
  FOR_EACH_EDGE (e, ENTRY_BLOCK_PTR->succs)
    {
      bbd[e->dest->index].heap = heap;
      bbd[e->dest->index].node = fibheap_insert (heap, bb_to_key (e->dest),
						 e->dest);
      if (e->dest->frequency > max_entry_frequency)
	max_entry_frequency = e->dest->frequency;
      if (e->dest->count > max_entry_count)
	max_entry_count = e->dest->count;
    }
  END_FOR_EACH_EDGE;

  /* Find the traces.  */
  for (i = 0; i < number_of_rounds; i++)
    {
      gcov_type count_threshold;

      if (dump_file)
	fprintf (dump_file, "STC - round %d\n", i + 1);
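
      /* Scale the round's per-mille exec threshold by the profile count
	 of the hottest entry block; when the count is large, divide
	 before multiplying so the product cannot overflow.  */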
      if (max_entry_count < INT_MAX / 1000)
	count_threshold = max_entry_count * exec_threshold[i] / 1000;
      else
	count_threshold = max_entry_count / 1000 * exec_threshold[i];

      find_traces_1_round (REG_BR_PROB_BASE * branch_threshold[i] / 1000,
			   max_entry_frequency * exec_threshold[i] / 1000,
			   count_threshold, traces, n_traces, i, &heap,
			   number_of_rounds);
    }
  fibheap_delete (heap);

  if (dump_file)
    {
      for (i = 0; i < *n_traces; i++)
	{
	  basic_block bb;
	  fprintf (dump_file, "Trace %d (round %d): ", i + 1,
		   traces[i].round + 1);
	  for (bb = traces[i].first; bb != traces[i].last; bb = bb->rbi->next)
	    fprintf (dump_file, "%d [%d] ", bb->index, bb->frequency);
	  fprintf (dump_file, "%d [%d]\n", bb->index, bb->frequency);
	}
      fflush (dump_file);
    }
}
/* Rotate loop whose back edge is BACK_EDGE in the tail of trace TRACE
   (with sequential number TRACE_N).  */

static basic_block
rotate_loop (edge back_edge, struct trace *trace, int trace_n)
{
  basic_block bb;

  /* Information about the best end (end after rotation) of the loop.  */
  basic_block best_bb = NULL;
  edge best_edge = NULL;
  int best_freq = -1;
  gcov_type best_count = -1;
  /* The best edge is preferred when its destination is not visited yet
     or is a start block of some trace.  */
  bool is_preferred = false;

  /* Find the most frequent edge that goes out from current trace.  */
  bb = back_edge->dest;
  do
    {
      edge e;

      FOR_EACH_EDGE (e, bb->succs)
	if (e->dest != EXIT_BLOCK_PTR
	    && e->dest->rbi->visited != trace_n
	    && (e->flags & EDGE_CAN_FALLTHRU)
	    && !(e->flags & EDGE_COMPLEX))
	  {
	    if (is_preferred)
	      {
		/* The best edge is preferred.  */
		if (!e->dest->rbi->visited
		    || bbd[e->dest->index].start_of_trace >= 0)
		  {
		    /* The current edge E is also preferred.  */
		    int freq = EDGE_FREQUENCY (e);
		    if (freq > best_freq || e->count > best_count)
		      {
			best_freq = freq;
			best_count = e->count;
			best_edge = e;
			best_bb = bb;
		      }
		  }
	      }
	    else
	      {
		if (!e->dest->rbi->visited
		    || bbd[e->dest->index].start_of_trace >= 0)
		  {
		    /* The current edge E is preferred.  */
		    is_preferred = true;
		    best_freq = EDGE_FREQUENCY (e);
		    best_count = e->count;
		    best_edge = e;
		    best_bb = bb;
		  }
		else
		  {
		    int freq = EDGE_FREQUENCY (e);
		    if (!best_edge || freq > best_freq || e->count > best_count)
		      {
			best_freq = freq;
			best_count = e->count;
			best_edge = e;
			best_bb = bb;
		      }
		  }
	      }
	  }
      END_FOR_EACH_EDGE;
      bb = bb->rbi->next;
    }
  while (bb != back_edge->dest);
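
  /* At this point the loop's blocks form a cycle in the rbi->next
     chain.  If a suitable exit block BEST_BB was found, splice the
     chain so that BEST_BB becomes the last block of the trace and the
     block after it becomes the new head of the rotated loop.  */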
  if (best_bb)
    {
      /* Rotate the loop so that the BEST_EDGE goes out from the last block of
	 the trace.  */
      if (back_edge->dest == trace->first)
	{
	  trace->first = best_bb->rbi->next;
	}
      else
	{
	  basic_block prev_bb;

	  for (prev_bb = trace->first;
	       prev_bb->rbi->next != back_edge->dest;
	       prev_bb = prev_bb->rbi->next)
	    ;
	  prev_bb->rbi->next = best_bb->rbi->next;

	  /* Try to get rid of uncond jump to cond jump.  */
	  if (EDGE_COUNT (prev_bb->succs) == 1)
	    {
	      basic_block header = EDGE_SUCC (prev_bb, 0)->dest;

	      /* Duplicate HEADER if it is a small block containing cond jump
		 in the end.  */
	      if (any_condjump_p (BB_END (header)) && copy_bb_p (header, 0))
		{
		  copy_bb (header, EDGE_SUCC (prev_bb, 0), prev_bb, trace_n);
		}
	    }
	}
    }
  else
    {
      /* We have not found suitable loop tail so do no rotation.  */
      best_bb = back_edge->src;
    }
  best_bb->rbi->next = NULL;
  return best_bb;
}
/* This function marks BB that it was visited in trace number TRACE.  */

static void
mark_bb_visited (basic_block bb, int trace)
{
  bb->rbi->visited = trace;
  if (bbd[bb->index].heap)
    {
      fibheap_delete_node (bbd[bb->index].heap, bbd[bb->index].node);
      bbd[bb->index].heap = NULL;
      bbd[bb->index].node = NULL;
    }
}
/* One round of finding traces.  Find traces for BRANCH_TH and EXEC_TH, i.e.
   do not include into traces basic blocks whose probability is lower than
   BRANCH_TH, whose frequency is lower than EXEC_TH, or whose count is lower
   than COUNT_TH.  It stores the new traces into TRACES and modifies the
   number of traces *N_TRACES.  Sets the round (which the trace belongs to)
   to ROUND.  It expects that starting basic blocks are in *HEAP and at the
   end it deletes *HEAP and stores starting points for the next round into
   new *HEAP.  */

static void
find_traces_1_round (int branch_th, int exec_th, gcov_type count_th,
		     struct trace *traces, int *n_traces, int round,
		     fibheap_t *heap, int number_of_rounds)
{
  /* The following variable refers to the last round in which non-"cold"
     blocks may be collected into a trace.  */

  int last_round = N_ROUNDS - 1;

  /* Heap for discarded basic blocks which are possible starting points for
     the next round.  */
  fibheap_t new_heap = fibheap_new ();

  while (!fibheap_empty (*heap))
    {
      basic_block bb;
      struct trace *trace;
      edge best_edge, e;
      fibheapkey_t key;

      bb = fibheap_extract_min (*heap);
      bbd[bb->index].heap = NULL;
      bbd[bb->index].node = NULL;

      if (dump_file)
	fprintf (dump_file, "Getting bb %d\n", bb->index);

      /* If the BB's frequency is too low, send BB to the next round.  When
	 partitioning hot/cold blocks into separate sections, make sure all
	 the cold blocks (and ONLY the cold blocks) go into the (extra) final
	 round.  */

      if (push_to_next_round_p (bb, round, number_of_rounds, exec_th,
				count_th))
	{
	  int key = bb_to_key (bb);
	  bbd[bb->index].heap = new_heap;
	  bbd[bb->index].node = fibheap_insert (new_heap, key, bb);

	  if (dump_file)
	    fprintf (dump_file,
		     "  Possible start point of next round: %d (key: %d)\n",
		     bb->index, key);
	  continue;
	}

      trace = traces + *n_traces;
      trace->first = bb;
      trace->round = round;
      trace->length = 0;
      (*n_traces)++;
      do
	{
	  int prob, freq;

	  /* The probability and frequency of the best edge.  */
	  int best_prob = INT_MIN / 2;
	  int best_freq = INT_MIN / 2;

	  best_edge = NULL;
	  mark_bb_visited (bb, *n_traces);
	  trace->length++;

	  if (dump_file)
	    fprintf (dump_file, "Basic block %d was visited in trace %d\n",
		     bb->index, *n_traces - 1);

	  /* Select the successor that will be placed after BB.  */
	  FOR_EACH_EDGE (e, bb->succs)
	    {
#ifdef ENABLE_CHECKING
	      if (e->flags & EDGE_FAKE)
		abort ();
#endif

	      if (e->dest == EXIT_BLOCK_PTR)
		continue;

	      if (e->dest->rbi->visited
		  && e->dest->rbi->visited != *n_traces)
		continue;

	      if (e->dest->partition == COLD_PARTITION
		  && round < last_round)
		continue;

	      prob = e->probability;
	      freq = EDGE_FREQUENCY (e);

	      /* Skip an edge that cannot be fallthru, or an improbable or
		 infrequent successor (i.e. an unsuitable successor).  */
	      if (!(e->flags & EDGE_CAN_FALLTHRU) || (e->flags & EDGE_COMPLEX)
		  || prob < branch_th || freq < exec_th || e->count < count_th)
		continue;

	      /* If partitioning hot/cold basic blocks, don't consider edges
		 that cross section boundaries.  */

	      if (better_edge_p (bb, e, prob, freq, best_prob, best_freq,
				 best_edge))
		{
		  best_edge = e;
		  best_prob = prob;
		  best_freq = freq;
		}
	    }
	  END_FOR_EACH_EDGE;

	  /* If the best destination has multiple predecessors, and can be
	     duplicated cheaper than a jump, don't allow it to be added
	     to a trace.  We'll duplicate it when connecting traces.  */
	  if (best_edge && EDGE_COUNT (best_edge->dest->preds) >= 2
	      && copy_bb_p (best_edge->dest, 0))
	    best_edge = NULL;
	  /* Add all non-selected successors to the heaps.  */
	  FOR_EACH_EDGE (e, bb->succs)
	    {
	      if (e == best_edge
		  || e->dest == EXIT_BLOCK_PTR
		  || e->dest->rbi->visited)
		continue;

	      key = bb_to_key (e->dest);

	      if (bbd[e->dest->index].heap)
		{
		  /* E->DEST is already in some heap.  */
		  if (key != bbd[e->dest->index].node->key)
		    {
		      if (dump_file)
			{
			  fprintf (dump_file,
				   "Changing key for bb %d from %ld to %ld.\n",
				   e->dest->index,
				   (long) bbd[e->dest->index].node->key,
				   key);
			}
		      fibheap_replace_key (bbd[e->dest->index].heap,
					   bbd[e->dest->index].node, key);
		    }
		}
	      else
		{
		  fibheap_t which_heap = *heap;

		  prob = e->probability;
		  freq = EDGE_FREQUENCY (e);

		  if (!(e->flags & EDGE_CAN_FALLTHRU)
		      || (e->flags & EDGE_COMPLEX)
		      || prob < branch_th || freq < exec_th
		      || e->count < count_th)
		    {
		      /* When partitioning hot/cold basic blocks, make sure
			 the cold blocks (and only the cold blocks) all get
			 pushed to the last round of trace collection.  */

		      if (push_to_next_round_p (e->dest, round,
						number_of_rounds,
						exec_th, count_th))
			which_heap = new_heap;
		    }

		  bbd[e->dest->index].heap = which_heap;
		  bbd[e->dest->index].node = fibheap_insert (which_heap,
							     key, e->dest);

		  if (dump_file)
		    {
		      fprintf (dump_file,
			       "  Possible start of %s round: %d (key: %ld)\n",
			       (which_heap == new_heap) ? "next" : "this",
			       e->dest->index, (long) key);
		    }
		}
	    }
	  END_FOR_EACH_EDGE;
	  if (best_edge) /* Suitable successor was found.  */
	    {
	      if (best_edge->dest->rbi->visited == *n_traces)
		{
		  /* We do nothing with one basic block loops.  */
		  if (best_edge->dest != bb)
		    {
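		      /* If the best edge carries more than 4/5 of the
			 loop header's frequency, the body comes back to
			 the header on most executions, i.e. the loop
			 iterates roughly four or more times on average,
			 which makes rotating it worthwhile.  */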
		      if (EDGE_FREQUENCY (best_edge)
			  > 4 * best_edge->dest->frequency / 5)
			{
			  /* The loop has at least 4 iterations.  If the loop
			     header is not the first block of the function
			     we can rotate the loop.  */

			  if (best_edge->dest != ENTRY_BLOCK_PTR->next_bb)
			    {
			      if (dump_file)
				{
				  fprintf (dump_file,
					   "Rotating loop %d - %d\n",
					   best_edge->dest->index, bb->index);
				}
			      bb->rbi->next = best_edge->dest;
			      bb = rotate_loop (best_edge, trace, *n_traces);
			    }
			}
		      else
			{
			  /* The loop has fewer than 4 iterations.  */

			  /* Check whether there is another edge from BB.  */
			  edge another_edge;
			  FOR_EACH_EDGE (another_edge, bb->succs)
			    {
			      if (another_edge != best_edge)
				break;
			    }
			  END_FOR_EACH_EDGE;

			  if (!another_edge && copy_bb_p (best_edge->dest,
							  !optimize_size))
			    {
			      bb = copy_bb (best_edge->dest, best_edge, bb,
					    *n_traces);
			    }
			}
		    }

		  /* Terminate the trace.  */
		  break;
		}
	      else
		{
		  /* Check for a situation
		       A
		      /|
		     B |
		      \|
		       C
		     where
		     EDGE_FREQUENCY (AB) + EDGE_FREQUENCY (BC)
		       >= EDGE_FREQUENCY (AC).
		     (i.e. 2 * B->frequency >= EDGE_FREQUENCY (AC) )
		     Best ordering is then A B C.

		     This situation is created for example by:

		     if (A) B;
		     C;
		     */
		  FOR_EACH_EDGE (e, bb->succs)
		    {
		      if (e != best_edge
			  && (e->flags & EDGE_CAN_FALLTHRU)
			  && !(e->flags & EDGE_COMPLEX)
			  && !e->dest->rbi->visited
			  && EDGE_COUNT (e->dest->preds) == 1
			  && !e->crossing_edge
			  && EDGE_COUNT (e->dest->succs) == 1
			  && (EDGE_SUCC (e->dest, 0)->flags & EDGE_CAN_FALLTHRU)
			  && !(EDGE_SUCC (e->dest, 0)->flags & EDGE_COMPLEX)
			  && EDGE_SUCC (e->dest, 0)->dest == best_edge->dest
			  && 2 * e->dest->frequency >= EDGE_FREQUENCY (best_edge))
			{
			  best_edge = e;
			  if (dump_file)
			    fprintf (dump_file, "Selecting BB %d\n",
				     best_edge->dest->index);
			  break;
			}
		    }
		  END_FOR_EACH_EDGE;

		  bb->rbi->next = best_edge->dest;
		  bb = best_edge->dest;
		}
	    }
	}
      while (best_edge);
      trace->last = bb;
      bbd[trace->first->index].start_of_trace = *n_traces - 1;
      bbd[trace->last->index].end_of_trace = *n_traces - 1;

      /* The trace is terminated so we have to recount the keys in heap
	 (some block can have a lower key because now one of its predecessors
	 is an end of the trace).  */
      FOR_EACH_EDGE (e, bb->succs)
	{
	  if (e->dest == EXIT_BLOCK_PTR
	      || e->dest->rbi->visited)
	    continue;

	  if (bbd[e->dest->index].heap)
	    {
	      key = bb_to_key (e->dest);
	      if (key != bbd[e->dest->index].node->key)
		{
		  if (dump_file)
		    {
		      fprintf (dump_file,
			       "Changing key for bb %d from %ld to %ld.\n",
			       e->dest->index,
			       (long) bbd[e->dest->index].node->key, key);
		    }
		  fibheap_replace_key (bbd[e->dest->index].heap,
				       bbd[e->dest->index].node,
				       key);
		}
	    }
	}
      END_FOR_EACH_EDGE;
    }

  fibheap_delete (*heap);

  /* "Return" the new heap.  */
  *heap = new_heap;
}
/* Create a duplicate of the basic block OLD_BB and redirect edge E to it, add
   it to the trace after BB, mark OLD_BB visited and update the pass' data
   structures (TRACE is the number of the trace which OLD_BB is duplicated
   to).  */

static basic_block
copy_bb (basic_block old_bb, edge e, basic_block bb, int trace)
{
  basic_block new_bb;

  new_bb = duplicate_block (old_bb, e);
  if (e->dest != new_bb)
    abort ();
  if (e->dest->rbi->visited)
    abort ();
  if (dump_file)
    fprintf (dump_file,
	     "Duplicated bb %d (created bb %d)\n",
	     old_bb->index, new_bb->index);
  new_bb->rbi->visited = trace;
  new_bb->rbi->next = bb->rbi->next;
  bb->rbi->next = new_bb;

  if (new_bb->index >= array_size || last_basic_block > array_size)
    {
      int i;
      int new_size;

      new_size = MAX (last_basic_block, new_bb->index + 1);
      new_size = GET_ARRAY_SIZE (new_size);
      bbd = xrealloc (bbd, new_size * sizeof (bbro_basic_block_data));
      for (i = array_size; i < new_size; i++)
	{
	  bbd[i].start_of_trace = -1;
	  bbd[i].end_of_trace = -1;
	  bbd[i].heap = NULL;
	  bbd[i].node = NULL;
	}
      array_size = new_size;

      if (dump_file)
	{
	  fprintf (dump_file,
		   "Growing the dynamic array to %d elements.\n",
		   array_size);
	}
    }

  return new_bb;
}
/* Compute and return the key (for the heap) of the basic block BB.  */

static fibheapkey_t
bb_to_key (basic_block bb)
{
  edge e;

  int priority = 0;

  /* Do not start in probably never executed blocks.  */

  if (bb->partition == COLD_PARTITION || probably_never_executed_bb_p (bb))
    return BB_FREQ_MAX;

  /* Prefer blocks whose predecessor is an end of some trace
     or whose predecessor edge is EDGE_DFS_BACK.  */
  FOR_EACH_EDGE (e, bb->preds)
    {
      if ((e->src != ENTRY_BLOCK_PTR && bbd[e->src->index].end_of_trace >= 0)
	  || (e->flags & EDGE_DFS_BACK))
	{
	  int edge_freq = EDGE_FREQUENCY (e);

	  if (edge_freq > priority)
	    priority = edge_freq;
	}
    }
  END_FOR_EACH_EDGE;
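
  /* The fibonacci heap always extracts the minimum key first, so the
     key is the negated frequency: the most frequent candidate comes out
     of the heap first, a block with a prioritized predecessor gets a
     key below every plain -frequency value, and cold blocks (which got
     BB_FREQ_MAX above) come out last.  */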
  if (priority)
    /* The block with priority should have significantly lower key.  */
    return -(100 * BB_FREQ_MAX + 100 * priority + bb->frequency);
  return -bb->frequency;
}
/* Return true when the edge E from basic block BB is better than the
   temporary best edge (details are in the function).  The probability of
   edge E is PROB.  The frequency of the successor is FREQ.  The current
   best probability is BEST_PROB, the best frequency is BEST_FREQ.
   The edges are considered to be equivalent when PROB does not differ much
   from BEST_PROB; similarly for frequency.  */

static bool
better_edge_p (basic_block bb, edge e, int prob, int freq, int best_prob,
	       int best_freq, edge cur_best_edge)
{
  bool is_better_edge;

  /* The BEST_* values do not have to be best, but can be a bit smaller than
     maximum values.  */
  int diff_prob = best_prob / 10;
  int diff_freq = best_freq / 10;
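
  /* E.g. with BEST_PROB == 5000, DIFF_PROB is 500, so any probability
     between 4500 and 5500 (inclusive) counts as "equivalent" and the
     frequency comparison below breaks the tie.  */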
  if (prob > best_prob + diff_prob)
    /* The edge has higher probability than the temporary best edge.  */
    is_better_edge = true;
  else if (prob < best_prob - diff_prob)
    /* The edge has lower probability than the temporary best edge.  */
    is_better_edge = false;
  else if (freq < best_freq - diff_freq)
    /* The edge and the temporary best edge have almost equivalent
       probabilities.  The higher frequency of a successor now means
       that there is another edge going into that successor.
       This successor has lower frequency so it is better.  */
    is_better_edge = true;
  else if (freq > best_freq + diff_freq)
    /* This successor has higher frequency so it is worse.  */
    is_better_edge = false;
  else if (e->dest->prev_bb == bb)
    /* The edges have equivalent probabilities and the successors
       have equivalent frequencies.  Select the previous successor.  */
    is_better_edge = true;
  else
    is_better_edge = false;

  /* If we are doing hot/cold partitioning, make sure that we always favor
     non-crossing edges over crossing edges.  */

  if (!is_better_edge
      && flag_reorder_blocks_and_partition
      && cur_best_edge
      && cur_best_edge->crossing_edge
      && !e->crossing_edge)
    is_better_edge = true;

  return is_better_edge;
}
/* Connect traces in array TRACES, N_TRACES is the count of traces.  */

static void
connect_traces (int n_traces, struct trace *traces)
{
  int i;
  int unconnected_hot_trace_count = 0;
  bool cold_connected = true;
  bool *connected;
  bool *cold_traces;
  int last_trace;
  int freq_threshold;
  gcov_type count_threshold;

  freq_threshold = max_entry_frequency * DUPLICATION_THRESHOLD / 1000;
  if (max_entry_count < INT_MAX / 1000)
    count_threshold = max_entry_count * DUPLICATION_THRESHOLD / 1000;
  else
    count_threshold = max_entry_count / 1000 * DUPLICATION_THRESHOLD;

  connected = xcalloc (n_traces, sizeof (bool));
  last_trace = -1;

  /* If we are partitioning hot/cold basic blocks, mark the cold
     traces as already connected, to remove them from consideration
     for connection to the hot traces.  After the hot traces have all
     been connected (determined by "unconnected_hot_trace_count"), we
     will go back and connect the cold traces.  */

  cold_traces = xcalloc (n_traces, sizeof (bool));

  if (flag_reorder_blocks_and_partition)
    for (i = 0; i < n_traces; i++)
      {
	if (traces[i].first->partition == COLD_PARTITION)
	  {
	    connected[i] = true;
	    cold_traces[i] = true;
	    cold_connected = false;
	  }
	else
	  unconnected_hot_trace_count++;
      }
  for (i = 0; i < n_traces || !cold_connected; i++)
    {
      int t = i;
      int t2;
      edge e, best;
      int best_len;

      /* If we are partitioning hot/cold basic blocks, check to see
	 if all the hot traces have been connected.  If so, go back
	 and mark the cold traces as unconnected so we can connect
	 them up too.  Re-set "i" to the first (unconnected) cold
	 trace.  Use flag "cold_connected" to make sure we don't do
	 this step more than once.  */

      if (flag_reorder_blocks_and_partition
	  && (i >= n_traces || unconnected_hot_trace_count <= 0)
	  && !cold_connected)
	{
	  int j;
	  int first_cold_trace = -1;

	  for (j = 0; j < n_traces; j++)
	    if (cold_traces[j])
	      {
		connected[j] = false;
		if (first_cold_trace == -1)
		  first_cold_trace = j;
	      }
	  i = t = first_cold_trace;
	  cold_connected = true;
	}

      if (connected[t])
	continue;

      connected[t] = true;
      if (unconnected_hot_trace_count > 0)
	unconnected_hot_trace_count--;
      /* Find the predecessor traces.  */
      for (t2 = t; t2 > 0;)
	{
	  best = NULL;
	  best_len = 0;
	  FOR_EACH_EDGE (e, traces[t2].first->preds)
	    {
	      int si = e->src->index;

	      if (e->src != ENTRY_BLOCK_PTR
		  && (e->flags & EDGE_CAN_FALLTHRU)
		  && !(e->flags & EDGE_COMPLEX)
		  && bbd[si].end_of_trace >= 0
		  && !connected[bbd[si].end_of_trace]
		  && (!best
		      || e->probability > best->probability
		      || (e->probability == best->probability
			  && traces[bbd[si].end_of_trace].length > best_len)))
		{
		  best = e;
		  best_len = traces[bbd[si].end_of_trace].length;
		}
	    }
	  END_FOR_EACH_EDGE;

	  if (best)
	    {
	      best->src->rbi->next = best->dest;
	      t2 = bbd[best->src->index].end_of_trace;
	      connected[t2] = true;

	      if (unconnected_hot_trace_count > 0)
		unconnected_hot_trace_count--;

	      if (dump_file)
		{
		  fprintf (dump_file, "Connection: %d %d\n",
			   best->src->index, best->dest->index);
		}
	    }
	  else
	    break;
	}

      if (last_trace >= 0)
	traces[last_trace].last->rbi->next = traces[t2].first;
      last_trace = t;
      /* Find the successor traces.  */
      while (1)
	{
	  /* Find the continuation of the chain.  */
	  best = NULL;
	  best_len = 0;
	  FOR_EACH_EDGE (e, traces[t].last->succs)
	    {
	      int di = e->dest->index;

	      if (e->dest != EXIT_BLOCK_PTR
		  && (e->flags & EDGE_CAN_FALLTHRU)
		  && !(e->flags & EDGE_COMPLEX)
		  && bbd[di].start_of_trace >= 0
		  && !connected[bbd[di].start_of_trace]
		  && (!best
		      || e->probability > best->probability
		      || (e->probability == best->probability
			  && traces[bbd[di].start_of_trace].length > best_len)))
		{
		  best = e;
		  best_len = traces[bbd[di].start_of_trace].length;
		}
	    }
	  END_FOR_EACH_EDGE;

	  if (best)
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "Connection: %d %d\n",
			   best->src->index, best->dest->index);
		}
	      t = bbd[best->dest->index].start_of_trace;
	      traces[last_trace].last->rbi->next = traces[t].first;
	      connected[t] = true;
	      if (unconnected_hot_trace_count > 0)
		unconnected_hot_trace_count--;
	      last_trace = t;
	    }
	  else
	    {
	      /* Try to connect the traces by duplication of 1 block.  */
	      edge e2;
	      basic_block next_bb = NULL;
	      bool try_copy = false;

	      FOR_EACH_EDGE (e, traces[t].last->succs)
		{
		  if (e->dest != EXIT_BLOCK_PTR
		      && (e->flags & EDGE_CAN_FALLTHRU)
		      && !(e->flags & EDGE_COMPLEX)
		      && (!best || e->probability > best->probability))
		    {
		      edge best2 = NULL;
		      int best2_len = 0;

		      /* If the destination is a start of a trace which is only
			 one block long, then no need to search the successor
			 blocks of the trace.  Accept it.  */
		      if (bbd[e->dest->index].start_of_trace >= 0
			  && traces[bbd[e->dest->index].start_of_trace].length
			     == 1)
			{
			  best = e;
			  try_copy = true;
			  continue;
			}

		      FOR_EACH_EDGE (e2, e->dest->succs)
			{
			  int di = e2->dest->index;

			  if (e2->dest == EXIT_BLOCK_PTR
			      || ((e2->flags & EDGE_CAN_FALLTHRU)
				  && !(e2->flags & EDGE_COMPLEX)
				  && bbd[di].start_of_trace >= 0
				  && !connected[bbd[di].start_of_trace]
				  && (EDGE_FREQUENCY (e2) >= freq_threshold)
				  && (e2->count >= count_threshold)
				  && (!best2
				      || e2->probability > best2->probability
				      || (e2->probability == best2->probability
					  && traces[bbd[di].start_of_trace].length
					     > best2_len))))
			    {
			      best = e;
			      best2 = e2;
			      if (e2->dest != EXIT_BLOCK_PTR)
				best2_len = traces[bbd[di].start_of_trace].length;
			      else
				best2_len = INT_MAX;
			      next_bb = e2->dest;
			      try_copy = true;
			    }
			}
		      END_FOR_EACH_EDGE;
		    }
		}
	      END_FOR_EACH_EDGE;
	      if (flag_reorder_blocks_and_partition)
		try_copy = false;

	      /* Copy tiny blocks always; copy larger blocks only when the
		 edge is traversed frequently enough.  */
	      if (try_copy
		  && copy_bb_p (best->dest,
				!optimize_size
				&& EDGE_FREQUENCY (best) >= freq_threshold
				&& best->count >= count_threshold))
		{
		  basic_block new_bb;

		  if (dump_file)
		    {
		      fprintf (dump_file, "Connection: %d %d ",
			       traces[t].last->index, best->dest->index);
		      if (!next_bb)
			fputc ('\n', dump_file);
		      else if (next_bb == EXIT_BLOCK_PTR)
			fprintf (dump_file, "exit\n");
		      else
			fprintf (dump_file, "%d\n", next_bb->index);
		    }

		  new_bb = copy_bb (best->dest, best, traces[t].last, t);
		  traces[t].last = new_bb;
		  if (next_bb && next_bb != EXIT_BLOCK_PTR)
		    {
		      t = bbd[next_bb->index].start_of_trace;
		      traces[last_trace].last->rbi->next = traces[t].first;
		      connected[t] = true;
		      if (unconnected_hot_trace_count > 0)
			unconnected_hot_trace_count--;
		      last_trace = t;
		    }
		  else
		    break;	/* Stop finding the successor traces.  */
		}
	      else
		break;	/* Stop finding the successor traces.  */
	    }
	}
    }
  if (dump_file)
    {
      basic_block bb;

      fprintf (dump_file, "Final order:\n");
      for (bb = traces[0].first; bb; bb = bb->rbi->next)
	fprintf (dump_file, "%d ", bb->index);
      fprintf (dump_file, "\n");
      fflush (dump_file);
    }

  FREE (connected);
  FREE (cold_traces);
}
/* Return true when BB can and should be copied.  CODE_MAY_GROW is true
   when code size is allowed to grow by duplication.  */

static bool
copy_bb_p (basic_block bb, int code_may_grow)
{
  int size = 0;
  int max_size = uncond_jump_length;
  rtx insn;

  if (!bb->frequency)
    return false;
  if (EDGE_COUNT (bb->preds) < 2)
    return false;
  if (!can_duplicate_block_p (bb))
    return false;

  /* Avoid duplicating blocks which have many successors (PR/13430).  */
  if (EDGE_COUNT (bb->succs) > 8)
    return false;

  if (code_may_grow && maybe_hot_bb_p (bb))
    max_size *= 8;
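
  /* The size budget: by default a block is copied only when it is no
     larger than the unconditional jump the copy saves; for a hot block,
     when growth is allowed, the budget is eight jump lengths.  */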
  for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
       insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	size += get_attr_length (insn);
    }

  if (size <= max_size)
    return true;

  if (dump_file)
    {
      fprintf (dump_file,
	       "Block %d can't be copied because its size = %d.\n",
	       bb->index, size);
    }

  return false;
}
/* Return the length of unconditional jump instruction.  */

static int
get_uncond_jump_length (void)
{
  rtx label, jump;
  int length;
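
  /* Emit a jump to a fresh label at the start of the insn stream, ask
     the backend for its length attribute, then delete both insns
     again; only the measured length is kept.  */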
  label = emit_label_before (gen_label_rtx (), get_insns ());
  jump = emit_jump_insn (gen_jump (label));

  length = get_attr_length (jump);

  delete_insn (jump);
  delete_insn (label);
  return length;
}
static void
add_unlikely_executed_notes (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    if (bb->partition == COLD_PARTITION)
      mark_bb_for_unlikely_executed_section (bb);
}
/* Find the basic blocks that are rarely executed and need to be moved to
   a separate section of the .o file (to cut down on paging and improve
   cache locality).  */

static void
find_rarely_executed_basic_blocks_and_crossing_edges (edge *crossing_edges,
						      int *n_crossing_edges,
						      int *max_idx)
{
  basic_block bb;
  edge e;
  int i;

  /* Mark which partition (hot/cold) each basic block belongs in.  */

  FOR_EACH_BB (bb)
    {
      if (probably_never_executed_bb_p (bb))
	bb->partition = COLD_PARTITION;
      else
	bb->partition = HOT_PARTITION;
    }

  /* Mark every edge that crosses between sections.  */

  i = 0;
  FOR_EACH_BB (bb)
    FOR_EACH_EDGE (e, bb->succs)
      {
	if (e->src != ENTRY_BLOCK_PTR
	    && e->dest != EXIT_BLOCK_PTR
	    && e->src->partition != e->dest->partition)
	  {
	    e->crossing_edge = true;
	    if (i == *max_idx)
	      {
		*max_idx *= 2;
		crossing_edges = xrealloc (crossing_edges,
					   (*max_idx) * sizeof (edge));
	      }
	    crossing_edges[i++] = e;
	  }
	else
	  e->crossing_edge = false;
      }
    END_FOR_EACH_EDGE;

  *n_crossing_edges = i;
}
/* Add NOTE_INSN_UNLIKELY_EXECUTED_CODE to top of basic block.  This note
   is later used to mark the basic block to be put in the
   unlikely-to-be-executed section of the .o file.  */

static void
mark_bb_for_unlikely_executed_section (basic_block bb)
{
  rtx cur_insn;
  rtx insert_insn = NULL;
  rtx new_note;

  /* Find first non-note instruction and insert new NOTE before it (as
     long as new NOTE is not first instruction in basic block).  */

  for (cur_insn = BB_HEAD (bb); cur_insn != NEXT_INSN (BB_END (bb));
       cur_insn = NEXT_INSN (cur_insn))
    if (!NOTE_P (cur_insn)
	&& !LABEL_P (cur_insn))
      {
	insert_insn = cur_insn;
	break;
      }

  /* Insert note and assign basic block number to it.  */

  if (insert_insn)
    {
      new_note = emit_note_before (NOTE_INSN_UNLIKELY_EXECUTED_CODE,
				   insert_insn);
      NOTE_BASIC_BLOCK (new_note) = bb;
    }
  else
    {
      new_note = emit_note_after (NOTE_INSN_UNLIKELY_EXECUTED_CODE,
				  BB_END (bb));
      NOTE_BASIC_BLOCK (new_note) = bb;
    }
}
/* If any destination of a crossing edge does not have a label, add a label;
   convert any fall-through crossing edges (for blocks that do not contain
   a jump) to unconditional jumps.  */

static void
add_labels_and_missing_jumps (edge *crossing_edges, int n_crossing_edges)
{
  int i;
  basic_block src;
  basic_block dest;
  rtx label;
  rtx barrier;
  rtx new_jump;

  for (i = 0; i < n_crossing_edges; i++)
    {
      if (crossing_edges[i])
	{
	  src = crossing_edges[i]->src;
	  dest = crossing_edges[i]->dest;

	  /* Make sure dest has a label.  */

	  if (dest && (dest != EXIT_BLOCK_PTR))
	    {
	      label = block_label (dest);

	      /* Make sure source block ends with a jump.  */

	      if (src && (src != ENTRY_BLOCK_PTR))
		{
		  if (!JUMP_P (BB_END (src)))
		    /* BB just falls through.  */
		    {
		      /* Make sure there's only one successor.  */
		      if (EDGE_COUNT (src->succs) == 1)
			{
			  /* Find label in dest block.  */
			  label = block_label (dest);

			  new_jump = emit_jump_insn_after (gen_jump (label),
							   BB_END (src));
			  barrier = emit_barrier_after (new_jump);
			  JUMP_LABEL (new_jump) = label;
			  LABEL_NUSES (label) += 1;
			  src->rbi->footer = unlink_insn_chain (barrier,
								barrier);
			  /* Mark edge as non-fallthru.  */
			  crossing_edges[i]->flags &= ~EDGE_FALLTHRU;
			}
		      else
			{
			  /* Basic block has two successors, but
			     doesn't end in a jump; something is wrong
			     here!  */
			  abort ();
			}
		    } /* end: 'if (!JUMP_P ...'  */
		} /* end: 'if (src && ...'  */
	    } /* end: 'if (dest && ...'  */
	} /* end: 'if (crossing_edges[i] ...'  */
    } /* end for loop  */
}
/* Find any bb's where the fall-through edge is a crossing edge (note that
   these bb's must also contain a conditional jump; we've already
   dealt with fall-through edges for blocks that didn't have a
   conditional jump in the call to add_labels_and_missing_jumps).
   Convert the fall-through edge to a non-crossing edge by inserting a
   new bb to fall through into.  The new bb will contain an
   unconditional jump (crossing edge) to the original fall-through
   destination.  */

static void
fix_up_fall_thru_edges (void)
{
  basic_block cur_bb;
  basic_block new_bb;
  edge succ1;
  edge succ2;
  edge fall_thru;
  edge cond_jump = NULL;
  edge e;
  bool cond_jump_crosses;
  int invert_worked;
  rtx old_jump;
  rtx fall_thru_label;
  rtx barrier;
  FOR_EACH_BB (cur_bb)
    {
      fall_thru = NULL;
      if (EDGE_COUNT (cur_bb->succs) > 0)
	succ1 = EDGE_SUCC (cur_bb, 0);
      else
	succ1 = NULL;

      if (EDGE_COUNT (cur_bb->succs) > 1)
	succ2 = EDGE_SUCC (cur_bb, 1);
      else
	succ2 = NULL;

      /* Find the fall-through edge.  */

      if (succ1
	  && (succ1->flags & EDGE_FALLTHRU))
	{
	  fall_thru = succ1;
	  cond_jump = succ2;
	}
      else if (succ2
	       && (succ2->flags & EDGE_FALLTHRU))
	{
	  fall_thru = succ2;
	  cond_jump = succ1;
	}

      if (fall_thru && (fall_thru->dest != EXIT_BLOCK_PTR))
	{
	  /* Check to see if the fall-thru edge is a crossing edge.  */

	  if (fall_thru->crossing_edge)
	    {
	      /* The fall_thru edge crosses; now check the cond jump edge, if
		 it exists.  */

	      cond_jump_crosses = true;
	      invert_worked = 0;
	      old_jump = BB_END (cur_bb);

	      /* Find the jump instruction, if there is one.  */

	      if (cond_jump)
		{
		  if (!cond_jump->crossing_edge)
		    cond_jump_crosses = false;

		  /* We know the fall-thru edge crosses; if the cond
		     jump edge does NOT cross, and its destination is the
		     next block in the bb order, invert the jump
		     (i.e. fix it so the fall thru does not cross and
		     the cond jump does).  */

		  if (!cond_jump_crosses
		      && cur_bb->rbi->next == cond_jump->dest)
		    {
		      /* Find label in fall_thru block.  We've already added
			 any missing labels, so there must be one.  */

		      fall_thru_label = block_label (fall_thru->dest);

		      if (old_jump && fall_thru_label)
			invert_worked = invert_jump (old_jump,
						     fall_thru_label, 0);
		      if (invert_worked)
			{
			  fall_thru->flags &= ~EDGE_FALLTHRU;
			  cond_jump->flags |= EDGE_FALLTHRU;
			  update_br_prob_note (cur_bb);
			  e = fall_thru;
			  fall_thru = cond_jump;
			  cond_jump = e;
			  cond_jump->crossing_edge = true;
			  fall_thru->crossing_edge = false;
			}
		    }
		}
	      if (cond_jump_crosses || !invert_worked)
		{
		  /* This is the case where both edges out of the basic
		     block are crossing edges.  Here we will fix up the
		     fall-through edge.  The jump edge will be taken care
		     of later.  */

		  new_bb = force_nonfallthru (fall_thru);

		  if (new_bb)
		    {
		      new_bb->rbi->next = cur_bb->rbi->next;
		      cur_bb->rbi->next = new_bb;

		      /* Make sure new fall-through bb is in same
			 partition as bb it's falling through from.  */

		      new_bb->partition = cur_bb->partition;
		      EDGE_SUCC (new_bb, 0)->crossing_edge = true;
		    }

		  /* Add barrier after new jump.  */

		  if (new_bb)
		    {
		      barrier = emit_barrier_after (BB_END (new_bb));
		      new_bb->rbi->footer = unlink_insn_chain (barrier,
							       barrier);
		    }
		  else
		    {
		      barrier = emit_barrier_after (BB_END (cur_bb));
		      cur_bb->rbi->footer = unlink_insn_chain (barrier,
							       barrier);
		    }
		}
	    }
	}
    }
}
/* This function checks the destination block of a "crossing jump" to
   see if it has any crossing predecessors that begin with a code label
   and end with an unconditional jump.  If so, it returns that predecessor
   block.  (This is to avoid creating lots of new basic blocks that all
   contain unconditional jumps to the same destination.)  */

static basic_block
find_jump_block (basic_block jump_dest)
{
  basic_block source_bb = NULL;
  edge e;
  rtx insn;

  FOR_EACH_EDGE (e, jump_dest->preds)
    {
      if (e->crossing_edge)
	{
	  basic_block src = e->src;

	  /* Check each predecessor to see if it has a label, and contains
	     only one executable instruction, which is an unconditional jump.
	     If so, we can use it.  */

	  if (LABEL_P (BB_HEAD (src)))
	    for (insn = BB_HEAD (src);
		 !INSN_P (insn) && insn != NEXT_INSN (BB_END (src));
		 insn = NEXT_INSN (insn))
	      {
		if (INSN_P (insn)
		    && insn == BB_END (src)
		    && JUMP_P (insn)
		    && !any_condjump_p (insn))
		  {
		    source_bb = src;
		    break;
		  }
	      }

	  if (source_bb)
	    break;
	}
    }
  END_FOR_EACH_EDGE;
  return source_bb;
}
/* Find all BB's with conditional jumps that are crossing edges;
   insert a new bb and make the conditional jump branch to the new
   bb instead (make the new bb the same color so the conditional branch
   won't be a 'crossing' edge).  Insert an unconditional jump from the
   new bb to the original destination of the conditional jump.  */

static void
fix_crossing_conditional_branches (void)
{
  basic_block cur_bb;
  basic_block new_bb;
  basic_block last_bb;
  basic_block dest;
  basic_block prev_bb;
  edge succ1;
  edge succ2;
  edge crossing_edge;
  edge new_edge;
  rtx old_jump;
  rtx set_src;
  rtx old_label = NULL_RTX;
  rtx new_label;
  rtx new_jump;
  rtx barrier;
  last_bb = EXIT_BLOCK_PTR->prev_bb;

  FOR_EACH_BB (cur_bb)
    {
      crossing_edge = NULL;
      if (EDGE_COUNT (cur_bb->succs) > 0)
	succ1 = EDGE_SUCC (cur_bb, 0);
      else
	succ1 = NULL;

      if (EDGE_COUNT (cur_bb->succs) > 1)
	succ2 = EDGE_SUCC (cur_bb, 1);
      else
	succ2 = NULL;

      /* We already took care of fall-through edges, so only one successor
	 can be a crossing edge.  */

      if (succ1 && succ1->crossing_edge)
	crossing_edge = succ1;
      else if (succ2 && succ2->crossing_edge)
	crossing_edge = succ2;

      if (crossing_edge)
	{
	  old_jump = BB_END (cur_bb);

	  /* Check to make sure the jump instruction is a
	     conditional jump.  */

	  set_src = NULL_RTX;

	  if (any_condjump_p (old_jump))
	    {
	      if (GET_CODE (PATTERN (old_jump)) == SET)
		set_src = SET_SRC (PATTERN (old_jump));
	      else if (GET_CODE (PATTERN (old_jump)) == PARALLEL)
		{
		  set_src = XVECEXP (PATTERN (old_jump), 0, 0);
		  if (GET_CODE (set_src) == SET)
		    set_src = SET_SRC (set_src);
		  else
		    set_src = NULL_RTX;
		}
	    }

	  if (set_src && (GET_CODE (set_src) == IF_THEN_ELSE))
	    {
	      if (GET_CODE (XEXP (set_src, 1)) == PC)
		old_label = XEXP (set_src, 2);
	      else if (GET_CODE (XEXP (set_src, 2)) == PC)
		old_label = XEXP (set_src, 1);
	      /* Check to see if new bb for jumping to that dest has
		 already been created; if so, use it; if not, create
		 a new one.  */

	      new_bb = find_jump_block (crossing_edge->dest);

	      if (new_bb)
		new_label = block_label (new_bb);
	      else
		{
		  /* Create new basic block to be dest for
		     conditional jump.  */

		  new_bb = create_basic_block (NULL, NULL, last_bb);
		  new_bb->rbi->next = last_bb->rbi->next;
		  last_bb->rbi->next = new_bb;
		  prev_bb = last_bb;
		  last_bb = new_bb;

		  /* Update register liveness information.  */

		  new_bb->global_live_at_start =
		    OBSTACK_ALLOC_REG_SET (&flow_obstack);
		  new_bb->global_live_at_end =
		    OBSTACK_ALLOC_REG_SET (&flow_obstack);
		  COPY_REG_SET (new_bb->global_live_at_end,
				prev_bb->global_live_at_end);
		  COPY_REG_SET (new_bb->global_live_at_start,
				prev_bb->global_live_at_end);

		  /* Put appropriate instructions in new bb.  */

		  new_label = gen_label_rtx ();
		  emit_label_before (new_label, BB_HEAD (new_bb));
		  BB_HEAD (new_bb) = new_label;

		  if (GET_CODE (old_label) == LABEL_REF)
		    {
		      old_label = JUMP_LABEL (old_jump);
		      new_jump = emit_jump_insn_after (gen_jump
						       (old_label),
						       BB_END (new_bb));
		    }
		  else if (HAVE_return
			   && GET_CODE (old_label) == RETURN)
		    new_jump = emit_jump_insn_after (gen_return (),
						     BB_END (new_bb));
		  else
		    abort ();

		  barrier = emit_barrier_after (new_jump);
		  JUMP_LABEL (new_jump) = old_label;
		  new_bb->rbi->footer = unlink_insn_chain (barrier,
							   barrier);

		  /* Make sure new bb is in same partition as source
		     of conditional branch.  */

		  new_bb->partition = cur_bb->partition;
		}
	      /* Make old jump branch to new bb.  */

	      redirect_jump (old_jump, new_label, 0);

	      /* Remove crossing_edge as predecessor of 'dest'.  */

	      dest = crossing_edge->dest;

	      redirect_edge_succ (crossing_edge, new_bb);

	      /* Make a new edge from new_bb to old dest; new edge
		 will be a successor for new_bb and a predecessor
		 for 'dest'.  */

	      if (EDGE_COUNT (new_bb->succs) == 0)
		new_edge = make_edge (new_bb, dest, 0);
	      else
		new_edge = EDGE_SUCC (new_bb, 0);

	      crossing_edge->crossing_edge = false;
	      new_edge->crossing_edge = true;
	    }
	}
    }
}
/* Find any unconditional branches that cross between hot and cold
   sections.  Convert them into indirect jumps instead.  */

static void
fix_crossing_unconditional_branches (void)
{
  basic_block cur_bb;
  rtx last_insn;
  rtx label;
  rtx label_addr;
  rtx indirect_jump_sequence;
  rtx jump_insn = NULL_RTX;
  rtx new_reg;
  rtx cur_insn;
  edge succ;

  FOR_EACH_BB (cur_bb)
    {
      last_insn = BB_END (cur_bb);
      succ = EDGE_SUCC (cur_bb, 0);

      /* Check to see if bb ends in a crossing (unconditional) jump.  At
	 this point, no crossing jumps should be conditional.  */

      if (JUMP_P (last_insn)
	  && succ->crossing_edge)
	{
	  rtx label2, table;

	  if (any_condjump_p (last_insn))
	    abort ();

	  /* Make sure the jump is not already an indirect or table jump.  */

	  else if (!computed_jump_p (last_insn)
		   && !tablejump_p (last_insn, &label2, &table))
	    {
	      /* We have found a "crossing" unconditional branch.  Now
		 we must convert it to an indirect jump.  First create
		 reference of label, as target for jump.  */

	      label = JUMP_LABEL (last_insn);
	      label_addr = gen_rtx_LABEL_REF (Pmode, label);
	      LABEL_NUSES (label) += 1;

	      /* Get a register to use for the indirect jump.  */

	      new_reg = gen_reg_rtx (Pmode);

	      /* Generate the indirect jump sequence.  */

	      start_sequence ();
	      emit_move_insn (new_reg, label_addr);
	      emit_indirect_jump (new_reg);
	      indirect_jump_sequence = get_insns ();
	      end_sequence ();

	      /* Make sure every instruction in the new jump sequence has
		 its basic block set to be cur_bb.  */

	      for (cur_insn = indirect_jump_sequence; cur_insn;
		   cur_insn = NEXT_INSN (cur_insn))
		{
		  BLOCK_FOR_INSN (cur_insn) = cur_bb;
		  if (JUMP_P (cur_insn))
		    jump_insn = cur_insn;
		}

	      /* Insert the new (indirect) jump sequence immediately before
		 the unconditional jump, then delete the unconditional jump.  */

	      emit_insn_before (indirect_jump_sequence, last_insn);
	      delete_insn (last_insn);

	      /* Make BB_END for cur_bb be the jump instruction (NOT the
		 barrier instruction at the end of the sequence...).  */

	      BB_END (cur_bb) = jump_insn;
	    }
	}
    }
}
/* Add REG_CROSSING_JUMP note to all crossing jump insns.  */

static void
add_reg_crossing_jump_notes (void)
{
  basic_block bb;
  edge e;

  FOR_EACH_BB (bb)
    FOR_EACH_EDGE (e, bb->succs)
      {
	if (e->crossing_edge
	    && JUMP_P (BB_END (e->src)))
	  REG_NOTES (BB_END (e->src)) = gen_rtx_EXPR_LIST (REG_CROSSING_JUMP,
							   NULL_RTX,
							   REG_NOTES (BB_END
								      (e->src)));
      }
    END_FOR_EACH_EDGE;
}
/* Basic blocks containing NOTE_INSN_UNLIKELY_EXECUTED_CODE will be
   put in a separate section of the .o file, to reduce paging and
   improve cache performance (hopefully).  This can result in bits of
   code from the same function being widely separated in the .o file.
   However, this is not obvious from the current bb structure.  Therefore
   we must take care to ensure that: 1) there are no fall_thru edges
   that cross between sections; 2) for those architectures which
   have "short" conditional branches, all conditional branches that
   attempt to cross between sections are converted to unconditional
   branches; and 3) for those architectures which have "short"
   unconditional branches, all unconditional branches that attempt
   to cross between sections are converted to indirect jumps.

   The code for fixing up fall_thru edges that cross between hot and
   cold basic blocks does so by creating new basic blocks containing
   unconditional branches to the appropriate label in the "other"
   section.  The new basic block is then put in the same (hot or cold)
   section as the original conditional branch, and the fall_thru edge
   is modified to fall into the new basic block instead.  By adding
   this level of indirection we end up with only unconditional branches
   crossing between hot and cold sections.

   Conditional branches are dealt with by adding a level of indirection.
   A new basic block is added in the same (hot/cold) section as the
   conditional branch, and the conditional branch is retargeted to the
   new basic block.  The new basic block contains an unconditional branch
   to the original target of the conditional branch (in the other section).

   Unconditional branches are dealt with by converting them into
   indirect jumps.  */
static void
fix_edges_for_rarely_executed_code (edge *crossing_edges,
				    int n_crossing_edges)
{
  /* Make sure the source of any crossing edge ends in a jump and the
     destination of any crossing edge has a label.  */

  add_labels_and_missing_jumps (crossing_edges, n_crossing_edges);

  /* Convert all crossing fall_thru edges to non-crossing fall
     thrus to unconditional jumps (that jump to the original fall
     thru dest).  */

  fix_up_fall_thru_edges ();

  /* If the architecture does not have conditional branches that can
     span all of memory, convert crossing conditional branches into
     crossing unconditional branches.  */

  if (!HAS_LONG_COND_BRANCH)
    fix_crossing_conditional_branches ();

  /* If the architecture does not have unconditional branches that
     can span all of memory, convert crossing unconditional branches
     into indirect jumps.  Since adding an indirect jump also adds
     a new register usage, update the register usage information as
     well.  */

  if (!HAS_LONG_UNCOND_BRANCH)
    {
      fix_crossing_unconditional_branches ();
      reg_scan (get_insns (), max_reg_num (), 1);
    }

  add_reg_crossing_jump_notes ();
}
/* Reorder basic blocks.  The main entry point to this file.  FLAGS is
   the set of flags to pass to cfg_layout_initialize().  */

void
reorder_basic_blocks (unsigned int flags)
{
  int n_traces;
  int i;
  struct trace *traces;

  if (n_basic_blocks <= 1)
    return;

  if (targetm.cannot_modify_jumps_p ())
    return;

  timevar_push (TV_REORDER_BLOCKS);

  cfg_layout_initialize (flags);

  set_edge_can_fallthru_flag ();
  mark_dfs_back_edges ();

  /* We are estimating the length of uncond jump insn only once since the code
     for getting the insn length always returns the minimal length now.  */
  if (uncond_jump_length == 0)
    uncond_jump_length = get_uncond_jump_length ();

  /* We need to know some information for each basic block.  */
  array_size = GET_ARRAY_SIZE (last_basic_block);
  bbd = xmalloc (array_size * sizeof (bbro_basic_block_data));
  for (i = 0; i < array_size; i++)
    {
      bbd[i].start_of_trace = -1;
      bbd[i].end_of_trace = -1;
      bbd[i].heap = NULL;
      bbd[i].node = NULL;
    }

  traces = xmalloc (n_basic_blocks * sizeof (struct trace));
  n_traces = 0;
  find_traces (&n_traces, traces);
  connect_traces (n_traces, traces);
  FREE (traces);
  FREE (bbd);

  if (dump_file)
    dump_flow_info (dump_file);

  if (flag_reorder_blocks_and_partition)
    add_unlikely_executed_notes ();

  cfg_layout_finalize ();

  timevar_pop (TV_REORDER_BLOCKS);
}
/* This function is the main 'entrance' for the optimization that
   partitions hot and cold basic blocks into separate sections of the
   .o file (to improve performance and cache locality).  Ideally it
   would be called after all optimizations that rearrange the CFG have
   been called.  However part of this optimization may introduce new
   register usage, so it must be called before register allocation has
   occurred.  This means that this optimization is actually called
   well before the optimization that reorders basic blocks (see function
   above).

   This optimization checks the feedback information to determine
   which basic blocks are hot/cold and adds
   NOTE_INSN_UNLIKELY_EXECUTED_CODE to non-hot basic blocks.  The
   presence or absence of this note is later used for writing out
   sections in the .o file.  This optimization must also modify the
   CFG to make sure there are no fallthru edges between hot & cold
   blocks, as those blocks will not necessarily be contiguous in the
   .o (or assembly) file; and in those cases where the architecture
   requires it, conditional and unconditional branches that cross
   between sections are converted into unconditional or indirect
   jumps, depending on what is appropriate.  */
void
partition_hot_cold_basic_blocks (void)
{
  basic_block cur_bb;
  edge *crossing_edges;
  int n_crossing_edges;
  int max_edges = 2 * last_basic_block;

  if (n_basic_blocks <= 1)
    return;

  crossing_edges = xcalloc (max_edges, sizeof (edge));

  cfg_layout_initialize (0);
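
  /* Record the current block order in the rbi->next chain; the edge
     fix-up routines below walk and update this chain.  */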
  FOR_EACH_BB (cur_bb)
    if (cur_bb->index >= 0
	&& cur_bb->next_bb->index >= 0)
      cur_bb->rbi->next = cur_bb->next_bb;

  find_rarely_executed_basic_blocks_and_crossing_edges (crossing_edges,
							&n_crossing_edges,
							&max_edges);

  if (n_crossing_edges > 0)
    fix_edges_for_rarely_executed_code (crossing_edges, n_crossing_edges);

  free (crossing_edges);

  cfg_layout_finalize ();
}