1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000, 2002, 2003 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
19 02111-1307, USA. */
21 /* This (greedy) algorithm constructs traces in several rounds.
22 The construction starts from "seeds".  The seed for the first round
23 is the entry point of the function.  When there is more than one seed,
24 the one with the lowest key in the heap is selected first
25 (see the function bb_to_key).  Then the algorithm repeatedly adds the most
26 probable successor to the end of a trace.  Finally it connects the traces.
28 There are two parameters: Branch Threshold and Exec Threshold.
29 If the probability of the edge to a successor of the current basic block is
30 lower than Branch Threshold, or the frequency of the successor is lower than
31 Exec Threshold, the successor will be a seed in one of the next rounds.
32 Each round has these parameters lower than the previous one.
33 The last round has to have these parameters set to zero
34 so that the remaining blocks are picked up.
36 The algorithm selects the most probable successor from all unvisited
37 successors and successors that have been added to this trace.
38 The other successors (those that have not been "sent" to the next round) will
39 be other seeds for this round, and the secondary traces will start from them.
40 If the successor has not been visited in this trace, it is added to the trace
41 (however, there is some heuristic for simple branches).
42 If the successor has been visited in this trace, a loop has been found.
43 If the loop has many iterations, the loop is rotated so that the
44 source block of the most probable edge going out of the loop
45 is the last block of the trace.
46 If the loop has few iterations and there is no edge from the last block of
47 the loop going out of the loop, the loop header is duplicated.
48 Finally, the construction of the trace is terminated.
50 When connecting traces, it first checks whether there is an edge from the
51 last block of one trace to the first block of another trace.
52 When there are still some unconnected traces, it checks whether there exists
53 a basic block BB such that BB is a successor of the last block of one trace
54 and BB is a predecessor of the first block of another trace.  In this case,
55 BB is duplicated and the traces are connected through this duplicate.
56 The remaining traces are simply connected, so there will be a jump to the
57 beginning of each of them.
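
   As a small illustration (an example added to this description, not taken
   from the reference below): suppose block A branches to B with probability
   90% and to C with probability 10%, and both B and C fall through to D
   (assume D is too large to be duplicated cheaply).  With the first-round
   Branch Threshold of 40%, the edge A->C is below the threshold, so C is
   sent to a later round and the first trace becomes A B D.  C later forms a
   trace of its own; since there is no edge from D to C and no single block
   connects the two traces, C is simply placed after D and ends with a jump
   to its successor D.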
60 References:
62 "Software Trace Cache"
63 A. Ramirez, J. Larriba-Pey, C. Navarro, J. Torrellas and M. Valero; 1999
64 http://citeseer.nj.nec.com/15361.html
68 #include "config.h"
69 #include "system.h"
70 #include "coretypes.h"
71 #include "tm.h"
72 #include "rtl.h"
73 #include "basic-block.h"
74 #include "flags.h"
75 #include "output.h"
76 #include "cfglayout.h"
77 #include "fibheap.h"
78 #include "target.h"
80 /* The number of rounds. */
81 #define N_ROUNDS 4
83 /* Branch thresholds in thousandths (per mille) of the REG_BR_PROB_BASE. */
84 static int branch_threshold[N_ROUNDS] = {400, 200, 100, 0};
86 /* Exec thresholds in thousandths (per mille) of the frequency of bb 0. */
87 static int exec_threshold[N_ROUNDS] = {500, 200, 50, 0};
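
/* Roughly (see the uses of these tables in find_traces below): in the first
   round an edge needs a probability of at least 40% of REG_BR_PROB_BASE and
   a frequency of at least half of the maximal entry-block frequency to
   extend a trace; the last round has both thresholds at zero and therefore
   picks up every remaining block.  */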
89 /* If edge frequency is lower than DUPLICATION_THRESHOLD per mille of the entry
90 block's frequency, the edge destination is not duplicated while connecting traces. */
91 #define DUPLICATION_THRESHOLD 100
93 /* Length of unconditional jump instruction. */
94 static int uncond_jump_length;
96 /* Structure to hold needed information for each basic block. */
97 typedef struct bbro_basic_block_data_def
99 /* Which trace is the bb start of (-1 means it is not a start of a trace). */
100 int start_of_trace;
102 /* Which trace is the bb end of (-1 means it is not an end of a trace). */
103 int end_of_trace;
105 /* Which heap is BB in (if any)? */
106 fibheap_t heap;
108 /* Which heap node is BB in (if any)? */
109 fibnode_t node;
110 } bbro_basic_block_data;
112 /* The current size of the following dynamic array. */
113 static int array_size;
115 /* The array which holds needed information for basic blocks. */
116 static bbro_basic_block_data *bbd;
118 /* To avoid frequent reallocation the size of the arrays is greater than needed;
119 the number of elements is (not less than) 1.25 * size_wanted.  */
120 #define GET_ARRAY_SIZE(X) ((((X) / 4) + 1) * 5)
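
/* For example, GET_ARRAY_SIZE (100) == 130; for X divisible by 4 the result
   is exactly 1.25 * X + 5.  */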
122 /* Free the memory and set the pointer to NULL. */
123 #define FREE(P) \
124 do { if (P) { free (P); P = 0; } else { abort (); } } while (0)
126 /* Structure for holding information about a trace. */
127 struct trace
129 /* First and last basic block of the trace. */
130 basic_block first, last;
132 /* The round of STC creation in which this trace was found.  */
133 int round;
135 /* The length (i.e. the number of basic blocks) of the trace. */
136 int length;
139 /* Maximum frequency and count of one of the entry blocks. */
140 int max_entry_frequency;
141 gcov_type max_entry_count;
143 /* Local function prototypes. */
144 static void find_traces PARAMS ((int *, struct trace *));
145 static basic_block rotate_loop PARAMS ((edge, struct trace *, int));
146 static void mark_bb_visited PARAMS ((basic_block, int));
147 static void find_traces_1_round PARAMS ((int, int, gcov_type,
148 struct trace *, int *, int,
149 fibheap_t *));
150 static basic_block copy_bb PARAMS ((basic_block, edge,
151 basic_block, int));
152 static fibheapkey_t bb_to_key PARAMS ((basic_block));
153 static bool better_edge_p PARAMS ((basic_block, edge, int, int,
154 int, int));
155 static void connect_traces PARAMS ((int, struct trace *));
156 static bool copy_bb_p PARAMS ((basic_block, int));
157 static int get_uncond_jump_length PARAMS ((void));
159 /* Find the traces for Software Trace Cache.  Chain each trace through
160 RBI ()->next.  Store the number of traces in *N_TRACES and the descriptions
161 of the traces in TRACES.  */
163 static void
164 find_traces (n_traces, traces)
165 int *n_traces;
166 struct trace *traces;
168 int i;
169 edge e;
170 fibheap_t heap;
172 /* Insert entry points of function into heap. */
173 heap = fibheap_new ();
174 max_entry_frequency = 0;
175 max_entry_count = 0;
176 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
178 bbd[e->dest->index].heap = heap;
179 bbd[e->dest->index].node = fibheap_insert (heap, bb_to_key (e->dest),
180 e->dest);
181 if (e->dest->frequency > max_entry_frequency)
182 max_entry_frequency = e->dest->frequency;
183 if (e->dest->count > max_entry_count)
184 max_entry_count = e->dest->count;
187 /* Find the traces. */
188 for (i = 0; i < N_ROUNDS; i++)
190 gcov_type count_threshold;
192 if (rtl_dump_file)
193 fprintf (rtl_dump_file, "STC - round %d\n", i + 1);
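
/* Compute the count threshold in per mille of the maximal entry count; the
   order of the multiplication and division below is chosen so that the
   intermediate product cannot overflow.  */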
195 if (max_entry_count < INT_MAX / 1000)
196 count_threshold = max_entry_count * exec_threshold[i] / 1000;
197 else
198 count_threshold = max_entry_count / 1000 * exec_threshold[i];
200 find_traces_1_round (REG_BR_PROB_BASE * branch_threshold[i] / 1000,
201 max_entry_frequency * exec_threshold[i] / 1000,
202 count_threshold, traces, n_traces, i, &heap);
204 fibheap_delete (heap);
206 if (rtl_dump_file)
208 for (i = 0; i < *n_traces; i++)
210 basic_block bb;
211 fprintf (rtl_dump_file, "Trace %d (round %d): ", i + 1,
212 traces[i].round + 1);
213 for (bb = traces[i].first; bb != traces[i].last; bb = RBI (bb)->next)
214 fprintf (rtl_dump_file, "%d [%d] ", bb->index, bb->frequency);
215 fprintf (rtl_dump_file, "%d [%d]\n", bb->index, bb->frequency);
217 fflush (rtl_dump_file);
221 /* Rotate the loop whose back edge is BACK_EDGE and whose body is in the tail of
222 trace TRACE (TRACE_N is its sequential number); return the new last block.  */
224 static basic_block
225 rotate_loop (back_edge, trace, trace_n)
226 edge back_edge;
227 struct trace *trace;
228 int trace_n;
230 basic_block bb;
232 /* Information about the best end (end after rotation) of the loop. */
233 basic_block best_bb = NULL;
234 edge best_edge = NULL;
235 int best_freq = -1;
236 gcov_type best_count = -1;
237 /* The best edge is preferred when its destination is not visited yet
238 or is a start block of some trace. */
239 bool is_preferred = false;
241 /* Find the most frequent edge that goes out from current trace. */
242 bb = back_edge->dest;
245 edge e;
246 for (e = bb->succ; e; e = e->succ_next)
247 if (e->dest != EXIT_BLOCK_PTR
248 && RBI (e->dest)->visited != trace_n
249 && (e->flags & EDGE_CAN_FALLTHRU)
250 && !(e->flags & EDGE_COMPLEX))
252 if (is_preferred)
254 /* The best edge is preferred. */
255 if (!RBI (e->dest)->visited
256 || bbd[e->dest->index].start_of_trace >= 0)
258 /* The current edge E is also preferred. */
259 int freq = EDGE_FREQUENCY (e);
260 if (freq > best_freq || e->count > best_count)
262 best_freq = freq;
263 best_count = e->count;
264 best_edge = e;
265 best_bb = bb;
269 else
271 if (!RBI (e->dest)->visited
272 || bbd[e->dest->index].start_of_trace >= 0)
274 /* The current edge E is preferred. */
275 is_preferred = true;
276 best_freq = EDGE_FREQUENCY (e);
277 best_count = e->count;
278 best_edge = e;
279 best_bb = bb;
281 else
283 int freq = EDGE_FREQUENCY (e);
284 if (!best_edge || freq > best_freq || e->count > best_count)
286 best_freq = freq;
287 best_count = e->count;
288 best_edge = e;
289 best_bb = bb;
294 bb = RBI (bb)->next;
296 while (bb != back_edge->dest);
298 if (best_bb)
300 /* Rotate the loop so that the BEST_EDGE goes out from the last block of
301 the trace. */
302 if (back_edge->dest == trace->first)
304 trace->first = RBI (best_bb)->next;
306 else
308 basic_block prev_bb;
310 for (prev_bb = trace->first;
311 RBI (prev_bb)->next != back_edge->dest;
312 prev_bb = RBI (prev_bb)->next)
314 RBI (prev_bb)->next = RBI (best_bb)->next;
316 /* Try to get rid of uncond jump to cond jump. */
317 if (prev_bb->succ && !prev_bb->succ->succ_next)
319 basic_block header = prev_bb->succ->dest;
321 /* Duplicate HEADER if it is a small block containing cond jump
322 in the end. */
323 if (any_condjump_p (header->end) && copy_bb_p (header, 0))
325 copy_bb (header, prev_bb->succ, prev_bb, trace_n);
330 else
332 /* We have not found a suitable loop tail, so do no rotation.  */
333 best_bb = back_edge->src;
335 RBI (best_bb)->next = NULL;
336 return best_bb;
339 /* Mark BB as visited in trace number TRACE.  */
341 static void
342 mark_bb_visited (bb, trace)
343 basic_block bb;
344 int trace;
346 RBI (bb)->visited = trace;
347 if (bbd[bb->index].heap)
349 fibheap_delete_node (bbd[bb->index].heap, bbd[bb->index].node);
350 bbd[bb->index].heap = NULL;
351 bbd[bb->index].node = NULL;
355 /* One round of finding traces.  Find traces for BRANCH_TH and EXEC_TH, i.e. do
356 not include into traces basic blocks that are reached by an edge whose
357 probability is lower than BRANCH_TH, whose frequency is lower than EXEC_TH, or
358 whose count is lower than COUNT_TH.  Store the new traces into TRACES and
359 modify the number of traces *N_TRACES.  Set the round (which the traces belong
360 to) to ROUND.  It expects that the starting basic blocks are in *HEAP; at the
361 end it deletes *HEAP and stores the starting points for the next round into a new *HEAP. */
363 static void
364 find_traces_1_round (branch_th, exec_th, count_th, traces, n_traces, round,
365 heap)
366 int branch_th;
367 int exec_th;
368 gcov_type count_th;
369 struct trace *traces;
370 int *n_traces;
371 int round;
372 fibheap_t *heap;
374 /* Heap for discarded basic blocks which are possible starting points for
375 the next round. */
376 fibheap_t new_heap = fibheap_new ();
378 while (!fibheap_empty (*heap))
380 basic_block bb;
381 struct trace *trace;
382 edge best_edge, e;
383 fibheapkey_t key;
385 bb = fibheap_extract_min (*heap);
386 bbd[bb->index].heap = NULL;
387 bbd[bb->index].node = NULL;
389 if (rtl_dump_file)
390 fprintf (rtl_dump_file, "Getting bb %d\n", bb->index);
392 /* If the BB's frequency or count is too low, send BB to the next round.  */
393 if (bb->frequency < exec_th || bb->count < count_th
394 || ((round < N_ROUNDS - 1) && probably_never_executed_bb_p (bb)))
396 int key = bb_to_key (bb);
397 bbd[bb->index].heap = new_heap;
398 bbd[bb->index].node = fibheap_insert (new_heap, key, bb);
400 if (rtl_dump_file)
401 fprintf (rtl_dump_file,
402 " Possible start point of next round: %d (key: %d)\n",
403 bb->index, key);
404 continue;
407 trace = traces + *n_traces;
408 trace->first = bb;
409 trace->round = round;
410 trace->length = 0;
411 (*n_traces)++;
415 int prob, freq;
417 /* The probability and frequency of the best edge. */
418 int best_prob = INT_MIN / 2;
419 int best_freq = INT_MIN / 2;
421 best_edge = NULL;
422 mark_bb_visited (bb, *n_traces);
423 trace->length++;
425 if (rtl_dump_file)
426 fprintf (rtl_dump_file, "Basic block %d was visited in trace %d\n",
427 bb->index, *n_traces - 1);
429 /* Select the successor that will be placed after BB. */
430 for (e = bb->succ; e; e = e->succ_next)
432 if (e->flags & EDGE_FAKE)
433 abort ();
435 if (e->dest == EXIT_BLOCK_PTR)
436 continue;
438 if (RBI (e->dest)->visited
439 && RBI (e->dest)->visited != *n_traces)
440 continue;
442 prob = e->probability;
443 freq = EDGE_FREQUENCY (e);
445 /* Skip the edge if it cannot be a fallthru edge, or if its successor is
446 improbable or infrequent (i.e. it is an unsuitable successor).  */
447 if (!(e->flags & EDGE_CAN_FALLTHRU) || (e->flags & EDGE_COMPLEX)
448 || prob < branch_th || freq < exec_th || e->count < count_th)
449 continue;
451 if (better_edge_p (bb, e, prob, freq, best_prob, best_freq))
453 best_edge = e;
454 best_prob = prob;
455 best_freq = freq;
459 /* If the best destination has multiple predecessors, and can be
460 duplicated cheaper than a jump, don't allow it to be added
461 to a trace. We'll duplicate it when connecting traces. */
462 if (best_edge && best_edge->dest->pred->pred_next
463 && copy_bb_p (best_edge->dest, 0))
464 best_edge = NULL;
466 /* Add all non-selected successors to the heaps. */
467 for (e = bb->succ; e; e = e->succ_next)
469 if (e == best_edge
470 || e->dest == EXIT_BLOCK_PTR
471 || RBI (e->dest)->visited)
472 continue;
474 key = bb_to_key (e->dest);
476 if (bbd[e->dest->index].heap)
478 /* E->DEST is already in some heap. */
479 if (key != bbd[e->dest->index].node->key)
481 if (rtl_dump_file)
483 fprintf (rtl_dump_file,
484 "Changing key for bb %d from %ld to %ld.\n",
485 e->dest->index,
486 (long) bbd[e->dest->index].node->key,
487 key);
489 fibheap_replace_key (bbd[e->dest->index].heap,
490 bbd[e->dest->index].node, key);
493 else
495 fibheap_t which_heap = *heap;
497 prob = e->probability;
498 freq = EDGE_FREQUENCY (e);
500 if (!(e->flags & EDGE_CAN_FALLTHRU)
501 || (e->flags & EDGE_COMPLEX)
502 || prob < branch_th || freq < exec_th
503 || e->count < count_th)
505 if (round < N_ROUNDS - 1)
506 which_heap = new_heap;
509 bbd[e->dest->index].heap = which_heap;
510 bbd[e->dest->index].node = fibheap_insert (which_heap,
511 key, e->dest);
513 if (rtl_dump_file)
515 fprintf (rtl_dump_file,
516 " Possible start of %s round: %d (key: %ld)\n",
517 (which_heap == new_heap) ? "next" : "this",
518 e->dest->index, (long) key);
524 if (best_edge) /* Suitable successor was found. */
526 if (RBI (best_edge->dest)->visited == *n_traces)
528 /* We do nothing with loops that consist of a single basic block.  */
529 if (best_edge->dest != bb)
531 if (EDGE_FREQUENCY (best_edge)
532 > 4 * best_edge->dest->frequency / 5)
534 /* The back edge carries more than 4/5 of the loop header's frequency,
535 i.e. the loop has at least 4 iterations on average.  If the loop header
536 is not the first block of the function we can rotate the loop.  */
538 if (best_edge->dest != ENTRY_BLOCK_PTR->next_bb)
540 if (rtl_dump_file)
542 fprintf (rtl_dump_file,
543 "Rotating loop %d - %d\n",
544 best_edge->dest->index, bb->index);
546 RBI (bb)->next = best_edge->dest;
547 bb = rotate_loop (best_edge, trace, *n_traces);
550 else
552 /* The loop has less than 4 iterations. */
554 /* Check whether there is another edge from BB. */
555 edge another_edge;
556 for (another_edge = bb->succ;
557 another_edge;
558 another_edge = another_edge->succ_next)
559 if (another_edge != best_edge)
560 break;
562 if (!another_edge && copy_bb_p (best_edge->dest,
563 !optimize_size))
565 bb = copy_bb (best_edge->dest, best_edge, bb,
566 *n_traces);
571 /* Terminate the trace. */
572 break;
574 else
576 /* Check for a situation
578    A
579   /|
580  B |
581   \|
582    C
584 where
585 EDGE_FREQUENCY (AB) + EDGE_FREQUENCY (BC)
586 >= EDGE_FREQUENCY (AC).
587 (i.e. 2 * B->frequency >= EDGE_FREQUENCY (AC) )
588 Best ordering is then A B C.
590 This situation is created for example by:
592 if (A) B;
593 C;
597 for (e = bb->succ; e; e = e->succ_next)
598 if (e != best_edge
599 && (e->flags & EDGE_CAN_FALLTHRU)
600 && !(e->flags & EDGE_COMPLEX)
601 && !RBI (e->dest)->visited
602 && !e->dest->pred->pred_next
603 && e->dest->succ
604 && (e->dest->succ->flags & EDGE_CAN_FALLTHRU)
605 && !(e->dest->succ->flags & EDGE_COMPLEX)
606 && !e->dest->succ->succ_next
607 && e->dest->succ->dest == best_edge->dest
608 && 2 * e->dest->frequency >= EDGE_FREQUENCY (best_edge))
610 best_edge = e;
611 if (rtl_dump_file)
612 fprintf (rtl_dump_file, "Selecting BB %d\n",
613 best_edge->dest->index);
614 break;
617 RBI (bb)->next = best_edge->dest;
618 bb = best_edge->dest;
622 while (best_edge);
623 trace->last = bb;
624 bbd[trace->first->index].start_of_trace = *n_traces - 1;
625 bbd[trace->last->index].end_of_trace = *n_traces - 1;
627 /* The trace is terminated, so we have to recount the keys in the heap
628 (some blocks can now have a lower key because one of their predecessors
629 is an end of a trace).  */
630 for (e = bb->succ; e; e = e->succ_next)
632 if (e->dest == EXIT_BLOCK_PTR
633 || RBI (e->dest)->visited)
634 continue;
636 if (bbd[e->dest->index].heap)
638 key = bb_to_key (e->dest);
639 if (key != bbd[e->dest->index].node->key)
641 if (rtl_dump_file)
643 fprintf (rtl_dump_file,
644 "Changing key for bb %d from %ld to %ld.\n",
645 e->dest->index,
646 (long) bbd[e->dest->index].node->key, key);
648 fibheap_replace_key (bbd[e->dest->index].heap,
649 bbd[e->dest->index].node,
650 key);
656 fibheap_delete (*heap);
658 /* "Return" the new heap. */
659 *heap = new_heap;
662 /* Create a duplicate of the basic block OLD_BB, redirect edge E to it, add it
663 to the trace after BB, mark the duplicate as visited and update the pass'
664 data structures (TRACE is the number of the trace OLD_BB is duplicated into). */
666 static basic_block
667 copy_bb (old_bb, e, bb, trace)
668 basic_block old_bb;
669 edge e;
670 basic_block bb;
671 int trace;
673 basic_block new_bb;
675 new_bb = cfg_layout_duplicate_bb (old_bb, e);
676 if (e->dest != new_bb)
677 abort ();
678 if (RBI (e->dest)->visited)
679 abort ();
680 if (rtl_dump_file)
681 fprintf (rtl_dump_file,
682 "Duplicated bb %d (created bb %d)\n",
683 old_bb->index, new_bb->index);
684 RBI (new_bb)->visited = trace;
685 RBI (new_bb)->next = RBI (bb)->next;
686 RBI (bb)->next = new_bb;
688 if (new_bb->index >= array_size || last_basic_block > array_size)
690 int i;
691 int new_size;
693 new_size = MAX (last_basic_block, new_bb->index + 1);
694 new_size = GET_ARRAY_SIZE (new_size);
695 bbd = xrealloc (bbd, new_size * sizeof (bbro_basic_block_data));
696 for (i = array_size; i < new_size; i++)
698 bbd[i].start_of_trace = -1;
699 bbd[i].end_of_trace = -1;
700 bbd[i].heap = NULL;
701 bbd[i].node = NULL;
703 array_size = new_size;
705 if (rtl_dump_file)
707 fprintf (rtl_dump_file,
708 "Growing the dynamic array to %d elements.\n",
709 array_size);
713 return new_bb;
716 /* Compute and return the key (for the heap) of the basic block BB. */
718 static fibheapkey_t
719 bb_to_key (bb)
720 basic_block bb;
722 edge e;
724 int priority = 0;
726 /* Do not start in probably never executed blocks. */
727 if (probably_never_executed_bb_p (bb))
728 return BB_FREQ_MAX;
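
/* The heaps used by this pass are min-heaps (see fibheap_extract_min in
   find_traces_1_round), so the large positive key BB_FREQ_MAX pushes such a
   block to the very end of its round, while the negative keys computed below
   favor a block.  */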
730 /* Prefer blocks whose predecessor is an end of some trace
731 or whose predecessor edge is EDGE_DFS_BACK. */
732 for (e = bb->pred; e; e = e->pred_next)
734 if ((e->src != ENTRY_BLOCK_PTR && bbd[e->src->index].end_of_trace >= 0)
735 || (e->flags & EDGE_DFS_BACK))
737 int edge_freq = EDGE_FREQUENCY (e);
739 if (edge_freq > priority)
740 priority = edge_freq;
744 if (priority)
745 /* The block with priority should have significantly lower key. */
746 return -(100 * BB_FREQ_MAX + 100 * priority + bb->frequency);
747 return -bb->frequency;
750 /* Return true when the edge E from basic block BB is better than the temporary
751 best edge (details are in function). The probability of edge E is PROB. The
752 frequency of the successor is FREQ. The current best probability is
753 BEST_PROB, the best frequency is BEST_FREQ.
754 The edge is considered to be equivalent when PROB does not differ much from
755 BEST_PROB; similarly for frequency. */
757 static bool
758 better_edge_p (bb, e, prob, freq, best_prob, best_freq)
759 basic_block bb;
760 edge e;
761 int prob;
762 int freq;
763 int best_prob;
764 int best_freq;
766 bool is_better_edge;
768 /* The BEST_* values do not have to be best, but can be a bit smaller than
769 maximum values. */
770 int diff_prob = best_prob / 10;
771 int diff_freq = best_freq / 10;
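
/* For example, with BEST_PROB == 5000 any PROB between 4500 and 5500 is
   treated as equivalent and the frequency comparison below decides.  */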
773 if (prob > best_prob + diff_prob)
774 /* The edge has higher probability than the temporary best edge. */
775 is_better_edge = true;
776 else if (prob < best_prob - diff_prob)
777 /* The edge has lower probability than the temporary best edge. */
778 is_better_edge = false;
779 else if (freq < best_freq - diff_freq)
780 /* The edge and the temporary best edge have almost equivalent
781 probabilities. The higher frequency of a successor now means
782 that there is another edge going into that successor.
783 This successor has lower frequency so it is better. */
784 is_better_edge = true;
785 else if (freq > best_freq + diff_freq)
786 /* This successor has higher frequency so it is worse. */
787 is_better_edge = false;
788 else if (e->dest->prev_bb == bb)
789 /* The edges have equivalent probabilities and the successors have equivalent
790 frequencies; prefer the successor that immediately follows BB in the current order. */
791 is_better_edge = true;
792 else
793 is_better_edge = false;
795 return is_better_edge;
798 /* Connect the traces in the array TRACES; N_TRACES is the count of traces.  */
800 static void
801 connect_traces (n_traces, traces)
802 int n_traces;
803 struct trace *traces;
805 int i;
806 bool *connected;
807 int last_trace;
808 int freq_threshold;
809 gcov_type count_threshold;
811 freq_threshold = max_entry_frequency * DUPLICATION_THRESHOLD / 1000;
812 if (max_entry_count < INT_MAX / 1000)
813 count_threshold = max_entry_count * DUPLICATION_THRESHOLD / 1000;
814 else
815 count_threshold = max_entry_count / 1000 * DUPLICATION_THRESHOLD;
817 connected = xcalloc (n_traces, sizeof (bool));
818 last_trace = -1;
819 for (i = 0; i < n_traces; i++)
821 int t = i;
822 int t2;
823 edge e, best;
824 int best_len;
826 if (connected[t])
827 continue;
829 connected[t] = true;
831 /* Find the predecessor traces. */
832 for (t2 = t; t2 > 0;)
834 best = NULL;
835 best_len = 0;
836 for (e = traces[t2].first->pred; e; e = e->pred_next)
838 int si = e->src->index;
840 if (e->src != ENTRY_BLOCK_PTR
841 && (e->flags & EDGE_CAN_FALLTHRU)
842 && !(e->flags & EDGE_COMPLEX)
843 && bbd[si].end_of_trace >= 0
844 && !connected[bbd[si].end_of_trace]
845 && (!best
846 || e->probability > best->probability
847 || (e->probability == best->probability
848 && traces[bbd[si].end_of_trace].length > best_len)))
850 best = e;
851 best_len = traces[bbd[si].end_of_trace].length;
854 if (best)
856 RBI (best->src)->next = best->dest;
857 t2 = bbd[best->src->index].end_of_trace;
858 connected[t2] = true;
859 if (rtl_dump_file)
861 fprintf (rtl_dump_file, "Connection: %d %d\n",
862 best->src->index, best->dest->index);
865 else
866 break;
869 if (last_trace >= 0)
870 RBI (traces[last_trace].last)->next = traces[t2].first;
871 last_trace = t;
873 /* Find the successor traces. */
874 while (1)
876 /* Find the continuation of the chain. */
877 best = NULL;
878 best_len = 0;
879 for (e = traces[t].last->succ; e; e = e->succ_next)
881 int di = e->dest->index;
883 if (e->dest != EXIT_BLOCK_PTR
884 && (e->flags & EDGE_CAN_FALLTHRU)
885 && !(e->flags & EDGE_COMPLEX)
886 && bbd[di].start_of_trace >= 0
887 && !connected[bbd[di].start_of_trace]
888 && (!best
889 || e->probability > best->probability
890 || (e->probability == best->probability
891 && traces[bbd[di].start_of_trace].length > best_len)))
893 best = e;
894 best_len = traces[bbd[di].start_of_trace].length;
898 if (best)
900 if (rtl_dump_file)
902 fprintf (rtl_dump_file, "Connection: %d %d\n",
903 best->src->index, best->dest->index);
905 t = bbd[best->dest->index].start_of_trace;
906 RBI (traces[last_trace].last)->next = traces[t].first;
907 connected[t] = true;
908 last_trace = t;
910 else
912 /* Try to connect the traces by duplication of 1 block. */
913 edge e2;
914 basic_block next_bb = NULL;
915 bool try_copy = false;
917 for (e = traces[t].last->succ; e; e = e->succ_next)
918 if (e->dest != EXIT_BLOCK_PTR
919 && (e->flags & EDGE_CAN_FALLTHRU)
920 && !(e->flags & EDGE_COMPLEX)
921 && (!best || e->probability > best->probability))
923 edge best2 = NULL;
924 int best2_len = 0;
926 /* If the destination is a start of a trace which is only
927 one block long, then no need to search the successor
928 blocks of the trace. Accept it. */
929 if (bbd[e->dest->index].start_of_trace >= 0
930 && traces[bbd[e->dest->index].start_of_trace].length
931 == 1)
933 best = e;
934 try_copy = true;
935 continue;
938 for (e2 = e->dest->succ; e2; e2 = e2->succ_next)
940 int di = e2->dest->index;
942 if (e2->dest == EXIT_BLOCK_PTR
943 || ((e2->flags & EDGE_CAN_FALLTHRU)
944 && !(e2->flags & EDGE_COMPLEX)
945 && bbd[di].start_of_trace >= 0
946 && !connected[bbd[di].start_of_trace]
947 && (EDGE_FREQUENCY (e2) >= freq_threshold)
948 && (e2->count >= count_threshold)
949 && (!best2
950 || e2->probability > best2->probability
951 || (e2->probability == best2->probability
952 && traces[bbd[di].start_of_trace].length
953 > best2_len))))
955 best = e;
956 best2 = e2;
957 if (e2->dest != EXIT_BLOCK_PTR)
958 best2_len = traces[bbd[di].start_of_trace].length;
959 else
960 best2_len = INT_MAX;
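/* An edge to the exit block has no successor trace whose length could be
   measured, so treat its length as maximal and let it win ties in probability.  */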
961 next_bb = e2->dest;
962 try_copy = true;
967 /* Copy tiny blocks always; copy larger blocks only when the
968 edge is traversed frequently enough. */
969 if (try_copy
970 && copy_bb_p (best->dest,
971 !optimize_size
972 && EDGE_FREQUENCY (best) >= freq_threshold
973 && best->count >= count_threshold))
975 basic_block new_bb;
977 if (rtl_dump_file)
979 fprintf (rtl_dump_file, "Connection: %d %d ",
980 traces[t].last->index, best->dest->index);
981 if (!next_bb)
982 fputc ('\n', rtl_dump_file);
983 else if (next_bb == EXIT_BLOCK_PTR)
984 fprintf (rtl_dump_file, "exit\n");
985 else
986 fprintf (rtl_dump_file, "%d\n", next_bb->index);
989 new_bb = copy_bb (best->dest, best, traces[t].last, t);
990 traces[t].last = new_bb;
991 if (next_bb && next_bb != EXIT_BLOCK_PTR)
993 t = bbd[next_bb->index].start_of_trace;
994 RBI (traces[last_trace].last)->next = traces[t].first;
995 connected[t] = true;
996 last_trace = t;
998 else
999 break; /* Stop finding the successor traces. */
1001 else
1002 break; /* Stop finding the successor traces. */
1007 if (rtl_dump_file)
1009 basic_block bb;
1011 fprintf (rtl_dump_file, "Final order:\n");
1012 for (bb = traces[0].first; bb; bb = RBI (bb)->next)
1013 fprintf (rtl_dump_file, "%d ", bb->index);
1014 fprintf (rtl_dump_file, "\n");
1015 fflush (rtl_dump_file);
1018 FREE (connected);
1021 /* Return true when BB can and should be copied. CODE_MAY_GROW is true
1022 when code size is allowed to grow by duplication. */
1024 static bool
1025 copy_bb_p (bb, code_may_grow)
1026 basic_block bb;
1027 int code_may_grow;
1029 int size = 0;
1030 int max_size = uncond_jump_length;
1031 rtx insn;
1033 if (!bb->frequency)
1034 return false;
1035 if (!bb->pred || !bb->pred->pred_next)
1036 return false;
1037 if (!cfg_layout_can_duplicate_bb_p (bb))
1038 return false;
1040 if (code_may_grow && maybe_hot_bb_p (bb))
1041 max_size *= 8;
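/* MAX_SIZE starts as the length of one unconditional jump; for hot blocks,
   when code may grow, allow the copy to be up to eight times that long.  */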
1043 for (insn = bb->head; insn != NEXT_INSN (bb->end);
1044 insn = NEXT_INSN (insn))
1046 if (INSN_P (insn))
1047 size += get_attr_length (insn);
1050 if (size <= max_size)
1051 return true;
1053 if (rtl_dump_file)
1055 fprintf (rtl_dump_file,
1056 "Block %d can't be copied because its size = %d.\n",
1057 bb->index, size);
1060 return false;
1063 /* Return the length of unconditional jump instruction. */
1065 static int
1066 get_uncond_jump_length ()
1068 rtx label, jump;
1069 int length;
1071 label = emit_label_before (gen_label_rtx (), get_insns ());
1072 jump = emit_jump_insn (gen_jump (label));
1074 length = get_attr_length (jump);
1076 delete_insn (jump);
1077 delete_insn (label);
1078 return length;
1081 /* Reorder basic blocks. The main entry point to this file. */
1083 void
1084 reorder_basic_blocks ()
1086 int n_traces;
1087 int i;
1088 struct trace *traces;
1090 if (n_basic_blocks <= 1)
1091 return;
1093 if ((* targetm.cannot_modify_jumps_p) ())
1094 return;
1096 cfg_layout_initialize (NULL);
1098 set_edge_can_fallthru_flag ();
1099 mark_dfs_back_edges ();
1101 /* We estimate the length of an unconditional jump insn only once, since the
1102 code for getting the insn length always returns the minimal length now. */
1103 if (uncond_jump_length == 0)
1104 uncond_jump_length = get_uncond_jump_length ();
1106 /* We need to know some information for each basic block. */
1107 array_size = GET_ARRAY_SIZE (last_basic_block);
1108 bbd = xmalloc (array_size * sizeof (bbro_basic_block_data));
1109 for (i = 0; i < array_size; i++)
1111 bbd[i].start_of_trace = -1;
1112 bbd[i].end_of_trace = -1;
1113 bbd[i].heap = NULL;
1114 bbd[i].node = NULL;
1117 traces = xmalloc (n_basic_blocks * sizeof (struct trace));
1118 n_traces = 0;
1119 find_traces (&n_traces, traces);
1120 connect_traces (n_traces, traces);
1121 FREE (traces);
1122 FREE (bbd);
1124 if (rtl_dump_file)
1125 dump_flow_info (rtl_dump_file);
1127 cfg_layout_finalize ();