/* Basic block reordering routines for the GNU compiler.
   Copyright (C) 2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "output.h"
#include "function.h"
#include "obstack.h"
#include "cfglayout.h"
/* The contents of the current function definition are allocated
   in this obstack, and all are freed at the end of the function.  */
extern struct obstack flow_obstack;

/* Holds the interesting trailing notes for the function.  */
static rtx function_tail_eff_head;
static rtx skip_insns_after_block	PARAMS ((basic_block));
static void record_effective_endpoints	PARAMS ((void));
static rtx label_for_bb		PARAMS ((basic_block));
static void fixup_reorder_chain	PARAMS ((void));
static void set_block_levels		PARAMS ((tree, int));
static void change_scope		PARAMS ((rtx, tree, tree));

void verify_insn_chain		PARAMS ((void));
static void fixup_fallthru_exit_predecessor PARAMS ((void));

/* Map insn uid to lexical block.  */
static varray_type insn_scopes;
/* Skip over inter-block insns occurring after BB which are typically
   associated with BB (e.g., barriers).  If there are any such insns,
   we return the last one.  Otherwise, we return the end of BB.  */

static rtx
skip_insns_after_block (bb)
     basic_block bb;
{
  rtx insn, last_insn, next_head, prev;

  next_head = NULL_RTX;
  if (bb->index + 1 != n_basic_blocks)
    next_head = BASIC_BLOCK (bb->index + 1)->head;

  for (last_insn = insn = bb->end; (insn = NEXT_INSN (insn)) != 0; )
    {
      if (insn == next_head)
	break;

      switch (GET_CODE (insn))
	{
	case BARRIER:
	  last_insn = insn;
	  continue;

	case NOTE:
	  switch (NOTE_LINE_NUMBER (insn))
	    {
	    case NOTE_INSN_LOOP_END:
	    case NOTE_INSN_BLOCK_END:
	      last_insn = insn;
	      continue;
	    case NOTE_INSN_DELETED:
	    case NOTE_INSN_DELETED_LABEL:
	      continue;
	    default:
	      continue;
	    }
	  break;

	case CODE_LABEL:
	  /* A label followed by a jump table belongs to the table's
	     block; absorb both.  */
	  if (NEXT_INSN (insn)
	      && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
	      && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
		  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
	    {
	      insn = NEXT_INSN (insn);
	      last_insn = insn;
	      continue;
	    }
	  break;

	default:
	  break;
	}

      break;
    }

  /* It is possible to hit a contradictory sequence.  For instance:

       jump_insn
       NOTE_INSN_LOOP_BEG
       barrier

     Here the barrier belongs to the jump_insn, but the note does not.
     Such a sequence can be created by removing the basic block that
     originally followed the NOTE_INSN_LOOP_BEG.  In that case, reorder
     the notes.  */

  for (insn = last_insn; insn != bb->end; insn = prev)
    {
      prev = PREV_INSN (insn);
      if (GET_CODE (insn) == NOTE)
	switch (NOTE_LINE_NUMBER (insn))
	  {
	  case NOTE_INSN_LOOP_END:
	  case NOTE_INSN_BLOCK_END:
	  case NOTE_INSN_DELETED:
	  case NOTE_INSN_DELETED_LABEL:
	    continue;
	  default:
	    reorder_insns (insn, insn, last_insn);
	  }
    }

  return last_insn;
}
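/* As a concrete illustration (hypothetical insn stream, not from this
   file): if BB ends in a jump_insn followed by a barrier and a
   NOTE_INSN_LOOP_END, and the next block starts at a code_label, the
   call returns the note, so the barrier and the note are treated as
   part of BB's effective extent by record_effective_endpoints below.  */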
/* Locate or create a label for a given basic block.  */

static rtx
label_for_bb (bb)
     basic_block bb;
{
  rtx label = bb->head;

  if (GET_CODE (label) != CODE_LABEL)
    {
      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Emitting label for block %d\n", bb->index);

      label = block_label (bb);
      if (bb->head == PREV_INSN (RBI (bb)->eff_head))
	RBI (bb)->eff_head = label;
    }

  return label;
}
/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.  */

static void
record_effective_endpoints ()
{
  rtx next_insn = get_insns ();
  int i;

  for (i = 0; i < n_basic_blocks; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx end;

      RBI (bb)->eff_head = next_insn;
      end = skip_insns_after_block (bb);
      RBI (bb)->eff_end = end;
      next_insn = NEXT_INSN (end);
    }

  function_tail_eff_head = next_insn;
}
/* Build a varray mapping INSN_UID to lexical block, stored in
   insn_scopes.  */

void
scope_to_insns_initialize ()
{
  tree block = NULL;
  rtx insn, next;

  VARRAY_TREE_INIT (insn_scopes, get_max_uid (), "insn scopes");

  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);

      if (active_insn_p (insn)
	  && GET_CODE (PATTERN (insn)) != ADDR_VEC
	  && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
	VARRAY_TREE (insn_scopes, INSN_UID (insn)) = block;
      else if (GET_CODE (insn) == NOTE)
	switch (NOTE_LINE_NUMBER (insn))
	  {
	  case NOTE_INSN_BLOCK_BEG:
	    block = NOTE_BLOCK (insn);
	    delete_insn (insn);
	    break;
	  case NOTE_INSN_BLOCK_END:
	    block = BLOCK_SUPERCONTEXT (block);
	    delete_insn (insn);
	    break;
	  default:
	    break;
	  }
    }
}
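/* A worked example (assumed insn stream):

     NOTE_INSN_BLOCK_BEG (block B)
       insn I1
     NOTE_INSN_BLOCK_END
     insn I2

   I1 is recorded with scope B and I2 with B's supercontext; the notes
   themselves go away here and are re-created around the reordered
   insns by scope_to_insns_finalize.  */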
/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

static void
set_block_levels (block, level)
     tree block;
     int level;
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}
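/* For instance, with the (hypothetical) block tree A { B { C } D },
   A gets BLOCK_NUMBER 0, B and D get 1, and C gets 2.  change_scope
   below relies on these depths to walk both scopes up to their common
   parent in lock step.  */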
/* Emit lexical block notes needed to change scope from S1 to S2.  */

static void
change_scope (orig_insn, s1, s2)
     rtx orig_insn;
     tree s1, s2;
{
  rtx insn = orig_insn;
  tree com = NULL_TREE;
  tree ts1 = s1, ts2 = s2;
  tree s;

  /* Walk both scopes up to their common parent, using the depths
     recorded by set_block_levels.  */
  while (ts1 != ts2)
    {
      if (ts1 == NULL || ts2 == NULL)
	abort ();
      if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
	ts1 = BLOCK_SUPERCONTEXT (ts1);
      else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
	ts2 = BLOCK_SUPERCONTEXT (ts2);
      else
	{
	  ts1 = BLOCK_SUPERCONTEXT (ts1);
	  ts2 = BLOCK_SUPERCONTEXT (ts2);
	}
    }
  com = ts1;

  /* Close scopes.  */
  s = s1;
  while (s != com)
    {
      rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
      NOTE_BLOCK (note) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }

  /* Open scopes.  */
  s = s2;
  while (s != com)
    {
      insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
      NOTE_BLOCK (insn) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }
}
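/* Worked example (same hypothetical tree A { B { C } D }): changing
   scope from C to D finds A as the common parent, so we emit
   NOTE_INSN_BLOCK_END notes for C and then B, followed by a
   NOTE_INSN_BLOCK_BEG note for D, all before ORIG_INSN.  */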
/* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
   on the scope tree and the newly reordered instructions.  */

void
scope_to_insns_finalize ()
{
  tree cur_block = DECL_INITIAL (cfun->decl);
  rtx insn, note;

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (cur_block, 0);

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      tree this_block;

      if ((size_t) INSN_UID (insn) >= insn_scopes->num_elements)
	continue;
      this_block = VARRAY_TREE (insn_scopes, INSN_UID (insn));
      if (! this_block)
	continue;

      if (this_block != cur_block)
	{
	  change_scope (insn, cur_block, this_block);
	  cur_block = this_block;
	}
    }

  VARRAY_FREE (insn_scopes);

  /* change_scope emits before the insn, not after.  Emit a dummy note
     to close the remaining open scopes behind, then remove it.  */
  note = emit_note (NULL, NOTE_INSN_DELETED);
  change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
  delete_insn (note);
}
/* Given a reorder chain, rearrange the code to match.  */

static void
fixup_reorder_chain ()
{
  basic_block bb, last_bb;
  int index;
  rtx insn;
  int old_n_basic_blocks = n_basic_blocks;

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  for (last_bb = BASIC_BLOCK (0), bb = RBI (last_bb)->next, index = 1;
       bb != 0;
       last_bb = bb, bb = RBI (bb)->next, index++)
    {
      rtx last_e = RBI (last_bb)->eff_end;
      rtx curr_h = RBI (bb)->eff_head;

      NEXT_INSN (last_e) = curr_h;
      PREV_INSN (curr_h) = last_e;
    }

  if (index != n_basic_blocks)
    abort ();

  insn = RBI (last_bb)->eff_end;
  NEXT_INSN (insn) = function_tail_eff_head;
  if (function_tail_eff_head)
    PREV_INSN (function_tail_eff_head) = insn;

  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);
  set_last_insn (insn);
#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif
  /* Now add jumps and labels as needed to match the blocks new
     outgoing edges.  */

  for (bb = BASIC_BLOCK (0); bb; bb = RBI (bb)->next)
    {
      edge e_fall, e_taken, e;
      rtx bb_end_insn;
      basic_block nb;

      if (bb->succ == NULL)
	continue;

      /* Find the old fallthru edge, and another non-EH edge for
	 a taken jump.  */
      e_taken = e_fall = NULL;
      for (e = bb->succ; e; e = e->succ_next)
	if (e->flags & EDGE_FALLTHRU)
	  e_fall = e;
	else if (! (e->flags & EDGE_EH))
	  e_taken = e;

      bb_end_insn = bb->end;
      if (GET_CODE (bb_end_insn) == JUMP_INSN)
	{
	  if (any_condjump_p (bb_end_insn))
	    {
	      /* If the old fallthru is still next, nothing to do.  */
	      if (RBI (bb)->next == e_fall->dest
		  || (!RBI (bb)->next
		      && e_fall->dest == EXIT_BLOCK_PTR))
		continue;

	      /* There is one special case: if *neither* block is next,
		 such as happens at the very end of a function, then we'll
		 need to add a new unconditional jump.  Choose the taken
		 edge based on known or assumed probability.  */
	      if (RBI (bb)->next != e_taken->dest)
		{
		  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);

		  if (note
		      && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
		      && invert_jump (bb_end_insn,
				      label_for_bb (e_fall->dest), 0))
		    {
		      e_fall->flags &= ~EDGE_FALLTHRU;
		      e_taken->flags |= EDGE_FALLTHRU;
		      update_br_prob_note (bb);
		      e = e_fall, e_fall = e_taken, e_taken = e;
		    }
		}

	      /* Otherwise we can try to invert the jump.  This will
		 basically never fail, however, keep up the pretense.  */
	      else if (invert_jump (bb_end_insn,
				    label_for_bb (e_fall->dest), 0))
		{
		  e_fall->flags &= ~EDGE_FALLTHRU;
		  e_taken->flags |= EDGE_FALLTHRU;
		  update_br_prob_note (bb);
		  continue;
		}
	    }
	  else if (returnjump_p (bb_end_insn))
	    continue;
	  else
	    {
	      /* Otherwise we have some switch or computed jump.  In the
		 99% case, there should not have been a fallthru edge.  */
	      if (! e_fall)
		continue;

#ifdef CASE_DROPS_THROUGH
	      /* Except for VAX.  Since we didn't have predication for the
		 tablejump, the fallthru block should not have moved.  */
	      if (RBI (bb)->next == e_fall->dest)
		continue;
	      bb_end_insn = skip_insns_after_block (bb);
#else
	      abort ();
#endif
	    }
	}
      else
	{
	  /* No fallthru implies a noreturn function with EH edges, or
	     something similarly bizarre.  In any case, we don't need to
	     do anything.  */
	  if (! e_fall)
	    continue;

	  /* If the fallthru block is still next, nothing to do.  */
	  if (RBI (bb)->next == e_fall->dest)
	    continue;

	  /* A fallthru to exit block.  */
	  if (!RBI (bb)->next && e_fall->dest == EXIT_BLOCK_PTR)
	    continue;
	}

      /* We got here if we need to add a new jump insn.  */
      nb = force_nonfallthru (e_fall);
      if (nb)
	{
	  alloc_aux_for_block (nb, sizeof (struct reorder_block_def));
	  RBI (nb)->eff_head = nb->head;
	  RBI (nb)->eff_end = NEXT_INSN (nb->end);
	  RBI (nb)->visited = 1;
	  RBI (nb)->next = RBI (bb)->next;
	  RBI (bb)->next = nb;

	  /* Don't process this new block.  */
	  bb = nb;
	}
    }

  /* Put basic_block_info in the new order.  */
  bb = BASIC_BLOCK (0);
  index = 0;

  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Reordered sequence:\n");

  for (; bb; bb = RBI (bb)->next, index++)
    {
      if (rtl_dump_file)
	fprintf (rtl_dump_file, " %i %sbb %i freq %i\n", index,
		 bb->index >= old_n_basic_blocks ? "compensation " : "",
		 bb->index, bb->frequency);

      bb->index = index;
      BASIC_BLOCK (index) = bb;
    }
}
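/* To illustrate the jump fixups above (hypothetical blocks): if A ends
   in a conditional jump to C and falls through to B, and the new chain
   places C immediately after A, the jump is inverted so that B becomes
   the taken target and C the fallthru.  Only when neither successor
   follows A does force_nonfallthru add a compensation block holding an
   unconditional jump.  */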
/* Perform sanity checks on the insn chain.
   1. Check that next/prev pointers are consistent in both the forward and
      reverse directions.
   2. Count insns in chain, going both directions, and check if equal.
   3. Check that get_last_insn () returns the actual end of chain.  */

void
verify_insn_chain ()
{
  rtx x, prevx, nextx;
  int insn_cnt1, insn_cnt2;

  for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
       x != 0;
       prevx = x, insn_cnt1++, x = NEXT_INSN (x))
    if (PREV_INSN (x) != prevx)
      abort ();

  if (prevx != get_last_insn ())
    abort ();

  for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
       x != 0;
       nextx = x, insn_cnt2++, x = PREV_INSN (x))
    if (NEXT_INSN (x) != nextx)
      abort ();

  if (insn_cnt1 != insn_cnt2)
    abort ();
}
/* The block falling through to exit must be the last one in the reordered
   chain.  Ensure it is.  */

static void
fixup_fallthru_exit_predecessor ()
{
  edge e;
  basic_block bb = NULL;

  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    if (e->flags & EDGE_FALLTHRU)
      bb = e->src;

  if (bb && RBI (bb)->next)
    {
      basic_block c = BASIC_BLOCK (0);

      /* Unlink BB from the chain...  */
      while (RBI (c)->next != bb)
	c = RBI (c)->next;
      RBI (c)->next = RBI (bb)->next;

      /* ...and relink it at the end.  */
      while (RBI (c)->next)
	c = RBI (c)->next;
      RBI (c)->next = bb;
      RBI (bb)->next = NULL;
    }
}
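/* E.g. (hypothetical chain): if the reorder chain is 0 -> 1 -> 2 -> 3
   and block 1 falls through to exit, the splice above turns it into
   0 -> 2 -> 3 -> 1, so fixup_reorder_chain never has to add a jump to
   reach the exit block.  */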
/* Main entry point to this module: initialize the datastructures for CFG
   layout changes.  */

void
cfg_layout_initialize ()
{
  alloc_aux_for_blocks (sizeof (struct reorder_block_def));

  scope_to_insns_initialize ();

  record_effective_endpoints ();
}

/* Finalize the changes: reorder insn list according to the sequence, enter
   compensation code, rebuild scope forest.  */

void
cfg_layout_finalize ()
{
  fixup_fallthru_exit_predecessor ();
  fixup_reorder_chain ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  scope_to_insns_finalize ();

  free_aux_for_blocks ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
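/* A minimal usage sketch (illustrative only; example_identity_reorder
   is a hypothetical caller, not part of this module).  A pass fills in
   the RBI (bb)->next chain between the two entry points; here the
   chain merely reproduces the existing block order.  */
#if 0
static void
example_identity_reorder ()
{
  int i;

  cfg_layout_initialize ();

  /* Chain the blocks in their current order; a real pass would compute
     a more profitable sequence here.  */
  for (i = 0; i < n_basic_blocks; i++)
    RBI (BASIC_BLOCK (i))->next
      = i + 1 < n_basic_blocks ? BASIC_BLOCK (i + 1) : NULL;

  cfg_layout_finalize ();
}
#endif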