/* Basic block reordering routines for the GNU compiler.
   Copyright (C) 2000, 2001 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 2, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING.  If not, write to the Free
   Software Foundation, 59 Temple Place - Suite 330, Boston, MA
   02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "output.h"
#include "function.h"
#include "obstack.h"
#include "cfglayout.h"

/* The contents of the current function definition are allocated
   in this obstack, and all are freed at the end of the function.  */
extern struct obstack flow_obstack;

/* Holds the interesting trailing notes for the function.  */
static rtx function_tail_eff_head;

static rtx skip_insns_after_block PARAMS ((basic_block));
static void record_effective_endpoints PARAMS ((void));
static rtx label_for_bb PARAMS ((basic_block));
static void fixup_reorder_chain PARAMS ((void));

static void set_block_levels PARAMS ((tree, int));
static void change_scope PARAMS ((rtx, tree, tree));

void verify_insn_chain PARAMS ((void));
static void fixup_fallthru_exit_predecessor PARAMS ((void));

/* Map insn uid to lexical block.  */
static varray_type insn_scopes;

/* Skip over inter-block insns occurring after BB which are typically
   associated with BB (e.g., barriers).  If there are any such insns,
   we return the last one.  Otherwise, we return the end of BB.  */

static rtx
skip_insns_after_block (bb)
     basic_block bb;
{
  rtx insn, last_insn, next_head, prev;

  next_head = NULL_RTX;
  if (bb->index + 1 != n_basic_blocks)
    next_head = BASIC_BLOCK (bb->index + 1)->head;

  for (last_insn = insn = bb->end; (insn = NEXT_INSN (insn)) != 0; )
    {
      if (insn == next_head)
	break;

      switch (GET_CODE (insn))
	{
	case BARRIER:
	  last_insn = insn;
	  continue;

	case NOTE:
	  switch (NOTE_LINE_NUMBER (insn))
	    {
	    case NOTE_INSN_LOOP_END:
	    case NOTE_INSN_BLOCK_END:
	      last_insn = insn;
	      continue;
	    case NOTE_INSN_DELETED:
	    case NOTE_INSN_DELETED_LABEL:
	      continue;

	    default:
	      continue;
	      break;
	    }
	  break;

	case CODE_LABEL:
	  if (NEXT_INSN (insn)
	      && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
	      && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
		  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
	    {
	      insn = NEXT_INSN (insn);
	      last_insn = insn;
	      continue;
	    }
	  break;

	default:
	  break;
	}

      break;
    }

  /* It is possible to hit a contradictory sequence.  For instance:

     jump_insn
     NOTE_INSN_LOOP_BEG
     barrier

     where the barrier belongs to jump_insn, but the note does not.  Such a
     sequence can be created by removing the basic block originally
     following NOTE_INSN_LOOP_BEG.  In that case, reorder the notes.  */

  for (insn = last_insn; insn != bb->end; insn = prev)
    {
      prev = PREV_INSN (insn);
      if (GET_CODE (insn) == NOTE)
	switch (NOTE_LINE_NUMBER (insn))
	  {
	  case NOTE_INSN_LOOP_END:
	  case NOTE_INSN_BLOCK_END:
	  case NOTE_INSN_DELETED:
	  case NOTE_INSN_DELETED_LABEL:
	    continue;
	  default:
	    reorder_insns (insn, insn, last_insn);
	  }
    }

  return last_insn;
}

/* Locate or create a label for a given basic block.  */

static rtx
label_for_bb (bb)
     basic_block bb;
{
  rtx label = bb->head;

  if (GET_CODE (label) != CODE_LABEL)
    {
      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Emitting label for block %d\n", bb->index);

      label = block_label (bb);
      if (bb->head == PREV_INSN (RBI (bb)->eff_head))
	RBI (bb)->eff_head = label;
    }

  return label;
}

/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.  */

static void
record_effective_endpoints ()
{
  rtx next_insn = get_insns ();
  int i;

  for (i = 0; i < n_basic_blocks; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx end;

      RBI (bb)->eff_head = next_insn;
      end = skip_insns_after_block (bb);
      RBI (bb)->eff_end = end;
      next_insn = NEXT_INSN (end);
    }

  function_tail_eff_head = next_insn;
}

/* Build a varray mapping INSN_UID to lexical block, recorded in
   insn_scopes.  */

void
scope_to_insns_initialize ()
{
  tree block = NULL;
  rtx insn, next;

  VARRAY_TREE_INIT (insn_scopes, get_max_uid (), "insn scopes");

  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);

      if (active_insn_p (insn)
	  && GET_CODE (PATTERN (insn)) != ADDR_VEC
	  && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
	VARRAY_TREE (insn_scopes, INSN_UID (insn)) = block;
      else if (GET_CODE (insn) == NOTE)
	{
	  switch (NOTE_LINE_NUMBER (insn))
	    {
	    case NOTE_INSN_BLOCK_BEG:
	      block = NOTE_BLOCK (insn);
	      delete_insn (insn);
	      break;
	    case NOTE_INSN_BLOCK_END:
	      block = BLOCK_SUPERCONTEXT (block);
	      delete_insn (insn);
	      break;
	    default:
	      break;
	    }
	}
    }
}

/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

static void
set_block_levels (block, level)
     tree block;
     int level;
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}

/* Emit lexical block notes needed to change scope from S1 to S2.  */
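
/* A purely illustrative example (the block names here are hypothetical):
   given a block tree FN { A { B } C }, changing scope from B to C emits,
   immediately before the insn, NOTE_INSN_BLOCK_END for B, then
   NOTE_INSN_BLOCK_END for A, then NOTE_INSN_BLOCK_BEG for C; the common
   ancestor FN is neither closed nor reopened.  */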

static void
change_scope (orig_insn, s1, s2)
     rtx orig_insn;
     tree s1, s2;
{
  rtx insn = orig_insn;
  tree com = NULL_TREE;
  tree ts1 = s1, ts2 = s2;
  tree s;

  while (ts1 != ts2)
    {
      if (ts1 == NULL || ts2 == NULL)
	abort ();
      if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
	ts1 = BLOCK_SUPERCONTEXT (ts1);
      else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
	ts2 = BLOCK_SUPERCONTEXT (ts2);
      else
	{
	  ts1 = BLOCK_SUPERCONTEXT (ts1);
	  ts2 = BLOCK_SUPERCONTEXT (ts2);
	}
    }
  com = ts1;

  /* Close scopes.  */
  s = s1;
  while (s != com)
    {
      rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
      NOTE_BLOCK (note) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }

  /* Open scopes.  */
  s = s2;
  while (s != com)
    {
      insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
      NOTE_BLOCK (insn) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }
}

/* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
   on the scope tree and the newly reordered instructions.  */

void
scope_to_insns_finalize ()
{
  tree cur_block = DECL_INITIAL (cfun->decl);
  rtx insn, note;

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (cur_block, 0);

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      tree this_block;

      if ((size_t) INSN_UID (insn) >= insn_scopes->num_elements)
	continue;
      this_block = VARRAY_TREE (insn_scopes, INSN_UID (insn));
      if (! this_block)
	continue;

      if (this_block != cur_block)
	{
	  change_scope (insn, cur_block, this_block);
	  cur_block = this_block;
	}
    }

  VARRAY_FREE (insn_scopes);

  /* change_scope emits before the insn, not after.  */
  note = emit_note (NULL, NOTE_INSN_DELETED);
  change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
  delete_insn (note);

  reorder_blocks ();
}

/* Given a reorder chain, rearrange the code to match.  */

static void
fixup_reorder_chain ()
{
  basic_block bb, last_bb;
  int index;
  rtx insn;
  int old_n_basic_blocks = n_basic_blocks;

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  for (last_bb = BASIC_BLOCK (0), bb = RBI (last_bb)->next, index = 1;
       bb != 0;
       last_bb = bb, bb = RBI (bb)->next, index++)
    {
      rtx last_e = RBI (last_bb)->eff_end;
      rtx curr_h = RBI (bb)->eff_head;

      NEXT_INSN (last_e) = curr_h;
      PREV_INSN (curr_h) = last_e;
    }

  if (index != n_basic_blocks)
    abort ();

  insn = RBI (last_bb)->eff_end;
  NEXT_INSN (insn) = function_tail_eff_head;
  if (function_tail_eff_head)
    PREV_INSN (function_tail_eff_head) = insn;

  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);

  set_last_insn (insn);
#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  /* Now add jumps and labels as needed to match the blocks' new
     outgoing edges.  */

  for (bb = BASIC_BLOCK (0); bb ; bb = RBI (bb)->next)
    {
      edge e_fall, e_taken, e;
      rtx bb_end_insn;
      basic_block nb;

      if (bb->succ == NULL)
	continue;

      /* Find the old fallthru edge, and another non-EH edge for
	 a taken jump.  */
      e_taken = e_fall = NULL;
      for (e = bb->succ; e ; e = e->succ_next)
	if (e->flags & EDGE_FALLTHRU)
	  e_fall = e;
	else if (! (e->flags & EDGE_EH))
	  e_taken = e;

      bb_end_insn = bb->end;
      if (GET_CODE (bb_end_insn) == JUMP_INSN)
	{
	  if (any_condjump_p (bb_end_insn))
	    {
	      /* If the old fallthru is still next, nothing to do.  */
	      if (RBI (bb)->next == e_fall->dest
		  || (!RBI (bb)->next
		      && e_fall->dest == EXIT_BLOCK_PTR))
		continue;

	      /* There is one special case: if *neither* block is next,
		 such as happens at the very end of a function, then we'll
		 need to add a new unconditional jump.  Choose the taken
		 edge based on known or assumed probability.  */
	      if (RBI (bb)->next != e_taken->dest)
		{
		  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);

		  if (note
		      && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
		      && invert_jump (bb_end_insn,
				      label_for_bb (e_fall->dest), 0))
		    {
		      e_fall->flags &= ~EDGE_FALLTHRU;
		      e_taken->flags |= EDGE_FALLTHRU;
		      e = e_fall, e_fall = e_taken, e_taken = e;
		    }
		}

	      /* Otherwise we can try to invert the jump.  This will
		 basically never fail, however, keep up the pretense.  */
	      else if (invert_jump (bb_end_insn,
				    label_for_bb (e_fall->dest), 0))
		{
		  e_fall->flags &= ~EDGE_FALLTHRU;
		  e_taken->flags |= EDGE_FALLTHRU;
		  continue;
		}
	    }
	  else if (returnjump_p (bb_end_insn))
	    continue;
	  else
	    {
	      /* Otherwise we have some switch or computed jump.  In the
		 99% case, there should not have been a fallthru edge.  */
	      if (! e_fall)
		continue;

#ifdef CASE_DROPS_THROUGH
	      /* Except for VAX.  Since we didn't have predication for the
		 tablejump, the fallthru block should not have moved.  */
	      if (RBI (bb)->next == e_fall->dest)
		continue;
	      bb_end_insn = skip_insns_after_block (bb);
#else
	      abort ();
#endif
	    }
	}
      else
	{
	  /* No fallthru implies a noreturn function with EH edges, or
	     something similarly bizarre.  In any case, we don't need to
	     do anything.  */
	  if (! e_fall)
	    continue;

	  /* If the fallthru block is still next, nothing to do.  */
	  if (RBI (bb)->next == e_fall->dest)
	    continue;

	  /* A fallthru to exit block.  */
	  if (!RBI (bb)->next && e_fall->dest == EXIT_BLOCK_PTR)
	    continue;
	}

      /* We got here if we need to add a new jump insn.  */
      nb = force_nonfallthru (e_fall);
      if (nb)
	{
	  alloc_aux_for_block (nb, sizeof (struct reorder_block_def));
	  RBI (nb)->eff_head = nb->head;
	  RBI (nb)->eff_end = NEXT_INSN (nb->end);
	  RBI (nb)->visited = 1;
	  RBI (nb)->next = RBI (bb)->next;
	  RBI (bb)->next = nb;
	  /* Don't process this new block.  */
	  bb = nb;
	}
    }

  /* Put basic_block_info in the new order.  */
  bb = BASIC_BLOCK (0);
  index = 0;

  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Reordered sequence:\n");

  for (; bb; bb = RBI (bb)->next, index++)
    {
      if (rtl_dump_file)
	fprintf (rtl_dump_file, " %i %sbb %i freq %i\n", index,
		 bb->index >= old_n_basic_blocks ? "compensation " : "",
		 bb->index, bb->frequency);

      bb->index = index;
      BASIC_BLOCK (index) = bb;
    }
}

/* Perform sanity checks on the insn chain.
   1. Check that next/prev pointers are consistent in both the forward and
      reverse direction.
   2. Count insns in chain, going both directions, and check if equal.
   3. Check that get_last_insn () returns the actual end of chain.  */

void
verify_insn_chain ()
{
  rtx x, prevx, nextx;
  int insn_cnt1, insn_cnt2;

  for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
       x != 0;
       prevx = x, insn_cnt1++, x = NEXT_INSN (x))
    if (PREV_INSN (x) != prevx)
      abort ();

  if (prevx != get_last_insn ())
    abort ();

  for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
       x != 0;
       nextx = x, insn_cnt2++, x = PREV_INSN (x))
    if (NEXT_INSN (x) != nextx)
      abort ();

  if (insn_cnt1 != insn_cnt2)
    abort ();
}

/* The block falling through to exit must be the last one in the reordered
   chain.  Ensure it is.  */
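
/* An illustrative sketch (the chain shown is hypothetical): if the reorder
   chain is A -> B -> C and B is the block that falls through to the exit
   block, the splice below removes B from the middle of the chain and
   reattaches it at the end, giving A -> C -> B.  */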

static void
fixup_fallthru_exit_predecessor ()
{
  edge e;
  basic_block bb = NULL;

  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    if (e->flags & EDGE_FALLTHRU)
      bb = e->src;

  if (bb && RBI (bb)->next)
    {
      basic_block c = BASIC_BLOCK (0);

      while (RBI (c)->next != bb)
	c = RBI (c)->next;

      RBI (c)->next = RBI (bb)->next;
      while (RBI (c)->next)
	c = RBI (c)->next;

      RBI (c)->next = bb;
      RBI (bb)->next = NULL;
    }
}

/* Main entry point to this module: initialize the data structures for CFG
   layout changes.  */

void
cfg_layout_initialize ()
{
  alloc_aux_for_blocks (sizeof (struct reorder_block_def));

  scope_to_insns_initialize ();

  record_effective_endpoints ();
}

/* Finalize the changes: reorder insn list according to the sequence, enter
   compensation code, rebuild scope forest.  */

void
cfg_layout_finalize ()
{
  fixup_fallthru_exit_predecessor ();
  fixup_reorder_chain ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  scope_to_insns_finalize ();

  free_aux_for_blocks ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
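
/* A minimal usage sketch, not part of this module (the pass below is
   hypothetical): a pass that wants to rearrange blocks is expected to
   bracket its CFG surgery with the two entry points above, describing the
   desired block order through RBI (bb)->next.  */
#if 0
static void
example_layout_pass ()
{
  cfg_layout_initialize ();

  /* ... transform the CFG and set RBI (bb)->next to form the new
     chain of basic blocks ...  */

  cfg_layout_finalize ();
}
#endif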