/* Basic block reordering routines for the GNU compiler.
   Copyright (C) 2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "output.h"
#include "function.h"
#include "obstack.h"
#include "cfglayout.h"
/* The contents of the current function definition are allocated
   in this obstack, and all are freed at the end of the function.  */
extern struct obstack flow_obstack;

/* Holds the interesting trailing notes for the function.  */
static rtx function_tail_eff_head;

static rtx skip_insns_after_block PARAMS ((basic_block));
static void record_effective_endpoints PARAMS ((void));
static rtx label_for_bb PARAMS ((basic_block));
static void fixup_reorder_chain PARAMS ((void));

static void set_block_levels PARAMS ((tree, int));
static void change_scope PARAMS ((rtx, tree, tree));

void verify_insn_chain PARAMS ((void));
static void fixup_fallthru_exit_predecessor PARAMS ((void));

/* Map insn uid to lexical block.  */
static varray_type insn_scopes;
/* Skip over inter-block insns occurring after BB which are typically
   associated with BB (e.g., barriers).  If there are any such insns,
   we return the last one.  Otherwise, we return the end of BB.  */
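
/* For example (an illustrative sketch, not taken from any particular
   dump), a block ending in an unconditional jump is often followed by
   insns that belong with it rather than with the next block:

	(jump_insn ...)			<- bb->end
	(barrier)
	(note NOTE_INSN_LOOP_END)
	(code_label ...)		<- head of the next block

   Here the barrier and the loop-end note are treated as trailing insns
   of BB, and the note is returned as BB's effective end.  */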

static rtx
skip_insns_after_block (bb)
     basic_block bb;
{
  rtx insn, last_insn, next_head, prev;

  next_head = NULL_RTX;
  if (bb->index + 1 != n_basic_blocks)
    next_head = BASIC_BLOCK (bb->index + 1)->head;

  for (last_insn = insn = bb->end; (insn = NEXT_INSN (insn)) != 0; )
    {
      if (insn == next_head)
        break;

      switch (GET_CODE (insn))
        {
        case BARRIER:
          last_insn = insn;
          continue;

        case NOTE:
          switch (NOTE_LINE_NUMBER (insn))
            {
            case NOTE_INSN_LOOP_END:
            case NOTE_INSN_BLOCK_END:
              last_insn = insn;
              continue;
            case NOTE_INSN_DELETED:
            case NOTE_INSN_DELETED_LABEL:
              continue;

            default:
              continue;
            }
          break;

        case CODE_LABEL:
          if (NEXT_INSN (insn)
              && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
              && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
                  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
            {
              insn = NEXT_INSN (insn);
              last_insn = insn;
              continue;
            }
          break;

        default:
          break;
        }

      break;
    }

  /* It is possible to hit a contradictory sequence.  For instance:

     jump_insn
     NOTE_INSN_LOOP_BEG
     barrier

     where the barrier belongs to the jump_insn, but the note does not.
     This can be created by removing the basic block originally following
     NOTE_INSN_LOOP_BEG.  In such a case, reorder the notes.  */

  for (insn = last_insn; insn != bb->end; insn = prev)
    {
      prev = PREV_INSN (insn);
      if (GET_CODE (insn) == NOTE)
        switch (NOTE_LINE_NUMBER (insn))
          {
          case NOTE_INSN_LOOP_END:
          case NOTE_INSN_BLOCK_END:
          case NOTE_INSN_DELETED:
          case NOTE_INSN_DELETED_LABEL:
            continue;
          default:
            reorder_insns (insn, insn, last_insn);
          }
    }

  return last_insn;
}

/* Locate or create a label for a given basic block.  */

static rtx
label_for_bb (bb)
     basic_block bb;
{
  rtx label = bb->head;

  if (GET_CODE (label) != CODE_LABEL)
    {
      if (rtl_dump_file)
        fprintf (rtl_dump_file, "Emitting label for block %d\n", bb->index);

      label = block_label (bb);
      if (bb->head == PREV_INSN (RBI (bb)->eff_head))
        RBI (bb)->eff_head = label;
    }

  return label;
}

/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.  */

static void
record_effective_endpoints ()
{
  rtx next_insn = get_insns ();
  int i;

  for (i = 0; i < n_basic_blocks; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx end;

      RBI (bb)->eff_head = next_insn;
      end = skip_insns_after_block (bb);
      RBI (bb)->eff_end = end;
      next_insn = NEXT_INSN (end);
    }

  function_tail_eff_head = next_insn;
}
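
/* After this pass, the ranges RBI (bb)->eff_head .. RBI (bb)->eff_end
   partition the insn chain between the blocks: each range covers
   bb->head .. bb->end plus any leading insns that follow the previous
   block's effective end and any trailing barriers or notes assigned to BB
   by skip_insns_after_block.  Whatever follows the last block is
   remembered in function_tail_eff_head.  */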

/* Build a varray (insn_scopes) mapping INSN_UID to lexical block, and
   delete the NOTE_INSN_BLOCK_BEG/NOTE_INSN_BLOCK_END notes as they are
   recorded.  */

void
scope_to_insns_initialize ()
{
  tree block = NULL;
  rtx insn, next;

  VARRAY_TREE_INIT (insn_scopes, get_max_uid (), "insn scopes");

  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);

      if (active_insn_p (insn)
          && GET_CODE (PATTERN (insn)) != ADDR_VEC
          && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
        VARRAY_TREE (insn_scopes, INSN_UID (insn)) = block;
      else if (GET_CODE (insn) == NOTE)
        {
          switch (NOTE_LINE_NUMBER (insn))
            {
            case NOTE_INSN_BLOCK_BEG:
              block = NOTE_BLOCK (insn);
              delete_insn (insn);
              break;
            case NOTE_INSN_BLOCK_END:
              block = BLOCK_SUPERCONTEXT (block);
              delete_insn (insn);
              break;
            default:
              break;
            }
        }
    }
}

/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

static void
set_block_levels (block, level)
     tree block;
     int level;
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}

/* Emit lexical block notes needed to change scope from S1 to S2.  */
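
/* As an illustrative example (not tied to any particular function), with
   a block tree

	A
	+-- B
	|   +-- C
	+-- D

   changing scope from C to D closes C and then B (two NOTE_INSN_BLOCK_END
   notes) and opens D (one NOTE_INSN_BLOCK_BEG note); A is the common
   ancestor found by walking up from the deeper block first.  */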

static void
change_scope (orig_insn, s1, s2)
     rtx orig_insn;
     tree s1, s2;
{
  rtx insn = orig_insn;
  tree com = NULL_TREE;
  tree ts1 = s1, ts2 = s2;
  tree s;

  while (ts1 != ts2)
    {
      if (ts1 == NULL || ts2 == NULL)
        abort ();
      if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
        ts1 = BLOCK_SUPERCONTEXT (ts1);
      else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
        ts2 = BLOCK_SUPERCONTEXT (ts2);
      else
        {
          ts1 = BLOCK_SUPERCONTEXT (ts1);
          ts2 = BLOCK_SUPERCONTEXT (ts2);
        }
    }
  com = ts1;

  /* Close scopes.  */
  s = s1;
  while (s != com)
    {
      rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
      NOTE_BLOCK (note) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }

  /* Open scopes.  */
  s = s2;
  while (s != com)
    {
      insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
      NOTE_BLOCK (insn) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }
}

/* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
   on the scope tree and the newly reordered instructions.  */

void
scope_to_insns_finalize ()
{
  tree cur_block = DECL_INITIAL (cfun->decl);
  rtx insn, note;

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (cur_block, 0);

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      tree this_block;

      if ((size_t) INSN_UID (insn) >= insn_scopes->num_elements)
        continue;
      this_block = VARRAY_TREE (insn_scopes, INSN_UID (insn));
      if (! this_block)
        continue;

      if (this_block != cur_block)
        {
          change_scope (insn, cur_block, this_block);
          cur_block = this_block;
        }
    }

  VARRAY_FREE (insn_scopes);

  /* change_scope emits before the insn, not after.  */
  note = emit_note (NULL, NOTE_INSN_DELETED);
  change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
  delete_insn (note);

  reorder_blocks ();
}

/* Given a reorder chain, rearrange the code to match.  */

static void
fixup_reorder_chain ()
{
  basic_block bb, last_bb;
  int index;
  rtx insn;
  int old_n_basic_blocks = n_basic_blocks;

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  for (last_bb = BASIC_BLOCK (0), bb = RBI (last_bb)->next, index = 1;
       bb != 0;
       last_bb = bb, bb = RBI (bb)->next, index++)
    {
      rtx last_e = RBI (last_bb)->eff_end;
      rtx curr_h = RBI (bb)->eff_head;

      NEXT_INSN (last_e) = curr_h;
      PREV_INSN (curr_h) = last_e;
    }

  if (index != n_basic_blocks)
    abort ();

  insn = RBI (last_bb)->eff_end;
  NEXT_INSN (insn) = function_tail_eff_head;
  if (function_tail_eff_head)
    PREV_INSN (function_tail_eff_head) = insn;

  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);

  set_last_insn (insn);
#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  /* Now add jumps and labels as needed to match the blocks' new
     outgoing edges.  */

  for (bb = BASIC_BLOCK (0); bb ; bb = RBI (bb)->next)
    {
      edge e_fall, e_taken, e;
      rtx bb_end_insn;
      basic_block nb;

      if (bb->succ == NULL)
        continue;

      /* Find the old fallthru edge, and another non-EH edge for
         a taken jump.  */
      e_taken = e_fall = NULL;
      for (e = bb->succ; e ; e = e->succ_next)
        if (e->flags & EDGE_FALLTHRU)
          e_fall = e;
        else if (! (e->flags & EDGE_EH))
          e_taken = e;

      bb_end_insn = bb->end;
      if (GET_CODE (bb_end_insn) == JUMP_INSN)
        {
          if (any_condjump_p (bb_end_insn))
            {
              /* If the old fallthru is still next, nothing to do.  */
              if (RBI (bb)->next == e_fall->dest
                  || (!RBI (bb)->next
                      && e_fall->dest == EXIT_BLOCK_PTR))
                continue;

              /* There is one special case: if *neither* block is next,
                 such as happens at the very end of a function, then we'll
                 need to add a new unconditional jump.  Choose the taken
                 edge based on known or assumed probability.  */
              if (RBI (bb)->next != e_taken->dest)
                {
                  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);

                  if (note
                      && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
                      && invert_jump (bb_end_insn,
                                      label_for_bb (e_fall->dest), 0))
                    {
                      e_fall->flags &= ~EDGE_FALLTHRU;
                      e_taken->flags |= EDGE_FALLTHRU;
                      update_br_prob_note (bb);
                      e = e_fall, e_fall = e_taken, e_taken = e;
                    }
                }

              /* Otherwise we can try to invert the jump.  This will
                 basically never fail; however, keep up the pretense.  */
              else if (invert_jump (bb_end_insn,
                                    label_for_bb (e_fall->dest), 0))
                {
                  e_fall->flags &= ~EDGE_FALLTHRU;
                  e_taken->flags |= EDGE_FALLTHRU;
                  update_br_prob_note (bb);
                  continue;
                }
            }
          else if (returnjump_p (bb_end_insn))
            continue;
          else
            {
              /* Otherwise we have some switch or computed jump.  In the
                 99% case, there should not have been a fallthru edge.  */
              if (! e_fall)
                continue;

#ifdef CASE_DROPS_THROUGH
              /* Except for VAX.  Since we didn't have predication for the
                 tablejump, the fallthru block should not have moved.  */
              if (RBI (bb)->next == e_fall->dest)
                continue;
              bb_end_insn = skip_insns_after_block (bb);
#else
              abort ();
#endif
            }
        }
      else
        {
          /* No fallthru implies a noreturn function with EH edges, or
             something similarly bizarre.  In any case, we don't need to
             do anything.  */
          if (! e_fall)
            continue;

          /* If the fallthru block is still next, nothing to do.  */
          if (RBI (bb)->next == e_fall->dest)
            continue;

          /* A fallthru to exit block.  */
          if (!RBI (bb)->next && e_fall->dest == EXIT_BLOCK_PTR)
            continue;
        }

      /* We got here if we need to add a new jump insn.  */
      nb = force_nonfallthru (e_fall);
      if (nb)
        {
          alloc_aux_for_block (nb, sizeof (struct reorder_block_def));
          RBI (nb)->eff_head = nb->head;
          RBI (nb)->eff_end = NEXT_INSN (nb->end);
          RBI (nb)->visited = 1;
          RBI (nb)->next = RBI (bb)->next;
          RBI (bb)->next = nb;
          /* Don't process this new block.  */
          bb = nb;
        }
    }
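
  /* At this point every block either falls through to the block that now
     follows it in the chain or ends in an explicit jump; fallthru edges
     that could not be preserved have been redirected through the
     compensation blocks created by force_nonfallthru above.  */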

  /* Put basic_block_info in the new order.  */

  bb = BASIC_BLOCK (0);
  index = 0;

  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Reordered sequence:\n");

  for (; bb; bb = RBI (bb)->next, index++)
    {
      if (rtl_dump_file)
        fprintf (rtl_dump_file, " %i %sbb %i freq %i\n", index,
                 bb->index >= old_n_basic_blocks ? "compensation " : "",
                 bb->index,
                 bb->frequency);

      bb->index = index;
      BASIC_BLOCK (index) = bb;
    }
}

/* Perform sanity checks on the insn chain.
   1. Check that next/prev pointers are consistent in both the forward and
      reverse direction.
   2. Count insns in chain, going both directions, and check if equal.
   3. Check that get_last_insn () returns the actual end of chain.  */

void
verify_insn_chain ()
{
  rtx x, prevx, nextx;
  int insn_cnt1, insn_cnt2;

  for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
       x != 0;
       prevx = x, insn_cnt1++, x = NEXT_INSN (x))
    if (PREV_INSN (x) != prevx)
      abort ();

  if (prevx != get_last_insn ())
    abort ();

  for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
       x != 0;
       nextx = x, insn_cnt2++, x = PREV_INSN (x))
    if (NEXT_INSN (x) != nextx)
      abort ();

  if (insn_cnt1 != insn_cnt2)
    abort ();
}

/* The block falling through to exit must be the last one in the reordered
   chain.  Ensure it is.  */

static void
fixup_fallthru_exit_predecessor ()
{
  edge e;
  basic_block bb = NULL;

  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    if (e->flags & EDGE_FALLTHRU)
      bb = e->src;

  if (bb && RBI (bb)->next)
    {
      basic_block c = BASIC_BLOCK (0);

      while (RBI (c)->next != bb)
        c = RBI (c)->next;

      RBI (c)->next = RBI (bb)->next;
      while (RBI (c)->next)
        c = RBI (c)->next;

      RBI (c)->next = bb;
      RBI (bb)->next = NULL;
    }
}
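
/* For example (block names are arbitrary), if the reorder chain is
   A -> B -> C and B is the block that falls through to the exit block,
   the chain becomes A -> C -> B, so B's fallthru edge still reaches the
   exit without needing a compensation jump.  */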

/* Main entry point to this module: initialize the datastructures for CFG
   layout changes.  */

void
cfg_layout_initialize ()
{
  alloc_aux_for_blocks (sizeof (struct reorder_block_def));

  scope_to_insns_initialize ();

  record_effective_endpoints ();
}

/* Finalize the changes: reorder insn list according to the sequence, enter
   compensation code, rebuild scope forest.  */

void
cfg_layout_finalize ()
{
  fixup_fallthru_exit_predecessor ();
  fixup_reorder_chain ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  scope_to_insns_finalize ();

  free_aux_for_blocks ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
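
/* Typical usage by a client pass (a sketch; the basic block reordering
   pass is one such client):

	cfg_layout_initialize ();
	... decide on a new block order and record it by setting
	    RBI (bb)->next for each basic block ...
	cfg_layout_finalize ();

   cfg_layout_finalize then rewrites the insn stream, adds any needed
   compensation jumps, and rebuilds the scope notes to match.  */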