Revert REG_CFA_TEMPORARY changes, bring in 32-bit fixes.
[official-gcc.git] / gcc / cfgbuild.c
blob4ee5a5a59b5629de9b92b46a484356562e2b5490
1 /* Control flow graph building code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2007, 2008
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "rtl.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "regs.h"
32 #include "flags.h"
33 #include "output.h"
34 #include "function.h"
35 #include "except.h"
36 #include "expr.h"
37 #include "diagnostic-core.h"
38 #include "toplev.h"
39 #include "timevar.h"
40 #include "sbitmap.h"
42 static void make_edges (basic_block, basic_block, int);
43 static void find_bb_boundaries (basic_block);
44 static void compute_outgoing_frequencies (basic_block);
46 /* Return true if insn is something that should be contained inside basic
47 block. */
49 bool
50 inside_basic_block_p (const_rtx insn)
52 switch (GET_CODE (insn))
54 case CODE_LABEL:
55 /* Avoid creating of basic block for jumptables. */
56 return (NEXT_INSN (insn) == 0
57 || !JUMP_P (NEXT_INSN (insn))
58 || (GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_VEC
59 && GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_DIFF_VEC));
61 case JUMP_INSN:
62 return (GET_CODE (PATTERN (insn)) != ADDR_VEC
63 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);
65 case CALL_INSN:
66 case INSN:
67 case DEBUG_INSN:
68 return true;
70 case BARRIER:
71 case NOTE:
72 return false;
74 default:
75 gcc_unreachable ();
79 /* Return true if INSN may cause control flow transfer, so it should be last in
80 the basic block. */
82 bool
83 control_flow_insn_p (const_rtx insn)
85 switch (GET_CODE (insn))
87 case NOTE:
88 case CODE_LABEL:
89 case DEBUG_INSN:
90 return false;
92 case JUMP_INSN:
93 /* Jump insn always causes control transfer except for tablejumps. */
94 return (GET_CODE (PATTERN (insn)) != ADDR_VEC
95 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);
97 case CALL_INSN:
98 /* Noreturn and sibling call instructions terminate the basic blocks
99 (but only if they happen unconditionally). */
100 if ((SIBLING_CALL_P (insn)
101 || find_reg_note (insn, REG_NORETURN, 0))
102 && GET_CODE (PATTERN (insn)) != COND_EXEC)
103 return true;
105 /* Call insn may return to the nonlocal goto handler. */
106 if (can_nonlocal_goto (insn))
107 return true;
108 break;
110 case INSN:
111 /* Treat trap instructions like noreturn calls (same provision). */
112 if (GET_CODE (PATTERN (insn)) == TRAP_IF
113 && XEXP (PATTERN (insn), 0) == const1_rtx)
114 return true;
115 if (!cfun->can_throw_non_call_exceptions)
116 return false;
117 break;
119 case BARRIER:
120 /* It is nonsense to reach barrier when looking for the
121 end of basic block, but before dead code is eliminated
122 this may happen. */
123 return false;
125 default:
126 gcc_unreachable ();
129 return can_throw_internal (insn);
133 /* Create an edge between two basic blocks. */
135 /* Create an edge from basic block SRC to LABEL. Cache edges in
136 EDGE_CACHE. If INSN is not NULL it is the jump insn which caused
137 this edge to be created. FLAGS are auxiliary information about the
138 edge that is accumulated between calls. */
140 static void
141 make_label_edge (sbitmap edge_cache, basic_block src, rtx label, rtx insn,
142 int flags)
144 edge e;
146 gcc_assert (LABEL_P (label));
148 /* If the label was never emitted, this insn is junk, but avoid a
149 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
150 as a result of a syntax error and a diagnostic has already been
151 printed. */
153 if (INSN_UID (label) == 0)
154 return;
156 e = cached_make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
158 if (e != NULL && insn != NULL_RTX)
160 rtx note = find_reg_note (insn, REG_BR_PROB, 0);
161 if (note != NULL_RTX)
162 e->probability = INTVAL (XEXP (note, 0));
166 /* Create the edges generated by INSN in REGION. */
168 void
169 rtl_make_eh_edge (sbitmap edge_cache, basic_block src, rtx insn)
171 eh_landing_pad lp = get_eh_landing_pad_from_rtx (insn);
173 if (lp)
175 rtx label = lp->landing_pad;
177 /* During initial rtl generation, use the post_landing_pad. */
178 if (label == NULL)
180 gcc_assert (lp->post_landing_pad);
181 label = label_rtx (lp->post_landing_pad);
184 make_label_edge (edge_cache, src, label, NULL_RTX,
185 EDGE_ABNORMAL | EDGE_EH
186 | (CALL_P (insn) ? EDGE_ABNORMAL_CALL : 0));
/* States of basic block as seen by find_many_sub_basic_blocks,
   stashed in the block's AUX field.  */
enum state {
  /* Basic blocks created via split_block belong to this state.
     make_edges will examine these basic blocks to see if we need to
     create edges going out of them.  */
  BLOCK_NEW = 0,

  /* Basic blocks that do not need examining belong to this state.
     These blocks will be left intact.  In particular, make_edges will
     not create edges going out of these basic blocks.  */
  BLOCK_ORIGINAL,

  /* Basic blocks that may need splitting (due to a label appearing in
     the middle, etc) belong to this state.  After splitting them,
     make_edges will create edges going out of them as needed.  */
  BLOCK_TO_SPLIT
};

#define STATE(BB) (enum state) ((size_t) (BB)->aux)
#define SET_STATE(BB, STATE) ((BB)->aux = (void *) (size_t) (STATE))

/* Used internally by purge_dead_tablejump_edges, ORed into state.  */
#define BLOCK_USED_BY_TABLEJUMP	32
#define FULL_STATE(BB) ((size_t) (BB)->aux)
215 /* Identify the edges going out of basic blocks between MIN and MAX,
216 inclusive, that have their states set to BLOCK_NEW or
217 BLOCK_TO_SPLIT.
219 UPDATE_P should be nonzero if we are updating CFG and zero if we
220 are building CFG from scratch. */
222 static void
223 make_edges (basic_block min, basic_block max, int update_p)
225 basic_block bb;
226 sbitmap edge_cache = NULL;
228 /* Heavy use of computed goto in machine-generated code can lead to
229 nearly fully-connected CFGs. In that case we spend a significant
230 amount of time searching the edge lists for duplicates. */
231 if (forced_labels || cfun->cfg->max_jumptable_ents > 100)
232 edge_cache = sbitmap_alloc (last_basic_block);
234 /* By nature of the way these get numbered, ENTRY_BLOCK_PTR->next_bb block
235 is always the entry. */
236 if (min == ENTRY_BLOCK_PTR->next_bb)
237 make_edge (ENTRY_BLOCK_PTR, min, EDGE_FALLTHRU);
239 FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
241 rtx insn, x;
242 enum rtx_code code;
243 edge e;
244 edge_iterator ei;
246 if (STATE (bb) == BLOCK_ORIGINAL)
247 continue;
249 /* If we have an edge cache, cache edges going out of BB. */
250 if (edge_cache)
252 sbitmap_zero (edge_cache);
253 if (update_p)
255 FOR_EACH_EDGE (e, ei, bb->succs)
256 if (e->dest != EXIT_BLOCK_PTR)
257 SET_BIT (edge_cache, e->dest->index);
261 if (LABEL_P (BB_HEAD (bb))
262 && LABEL_ALT_ENTRY_P (BB_HEAD (bb)))
263 cached_make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);
265 /* Examine the last instruction of the block, and discover the
266 ways we can leave the block. */
268 insn = BB_END (bb);
269 code = GET_CODE (insn);
271 /* A branch. */
272 if (code == JUMP_INSN)
274 rtx tmp;
276 /* Recognize a non-local goto as a branch outside the
277 current function. */
278 if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
281 /* Recognize a tablejump and do the right thing. */
282 else if (tablejump_p (insn, NULL, &tmp))
284 rtvec vec;
285 int j;
287 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
288 vec = XVEC (PATTERN (tmp), 0);
289 else
290 vec = XVEC (PATTERN (tmp), 1);
292 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
293 make_label_edge (edge_cache, bb,
294 XEXP (RTVEC_ELT (vec, j), 0), NULL_RTX, 0);
296 /* Some targets (eg, ARM) emit a conditional jump that also
297 contains the out-of-range target. Scan for these and
298 add an edge if necessary. */
299 if ((tmp = single_set (insn)) != NULL
300 && SET_DEST (tmp) == pc_rtx
301 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
302 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
303 make_label_edge (edge_cache, bb,
304 XEXP (XEXP (SET_SRC (tmp), 2), 0),
305 NULL_RTX, 0);
308 /* If this is a computed jump, then mark it as reaching
309 everything on the forced_labels list. */
310 else if (computed_jump_p (insn))
312 for (x = forced_labels; x; x = XEXP (x, 1))
313 make_label_edge (edge_cache, bb, XEXP (x, 0), insn,
314 EDGE_ABNORMAL);
317 /* Returns create an exit out. */
318 else if (returnjump_p (insn))
319 cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
321 /* Recognize asm goto and do the right thing. */
322 else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
324 int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
325 for (i = 0; i < n; ++i)
326 make_label_edge (edge_cache, bb,
327 XEXP (ASM_OPERANDS_LABEL (tmp, i), 0),
328 NULL_RTX, 0);
331 /* Otherwise, we have a plain conditional or unconditional jump. */
332 else
334 gcc_assert (JUMP_LABEL (insn));
335 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), insn, 0);
339 /* If this is a sibling call insn, then this is in effect a combined call
340 and return, and so we need an edge to the exit block. No need to
341 worry about EH edges, since we wouldn't have created the sibling call
342 in the first place. */
343 if (code == CALL_INSN && SIBLING_CALL_P (insn))
344 cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
345 EDGE_SIBCALL | EDGE_ABNORMAL);
347 /* If this is a CALL_INSN, then mark it as reaching the active EH
348 handler for this CALL_INSN. If we're handling non-call
349 exceptions then any insn can reach any of the active handlers.
350 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
351 else if (code == CALL_INSN || cfun->can_throw_non_call_exceptions)
353 /* Add any appropriate EH edges. */
354 rtl_make_eh_edge (edge_cache, bb, insn);
356 if (code == CALL_INSN && nonlocal_goto_handler_labels)
358 /* ??? This could be made smarter: in some cases it's possible
359 to tell that certain calls will not do a nonlocal goto.
360 For example, if the nested functions that do the nonlocal
361 gotos do not have their addresses taken, then only calls to
362 those functions or to other nested functions that use them
363 could possibly do nonlocal gotos. */
364 if (can_nonlocal_goto (insn))
365 for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
366 make_label_edge (edge_cache, bb, XEXP (x, 0), NULL_RTX,
367 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
371 /* Find out if we can drop through to the next block. */
372 insn = NEXT_INSN (insn);
373 e = find_edge (bb, EXIT_BLOCK_PTR);
374 if (e && e->flags & EDGE_FALLTHRU)
375 insn = NULL;
377 while (insn
378 && NOTE_P (insn)
379 && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK)
380 insn = NEXT_INSN (insn);
382 if (!insn)
383 cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
384 else if (bb->next_bb != EXIT_BLOCK_PTR)
386 if (insn == BB_HEAD (bb->next_bb))
387 cached_make_edge (edge_cache, bb, bb->next_bb, EDGE_FALLTHRU);
391 if (edge_cache)
392 sbitmap_vector_free (edge_cache);
395 static void
396 mark_tablejump_edge (rtx label)
398 basic_block bb;
400 gcc_assert (LABEL_P (label));
401 /* See comment in make_label_edge. */
402 if (INSN_UID (label) == 0)
403 return;
404 bb = BLOCK_FOR_INSN (label);
405 SET_STATE (bb, FULL_STATE (bb) | BLOCK_USED_BY_TABLEJUMP);
408 static void
409 purge_dead_tablejump_edges (basic_block bb, rtx table)
411 rtx insn = BB_END (bb), tmp;
412 rtvec vec;
413 int j;
414 edge_iterator ei;
415 edge e;
417 if (GET_CODE (PATTERN (table)) == ADDR_VEC)
418 vec = XVEC (PATTERN (table), 0);
419 else
420 vec = XVEC (PATTERN (table), 1);
422 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
423 mark_tablejump_edge (XEXP (RTVEC_ELT (vec, j), 0));
425 /* Some targets (eg, ARM) emit a conditional jump that also
426 contains the out-of-range target. Scan for these and
427 add an edge if necessary. */
428 if ((tmp = single_set (insn)) != NULL
429 && SET_DEST (tmp) == pc_rtx
430 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
431 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
432 mark_tablejump_edge (XEXP (XEXP (SET_SRC (tmp), 2), 0));
434 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
436 if (FULL_STATE (e->dest) & BLOCK_USED_BY_TABLEJUMP)
437 SET_STATE (e->dest, FULL_STATE (e->dest)
438 & ~(size_t) BLOCK_USED_BY_TABLEJUMP);
439 else if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
441 remove_edge (e);
442 continue;
444 ei_next (&ei);
448 /* Scan basic block BB for possible BB boundaries inside the block
449 and create new basic blocks in the progress. */
451 static void
452 find_bb_boundaries (basic_block bb)
454 basic_block orig_bb = bb;
455 rtx insn = BB_HEAD (bb);
456 rtx end = BB_END (bb), x;
457 rtx table;
458 rtx flow_transfer_insn = NULL_RTX;
459 edge fallthru = NULL;
461 if (insn == BB_END (bb))
462 return;
464 if (LABEL_P (insn))
465 insn = NEXT_INSN (insn);
467 /* Scan insn chain and try to find new basic block boundaries. */
468 while (1)
470 enum rtx_code code = GET_CODE (insn);
472 /* In case we've previously seen an insn that effects a control
473 flow transfer, split the block. */
474 if ((flow_transfer_insn || code == CODE_LABEL)
475 && inside_basic_block_p (insn))
477 fallthru = split_block (bb, PREV_INSN (insn));
478 if (flow_transfer_insn)
480 BB_END (bb) = flow_transfer_insn;
482 /* Clean up the bb field for the insns between the blocks. */
483 for (x = NEXT_INSN (flow_transfer_insn);
484 x != BB_HEAD (fallthru->dest);
485 x = NEXT_INSN (x))
486 if (!BARRIER_P (x))
487 set_block_for_insn (x, NULL);
490 bb = fallthru->dest;
491 remove_edge (fallthru);
492 flow_transfer_insn = NULL_RTX;
493 if (code == CODE_LABEL && LABEL_ALT_ENTRY_P (insn))
494 make_edge (ENTRY_BLOCK_PTR, bb, 0);
496 else if (code == BARRIER)
498 /* __builtin_unreachable () may cause a barrier to be emitted in
499 the middle of a BB. We need to split it in the same manner as
500 if the barrier were preceded by a control_flow_insn_p insn. */
501 if (!flow_transfer_insn)
502 flow_transfer_insn = prev_nonnote_insn_bb (insn);
505 if (control_flow_insn_p (insn))
506 flow_transfer_insn = insn;
507 if (insn == end)
508 break;
509 insn = NEXT_INSN (insn);
512 /* In case expander replaced normal insn by sequence terminating by
513 return and barrier, or possibly other sequence not behaving like
514 ordinary jump, we need to take care and move basic block boundary. */
515 if (flow_transfer_insn)
517 BB_END (bb) = flow_transfer_insn;
519 /* Clean up the bb field for the insns that do not belong to BB. */
520 x = flow_transfer_insn;
521 while (x != end)
523 x = NEXT_INSN (x);
524 if (!BARRIER_P (x))
525 set_block_for_insn (x, NULL);
529 /* We've possibly replaced the conditional jump by conditional jump
530 followed by cleanup at fallthru edge, so the outgoing edges may
531 be dead. */
532 purge_dead_edges (bb);
534 /* purge_dead_edges doesn't handle tablejump's, but if we have split the
535 basic block, we might need to kill some edges. */
536 if (bb != orig_bb && tablejump_p (BB_END (bb), NULL, &table))
537 purge_dead_tablejump_edges (bb, table);
540 /* Assume that frequency of basic block B is known. Compute frequencies
541 and probabilities of outgoing edges. */
543 static void
544 compute_outgoing_frequencies (basic_block b)
546 edge e, f;
547 edge_iterator ei;
549 if (EDGE_COUNT (b->succs) == 2)
551 rtx note = find_reg_note (BB_END (b), REG_BR_PROB, NULL);
552 int probability;
554 if (note)
556 probability = INTVAL (XEXP (note, 0));
557 e = BRANCH_EDGE (b);
558 e->probability = probability;
559 e->count = ((b->count * probability + REG_BR_PROB_BASE / 2)
560 / REG_BR_PROB_BASE);
561 f = FALLTHRU_EDGE (b);
562 f->probability = REG_BR_PROB_BASE - probability;
563 f->count = b->count - e->count;
564 return;
568 if (single_succ_p (b))
570 e = single_succ_edge (b);
571 e->probability = REG_BR_PROB_BASE;
572 e->count = b->count;
573 return;
575 guess_outgoing_edge_probabilities (b);
576 if (b->count)
577 FOR_EACH_EDGE (e, ei, b->succs)
578 e->count = ((b->count * e->probability + REG_BR_PROB_BASE / 2)
579 / REG_BR_PROB_BASE);
582 /* Assume that some pass has inserted labels or control flow
583 instructions within a basic block. Split basic blocks as needed
584 and create edges. */
586 void
587 find_many_sub_basic_blocks (sbitmap blocks)
589 basic_block bb, min, max;
591 FOR_EACH_BB (bb)
592 SET_STATE (bb,
593 TEST_BIT (blocks, bb->index) ? BLOCK_TO_SPLIT : BLOCK_ORIGINAL);
595 FOR_EACH_BB (bb)
596 if (STATE (bb) == BLOCK_TO_SPLIT)
597 find_bb_boundaries (bb);
599 FOR_EACH_BB (bb)
600 if (STATE (bb) != BLOCK_ORIGINAL)
601 break;
603 min = max = bb;
604 for (; bb != EXIT_BLOCK_PTR; bb = bb->next_bb)
605 if (STATE (bb) != BLOCK_ORIGINAL)
606 max = bb;
608 /* Now re-scan and wire in all edges. This expect simple (conditional)
609 jumps at the end of each new basic blocks. */
610 make_edges (min, max, 1);
612 /* Update branch probabilities. Expect only (un)conditional jumps
613 to be created with only the forward edges. */
614 if (profile_status != PROFILE_ABSENT)
615 FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
617 edge e;
618 edge_iterator ei;
620 if (STATE (bb) == BLOCK_ORIGINAL)
621 continue;
622 if (STATE (bb) == BLOCK_NEW)
624 bb->count = 0;
625 bb->frequency = 0;
626 FOR_EACH_EDGE (e, ei, bb->preds)
628 bb->count += e->count;
629 bb->frequency += EDGE_FREQUENCY (e);
633 compute_outgoing_frequencies (bb);
636 FOR_EACH_BB (bb)
637 SET_STATE (bb, 0);