/* Control flow graph analysis code for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* This file contains various simple utilities to analyze the CFG.  */
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "recog.h"
#include "toplev.h"
#include "obstack.h"
#include "tm_p.h"

/* Store the data structures necessary for depth-first search.  */
struct depth_first_search_dsS {
  /* stack for backtracking during the algorithm */
  basic_block *stack;

  /* number of edges in the stack.  That is, positions 0, ..., sp-1
     have edges.  */
  unsigned int sp;

  /* record of basic blocks already seen by depth-first search  */
  sbitmap visited_blocks;
};
typedef struct depth_first_search_dsS *depth_first_search_ds;

static void flow_dfs_compute_reverse_init
  PARAMS ((depth_first_search_ds));
static void flow_dfs_compute_reverse_add_bb
  PARAMS ((depth_first_search_ds, basic_block));
static basic_block flow_dfs_compute_reverse_execute
  PARAMS ((depth_first_search_ds));
static void flow_dfs_compute_reverse_finish
  PARAMS ((depth_first_search_ds));
static void remove_fake_successors PARAMS ((basic_block));
static bool need_fake_edge_p PARAMS ((rtx));
static bool keep_with_call_p PARAMS ((rtx));

/* Return true if the block has no effect and only forwards control flow to
   its single destination.  */

bool
forwarder_block_p (bb)
     basic_block bb;
{
  rtx insn;

  if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
      || !bb->succ || bb->succ->succ_next)
    return false;

  for (insn = bb->head; insn != bb->end; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && active_insn_p (insn))
      return false;

  return (!INSN_P (insn)
          || (GET_CODE (insn) == JUMP_INSN && simplejump_p (insn))
          || !active_insn_p (insn));
}

/* Return nonzero if we can reach target from src by falling through.  */

bool
can_fallthru (src, target)
     basic_block src, target;
{
  rtx insn = src->end;
  rtx insn2 = target->head;

  if (src->index + 1 == target->index && !active_insn_p (insn2))
    insn2 = next_active_insn (insn2);

  /* ??? Later we may add code to move jump tables offline.  */
  return next_active_insn (insn) == insn2;
}

/* Mark the back edges in DFS traversal.
   Return non-zero if a loop (natural or otherwise) is present.
   Inspired by Depth_First_Search_PP described in:

     Advanced Compiler Design and Implementation
     Steven Muchnick
     Morgan Kaufmann, 1997

   and heavily borrowed from flow_depth_first_order_compute.  */

bool
mark_dfs_back_edges ()
{
  edge *stack;
  int *pre;
  int *post;
  int sp;
  int prenum = 1;
  int postnum = 1;
  sbitmap visited;
  bool found = false;

  /* Allocate the preorder and postorder number arrays.  */
  pre = (int *) xcalloc (n_basic_blocks, sizeof (int));
  post = (int *) xcalloc (n_basic_blocks, sizeof (int));

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;
      e->flags &= ~EDGE_DFS_BACK;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          SET_BIT (visited, dest->index);

          pre[dest->index] = prenum++;
          if (dest->succ)
            {
              /* Since the DEST node has been visited for the first
                 time, check its successors.  */
              stack[sp++] = dest->succ;
            }
          else
            post[dest->index] = postnum++;
        }
      else
        {
          if (dest != EXIT_BLOCK_PTR && src != ENTRY_BLOCK_PTR
              && pre[src->index] >= pre[dest->index]
              && post[dest->index] == 0)
            e->flags |= EDGE_DFS_BACK, found = true;

          if (! e->succ_next && src != ENTRY_BLOCK_PTR)
            post[src->index] = postnum++;

          if (e->succ_next)
            stack[sp - 1] = e->succ_next;
          else
            sp--;
        }
    }

  free (pre);
  free (post);
  free (stack);
  sbitmap_free (visited);

  return found;
}

/* Return true if we need to add a fake edge to the exit block.
   Helper function for flow_call_edges_add.  */

static bool
need_fake_edge_p (insn)
     rtx insn;
{
  if (!INSN_P (insn))
    return false;

  if ((GET_CODE (insn) == CALL_INSN
       && !SIBLING_CALL_P (insn)
       && !find_reg_note (insn, REG_NORETURN, NULL)
       && !find_reg_note (insn, REG_ALWAYS_RETURN, NULL)
       && !CONST_OR_PURE_CALL_P (insn)))
    return true;

  return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
           && MEM_VOLATILE_P (PATTERN (insn)))
          || (GET_CODE (PATTERN (insn)) == PARALLEL
              && asm_noperands (insn) != -1
              && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
          || GET_CODE (PATTERN (insn)) == ASM_INPUT);
}

/* Return true if INSN should be kept in the same block as a preceding call.
   This is done for a single-set whose destination is a fixed register or
   whose source is the function return value.  This is a helper function for
   flow_call_edges_add.  */

static bool
keep_with_call_p (insn)
     rtx insn;
{
  rtx set;

  if (INSN_P (insn) && (set = single_set (insn)) != NULL)
    {
      if (GET_CODE (SET_DEST (set)) == REG
          && fixed_regs[REGNO (SET_DEST (set))]
          && general_operand (SET_SRC (set), VOIDmode))
        return true;
      if (GET_CODE (SET_SRC (set)) == REG
          && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
          && GET_CODE (SET_DEST (set)) == REG
          && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
        return true;
    }

  return false;
}

/* Add fake edges to the function exit for any non-constant and non-noreturn
   calls, and for volatile inline assembly, in the bitmap of blocks specified
   by BLOCKS or in the whole CFG if BLOCKS is zero.  Return the number of
   blocks that were split.

   The goal is to expose cases in which entering a basic block does not imply
   that all subsequent instructions must be executed.  */

int
flow_call_edges_add (blocks)
     sbitmap blocks;
{
  int i;
  int blocks_split = 0;
  int bb_num = 0;
  basic_block *bbs;
  bool check_last_block = false;

  /* Map bb indices into basic block pointers since split_block
     will renumber the basic blocks.  */

  bbs = xmalloc (n_basic_blocks * sizeof (*bbs));

  if (! blocks)
    {
      for (i = 0; i < n_basic_blocks; i++)
        bbs[bb_num++] = BASIC_BLOCK (i);

      check_last_block = true;
    }
  else
    EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
                               {
                                 bbs[bb_num++] = BASIC_BLOCK (i);
                                 if (i == n_basic_blocks - 1)
                                   check_last_block = true;
                               });

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = BASIC_BLOCK (n_basic_blocks - 1);
      rtx insn = bb->end;

      /* Back up past insns that must be kept in the same block as a call.  */
      while (insn != bb->head
             && keep_with_call_p (insn))
        insn = PREV_INSN (insn);

      if (need_fake_edge_p (insn))
        {
          edge e;

          for (e = bb->succ; e; e = e->succ_next)
            if (e->dest == EXIT_BLOCK_PTR)
              break;

          insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
          commit_edge_insertions ();
        }
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */

  for (i = 0; i < bb_num; i++)
    {
      basic_block bb = bbs[i];
      rtx insn;
      rtx prev_insn;

      for (insn = bb->end; ; insn = prev_insn)
        {
          prev_insn = PREV_INSN (insn);
          if (need_fake_edge_p (insn))
            {
              edge e;
              rtx split_at_insn = insn;

              /* Don't split the block between a call and an insn that should
                 remain in the same block as the call.  */
              if (GET_CODE (insn) == CALL_INSN)
                while (split_at_insn != bb->end
                       && keep_with_call_p (NEXT_INSN (split_at_insn)))
                  split_at_insn = NEXT_INSN (split_at_insn);

              /* The handling above of the final block before the epilogue
                 should be enough to verify that there is no edge to the exit
                 block in CFG already.  Calling make_edge in such case would
                 cause us to mark that edge as fake and remove it later.  */

#ifdef ENABLE_CHECKING
              if (split_at_insn == bb->end)
                for (e = bb->succ; e; e = e->succ_next)
                  if (e->dest == EXIT_BLOCK_PTR)
                    abort ();
#endif

              /* Note that the following may create a new basic block
                 and renumber the existing basic blocks.  */
              e = split_block (bb, split_at_insn);
              if (e)
                blocks_split++;

              make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
            }

          if (insn == bb->head)
            break;
        }
    }

  if (blocks_split)
    verify_flow_info ();

  free (bbs);
  return blocks_split;
}

/* Find unreachable blocks.  An unreachable block will have 0 in
   the reachable bit in block->flags.  A non-zero value indicates the
   block is reachable.  */

void
find_unreachable_blocks ()
{
  edge e;
  int i, n;
  basic_block *tos, *worklist;

  n = n_basic_blocks;
  tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);

  /* Clear all the reachability flags.  */

  for (i = 0; i < n; ++i)
    BASIC_BLOCK (i)->flags &= ~BB_REACHABLE;

  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */

  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      *tos++ = e->dest;

      /* Mark the block reachable.  */
      e->dest->flags |= BB_REACHABLE;
    }

  /* Iterate: find everything reachable from what we've already seen.  */

  while (tos != worklist)
    {
      basic_block b = *--tos;

      for (e = b->succ; e; e = e->succ_next)
        if (!(e->dest->flags & BB_REACHABLE))
          {
            *tos++ = e->dest;
            e->dest->flags |= BB_REACHABLE;
          }
    }

  free (worklist);
}

/* Functions to access an edge list with a vector representation.
   Enough data is kept such that given an index number, the
   pred and succ that edge represents can be determined, or
   given a pred and a succ, its index number can be returned.
   This allows algorithms which consume a lot of memory to
   represent the normally full matrix of edge (pred,succ) with a
   single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
   wasted space in the client code due to sparse flow graphs.  */
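
/* For example, a client needing per-edge data can allocate an array of
   NUM_EDGES (elist) entries and index it with
   EDGE_INDEX (elist, pred, succ); the macros INDEX_EDGE_PRED_BB and
   INDEX_EDGE_SUCC_BB map an index back to the edge's endpoints.  */
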
/* This function initializes the edge list.  Basically the entire
   flowgraph is processed, all edges are assigned a number,
   and the data structure is filled in.  */

struct edge_list *
create_edge_list ()
{
  struct edge_list *elist;
  edge e;
  int num_edges;
  int x;
  int block_count;

  block_count = n_basic_blocks + 2;   /* Include the entry and exit blocks.  */

  num_edges = 0;

  /* Determine the number of edges in the flow graph by counting successor
     edges on each basic block.  */
  for (x = 0; x < n_basic_blocks; x++)
    {
      basic_block bb = BASIC_BLOCK (x);

      for (e = bb->succ; e; e = e->succ_next)
        num_edges++;
    }

  /* Don't forget successors of the entry block.  */
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    num_edges++;

  elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
  elist->num_blocks = block_count;
  elist->num_edges = num_edges;
  elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);

  num_edges = 0;

  /* Follow successors of the entry block, and register these edges.  */
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    elist->index_to_edge[num_edges++] = e;

  for (x = 0; x < n_basic_blocks; x++)
    {
      basic_block bb = BASIC_BLOCK (x);

      /* Follow all successors of blocks, and register these edges.  */
      for (e = bb->succ; e; e = e->succ_next)
        elist->index_to_edge[num_edges++] = e;
    }

  return elist;
}

/* This function frees the memory associated with an edge list.  */

void
free_edge_list (elist)
     struct edge_list *elist;
{
  if (elist)
    {
      free (elist->index_to_edge);
      free (elist);
    }
}

/* This function provides debug output showing an edge list.  */

void
print_edge_list (f, elist)
     FILE *f;
     struct edge_list *elist;
{
  int x;

  fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
           elist->num_blocks - 2, elist->num_edges);

  for (x = 0; x < elist->num_edges; x++)
    {
      fprintf (f, " %-4d - edge(", x);
      if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
        fprintf (f, "entry,");
      else
        fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);

      if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
        fprintf (f, "exit)\n");
      else
        fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
    }
}

/* This function provides an internal consistency check of an edge list,
   verifying that all edges are present, and that there are no
   extra edges.  */

void
verify_edge_list (f, elist)
     FILE *f;
     struct edge_list *elist;
{
  int x, pred, succ, index;
  edge e;

  for (x = 0; x < n_basic_blocks; x++)
    {
      basic_block bb = BASIC_BLOCK (x);

      for (e = bb->succ; e; e = e->succ_next)
        {
          pred = e->src->index;
          succ = e->dest->index;
          index = EDGE_INDEX (elist, e->src, e->dest);
          if (index == EDGE_INDEX_NO_EDGE)
            {
              fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
              continue;
            }

          if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
            fprintf (f, "*p* Pred for index %d should be %d not %d\n",
                     index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
          if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
            fprintf (f, "*p* Succ for index %d should be %d not %d\n",
                     index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
        }
    }

  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      pred = e->src->index;
      succ = e->dest->index;
      index = EDGE_INDEX (elist, e->src, e->dest);
      if (index == EDGE_INDEX_NO_EDGE)
        {
          fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
          continue;
        }

      if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
        fprintf (f, "*p* Pred for index %d should be %d not %d\n",
                 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
      if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
        fprintf (f, "*p* Succ for index %d should be %d not %d\n",
                 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
    }

  /* We've verified that all the edges are in the list; now let's make sure
     there are no spurious edges in the list.  */

  for (pred = 0; pred < n_basic_blocks; pred++)
    for (succ = 0; succ < n_basic_blocks; succ++)
      {
        basic_block p = BASIC_BLOCK (pred);
        basic_block s = BASIC_BLOCK (succ);
        int found_edge = 0;

        for (e = p->succ; e; e = e->succ_next)
          if (e->dest == s)
            {
              found_edge = 1;
              break;
            }

        for (e = s->pred; e; e = e->pred_next)
          if (e->src == p)
            {
              found_edge = 1;
              break;
            }

        if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
            == EDGE_INDEX_NO_EDGE && found_edge != 0)
          fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
                   pred, succ);
        if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
            != EDGE_INDEX_NO_EDGE && found_edge == 0)
          fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
                   pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
                                           BASIC_BLOCK (succ)));
      }

  for (succ = 0; succ < n_basic_blocks; succ++)
    {
      basic_block p = ENTRY_BLOCK_PTR;
      basic_block s = BASIC_BLOCK (succ);
      int found_edge = 0;

      for (e = p->succ; e; e = e->succ_next)
        if (e->dest == s)
          {
            found_edge = 1;
            break;
          }

      for (e = s->pred; e; e = e->pred_next)
        if (e->src == p)
          {
            found_edge = 1;
            break;
          }

      if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
          == EDGE_INDEX_NO_EDGE && found_edge != 0)
        fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
                 succ);
      if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
          != EDGE_INDEX_NO_EDGE && found_edge == 0)
        fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
                 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
                                   BASIC_BLOCK (succ)));
    }

  for (pred = 0; pred < n_basic_blocks; pred++)
    {
      basic_block p = BASIC_BLOCK (pred);
      basic_block s = EXIT_BLOCK_PTR;
      int found_edge = 0;

      for (e = p->succ; e; e = e->succ_next)
        if (e->dest == s)
          {
            found_edge = 1;
            break;
          }

      for (e = s->pred; e; e = e->pred_next)
        if (e->src == p)
          {
            found_edge = 1;
            break;
          }

      if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
          == EDGE_INDEX_NO_EDGE && found_edge != 0)
        fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
                 pred);
      if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
          != EDGE_INDEX_NO_EDGE && found_edge == 0)
        fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
                 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
                                   EXIT_BLOCK_PTR));
    }
}

/* This routine will determine what, if any, edge there is between
   a specified predecessor and successor.  */

int
find_edge_index (edge_list, pred, succ)
     struct edge_list *edge_list;
     basic_block pred, succ;
{
  int x;

  for (x = 0; x < NUM_EDGES (edge_list); x++)
    if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
        && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
      return x;

  return (EDGE_INDEX_NO_EDGE);
}

/* Dump the list of basic blocks in the bitmap NODES.  */

void
flow_nodes_print (str, nodes, file)
     const char *str;
     const sbitmap nodes;
     FILE *file;
{
  int node;

  if (! nodes)
    return;

  fprintf (file, "%s { ", str);
  EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
  fputs ("}\n", file);
}

/* Dump the list of edges in the array EDGE_LIST.  */

void
flow_edge_list_print (str, edge_list, num_edges, file)
     const char *str;
     const edge *edge_list;
     int num_edges;
     FILE *file;
{
  int i;

  if (! edge_list)
    return;

  fprintf (file, "%s { ", str);
  for (i = 0; i < num_edges; i++)
    fprintf (file, "%d->%d ", edge_list[i]->src->index,
             edge_list[i]->dest->index);

  fputs ("}\n", file);
}

/* This routine will remove any fake successor edges for a basic block.
   When the edge is removed, it is also removed from whatever predecessor
   list it is in.  */

static void
remove_fake_successors (bb)
     basic_block bb;
{
  edge e;

  for (e = bb->succ; e; )
    {
      edge tmp = e;

      e = e->succ_next;
      if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
        remove_edge (tmp);
    }
}

/* This routine will remove all fake edges from the flow graph.  If
   we remove all fake successors, it will automatically remove all
   fake predecessors.  */

void
remove_fake_edges ()
{
  int x;

  for (x = 0; x < n_basic_blocks; x++)
    remove_fake_successors (BASIC_BLOCK (x));

  /* We've handled all successors except the entry block's.  */
  remove_fake_successors (ENTRY_BLOCK_PTR);
}

/* This function will add a fake edge between any block which has no
   successors, and the exit block.  Some data flow equations require these
   edges to exist.  */

void
add_noreturn_fake_exit_edges ()
{
  int x;

  for (x = 0; x < n_basic_blocks; x++)
    if (BASIC_BLOCK (x)->succ == NULL)
      make_single_succ_edge (BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
}

/* This function adds a fake edge from any infinite loop to the
   exit block.  Some optimizations require a path from each node to
   the exit node.

   See also Morgan, Figure 3.10, pp. 82-83.

   The current implementation is ugly, not attempting to minimize the
   number of inserted fake edges.  To reduce the number of fake edges
   to insert, add fake edges from _innermost_ loops containing only
   nodes not reachable from the exit block.  */

void
connect_infinite_loops_to_exit ()
{
  basic_block unvisited_block;
  struct depth_first_search_dsS dfs_ds;

  /* Perform depth-first search in the reverse graph to find nodes
     reachable from the exit block.  */
  flow_dfs_compute_reverse_init (&dfs_ds);
  flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);

  /* Repeatedly add fake edges, updating the unreachable nodes.  */
  while (1)
    {
      unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
      if (!unvisited_block)
        break;

      make_edge (unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
      flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
    }

  flow_dfs_compute_reverse_finish (&dfs_ds);
  return;
}

/* Compute reverse top sort order.  */

void
flow_reverse_top_sort_order_compute (rts_order)
     int *rts_order;
{
  edge *stack;
  int sp;
  int postnum = 0;
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          SET_BIT (visited, dest->index);

          if (dest->succ)
            /* Since the DEST node has been visited for the first
               time, check its successors.  */
            stack[sp++] = dest->succ;
          else
            rts_order[postnum++] = dest->index;
        }
      else
        {
          if (! e->succ_next && src != ENTRY_BLOCK_PTR)
            rts_order[postnum++] = src->index;

          if (e->succ_next)
            stack[sp - 1] = e->succ_next;
          else
            sp--;
        }
    }

  free (stack);
  sbitmap_free (visited);
}

/* Compute the depth first search order and store in the array
   DFS_ORDER if non-zero, marking the nodes visited in VISITED.  If
   RC_ORDER is non-zero, return the reverse completion number for each
   node.  Returns the number of nodes visited.  A depth first search
   tries to get as far away from the starting point as quickly as
   possible.  */

int
flow_depth_first_order_compute (dfs_order, rc_order)
     int *dfs_order;
     int *rc_order;
{
  edge *stack;
  int sp;
  int dfsnum = 0;
  int rcnum = n_basic_blocks - 1;
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          SET_BIT (visited, dest->index);

          if (dfs_order)
            dfs_order[dfsnum] = dest->index;

          dfsnum++;

          if (dest->succ)
            /* Since the DEST node has been visited for the first
               time, check its successors.  */
            stack[sp++] = dest->succ;
          else if (rc_order)
            /* There are no successors for the DEST node so assign
               its reverse completion number.  */
            rc_order[rcnum--] = dest->index;
        }
      else
        {
          if (! e->succ_next && src != ENTRY_BLOCK_PTR
              && rc_order)
            /* There are no more successors for the SRC node
               so assign its reverse completion number.  */
            rc_order[rcnum--] = src->index;

          if (e->succ_next)
            stack[sp - 1] = e->succ_next;
          else
            sp--;
        }
    }

  free (stack);
  sbitmap_free (visited);

  /* The number of nodes visited should not be greater than
     n_basic_blocks.  */
  if (dfsnum > n_basic_blocks)
    abort ();

  /* There are some nodes left in the CFG that are unreachable.  */
  if (dfsnum < n_basic_blocks)
    abort ();

  return dfsnum;
}

struct dfst_node
{
  unsigned nnodes;
  struct dfst_node **node;
  struct dfst_node *up;
};

/* Compute a preorder traversal ordering such that a sub-tree which
   is the source of a cross edge appears before the sub-tree which is
   the destination of the cross edge.  This allows for easy detection
   of all the entry blocks for a loop.

   The ordering is computed by:

     1) Generating a depth first spanning tree.

     2) Walking the resulting tree from right to left.  */

void
flow_preorder_transversal_compute (pot_order)
     int *pot_order;
{
  edge e;
  edge *stack;
  int i;
  int max_successors;
  int sp;
  sbitmap visited;
  struct dfst_node *node;
  struct dfst_node *dfst;

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate the tree.  */
  dfst = (struct dfst_node *) xcalloc (n_basic_blocks,
                                       sizeof (struct dfst_node));

  for (i = 0; i < n_basic_blocks; i++)
    {
      max_successors = 0;
      for (e = BASIC_BLOCK (i)->succ; e; e = e->succ_next)
        max_successors++;

      dfst[i].node
        = (max_successors
           ? (struct dfst_node **) xcalloc (max_successors,
                                            sizeof (struct dfst_node *))
           : NULL);
    }

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          SET_BIT (visited, dest->index);

          /* Add the destination to the preorder tree.  */
          if (src != ENTRY_BLOCK_PTR)
            {
              dfst[src->index].node[dfst[src->index].nnodes++]
                = &dfst[dest->index];
              dfst[dest->index].up = &dfst[src->index];
            }

          if (dest->succ)
            /* Since the DEST node has been visited for the first
               time, check its successors.  */
            stack[sp++] = dest->succ;
        }

      else if (e->succ_next)
        stack[sp - 1] = e->succ_next;
      else
        sp--;
    }

  free (stack);
  sbitmap_free (visited);

  /* Record the preorder traversal order by
     walking the tree from right to left.  */

  i = 0;
  node = &dfst[0];
  pot_order[i++] = 0;

  while (node)
    {
      if (node->nnodes)
        {
          node = node->node[--node->nnodes];
          pot_order[i++] = node - dfst;
        }
      else
        node = node->up;
    }

  /* Free the tree.  */

  for (i = 0; i < n_basic_blocks; i++)
    if (dfst[i].node)
      free (dfst[i].node);

  free (dfst);
}

/* Compute the depth first search order on the _reverse_ graph and
   store in the array DFS_ORDER, marking the nodes visited in VISITED.
   Returns the number of nodes visited.

   The computation is split into three pieces:

   flow_dfs_compute_reverse_init () creates the necessary data
   structures.

   flow_dfs_compute_reverse_add_bb () adds a basic block to the data
   structures.  The block will start the search.

   flow_dfs_compute_reverse_execute () continues (or starts) the
   search using the block on the top of the stack, stopping when the
   stack is empty.

   flow_dfs_compute_reverse_finish () destroys the necessary data
   structures.

   Thus, the user will probably call ..._init(), call ..._add_bb() to
   add a beginning basic block to the stack, call ..._execute(),
   possibly add another bb to the stack and again call ..._execute(),
   ..., and finally call _finish().  */
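
/* connect_infinite_loops_to_exit, above, is an example of this
   protocol: it calls ..._init (), seeds the search with the exit
   block via ..._add_bb (), repeatedly calls ..._execute () and adds
   each returned unvisited block back with ..._add_bb (), and finally
   calls ..._finish ().  */
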
/* Initialize the data structures used for depth-first search on the
   reverse graph.  If INITIALIZE_STACK is nonzero, the exit block is
   added to the basic block stack.  DATA is the current depth-first
   search context.  If INITIALIZE_STACK is non-zero, there is an
   element on the stack.  */

static void
flow_dfs_compute_reverse_init (data)
     depth_first_search_ds data;
{
  /* Allocate stack for back-tracking up CFG.  */
  data->stack = (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
                                         * sizeof (basic_block));
  data->sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  data->visited_blocks = sbitmap_alloc (n_basic_blocks - (INVALID_BLOCK + 1));

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (data->visited_blocks);

  return;
}

/* Add the specified basic block to the top of the dfs data
   structures.  When the search continues, it will start at the
   block.  */

static void
flow_dfs_compute_reverse_add_bb (data, bb)
     depth_first_search_ds data;
     basic_block bb;
{
  data->stack[data->sp++] = bb;
  SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
}

/* Continue the depth-first search through the reverse graph starting with the
   block at the stack's top and ending when the stack is empty.  Visited nodes
   are marked.  Returns an unvisited basic block, or NULL if there is none
   available.  */

static basic_block
flow_dfs_compute_reverse_execute (data)
     depth_first_search_ds data;
{
  basic_block bb;
  edge e;
  int i;

  while (data->sp > 0)
    {
      bb = data->stack[--data->sp];

      /* Perform depth-first search on adjacent vertices.  */
      for (e = bb->pred; e; e = e->pred_next)
        if (!TEST_BIT (data->visited_blocks,
                       e->src->index - (INVALID_BLOCK + 1)))
          flow_dfs_compute_reverse_add_bb (data, e->src);
    }

  /* Determine if there are unvisited basic blocks.  */
  for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0; )
    if (!TEST_BIT (data->visited_blocks, i))
      return BASIC_BLOCK (i + (INVALID_BLOCK + 1));

  return NULL;
}

/* Destroy the data structures needed for depth-first search on the
   reverse graph.  */

static void
flow_dfs_compute_reverse_finish (data)
     depth_first_search_ds data;
{
  free (data->stack);
  sbitmap_free (data->visited_blocks);
}