/* Control flow graph analysis code for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* This file contains various simple utilities to analyze the CFG.  */
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "toplev.h"

#include "obstack.h"

/* Store the data structures necessary for depth-first search.  */
struct depth_first_search_dsS {
  /* stack for backtracking during the algorithm */
  basic_block *stack;

  /* number of edges in the stack.  That is, positions 0, ..., sp-1
     have edges.  */
  unsigned int sp;

  /* record of basic blocks already seen by depth-first search */
  sbitmap visited_blocks;
};
typedef struct depth_first_search_dsS *depth_first_search_ds;

static void flow_dfs_compute_reverse_init
  PARAMS ((depth_first_search_ds));
static void flow_dfs_compute_reverse_add_bb
  PARAMS ((depth_first_search_ds, basic_block));
static basic_block flow_dfs_compute_reverse_execute
  PARAMS ((depth_first_search_ds));
static void flow_dfs_compute_reverse_finish
  PARAMS ((depth_first_search_ds));
static void remove_fake_successors PARAMS ((basic_block));
static bool need_fake_edge_p PARAMS ((rtx));

/* Return true if the block has no effect and only forwards control flow to
   its single destination.  */

bool
forwarder_block_p (bb)
     basic_block bb;
{
  rtx insn = bb->head;
  if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
      || !bb->succ || bb->succ->succ_next)
    return false;

  while (insn != bb->end)
    {
      if (active_insn_p (insn))
        return false;
      insn = NEXT_INSN (insn);
    }
  return (!active_insn_p (insn)
          || (GET_CODE (insn) == JUMP_INSN && onlyjump_p (insn)));
}

/* Return nonzero if we can reach target from src by falling through.  */

bool
can_fallthru (src, target)
     basic_block src, target;
{
  rtx insn = src->end;
  rtx insn2 = target->head;

  if (src->index + 1 == target->index && !active_insn_p (insn2))
    insn2 = next_active_insn (insn2);
  /* ??? Later we may add code to move jump tables offline.  */
  return next_active_insn (insn) == insn2;
}

/* Mark the back edges in DFS traversal.
   Return non-zero if a loop (natural or otherwise) is present.
   Inspired by Depth_First_Search_PP described in:

     Advanced Compiler Design and Implementation
     Steven Muchnick
     Morgan Kaufmann, 1997

   and heavily borrowed from flow_depth_first_order_compute.  */

bool
mark_dfs_back_edges ()
{
  edge *stack;
  int *pre;
  int *post;
  int sp;
  int prenum = 1;
  int postnum = 1;
  sbitmap visited;
  bool found = false;

  /* Allocate the preorder and postorder number arrays.  */
  pre = (int *) xcalloc (n_basic_blocks, sizeof (int));
  post = (int *) xcalloc (n_basic_blocks, sizeof (int));

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;
      e->flags &= ~EDGE_DFS_BACK;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          SET_BIT (visited, dest->index);

          pre[dest->index] = prenum++;

          if (dest->succ)
            {
              /* Since the DEST node has been visited for the first
                 time, check its successors.  */
              stack[sp++] = dest->succ;
            }
          else
            post[dest->index] = postnum++;
        }
      else
        {
          if (dest != EXIT_BLOCK_PTR && src != ENTRY_BLOCK_PTR
              && pre[src->index] >= pre[dest->index]
              && post[dest->index] == 0)
            e->flags |= EDGE_DFS_BACK, found = true;

          if (! e->succ_next && src != ENTRY_BLOCK_PTR)
            post[src->index] = postnum++;

          if (e->succ_next)
            stack[sp - 1] = e->succ_next;
          else
            sp--;
        }
    }

  free (pre);
  free (post);
  free (stack);
  sbitmap_free (visited);

  return found;
}
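
/* Illustrative only: a hypothetical helper (not part of this file's
   interface) showing how a client might consume the EDGE_DFS_BACK marks
   left by mark_dfs_back_edges.  Kept inside #if 0 so it is never compiled.  */
#if 0
static void
example_report_back_edges ()
{
  int i;

  if (mark_dfs_back_edges ())
    for (i = 0; i < n_basic_blocks; i++)
      {
        edge e;

        /* Scan every outgoing edge and report the ones marked as back edges.  */
        for (e = BASIC_BLOCK (i)->succ; e; e = e->succ_next)
          if (e->flags & EDGE_DFS_BACK)
            fprintf (stderr, "back edge %d -> %d\n",
                     e->src->index, e->dest->index);
      }
}
#endif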

/* Return true if we need to add fake edge to exit.
   Helper function for flow_call_edges_add.  */

static bool
need_fake_edge_p (insn)
     rtx insn;
{
  if (!INSN_P (insn))
    return false;

  if ((GET_CODE (insn) == CALL_INSN
       && !SIBLING_CALL_P (insn)
       && !find_reg_note (insn, REG_NORETURN, NULL)
       && !find_reg_note (insn, REG_ALWAYS_RETURN, NULL)
       && !CONST_OR_PURE_CALL_P (insn)))
    return true;

  return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
           && MEM_VOLATILE_P (PATTERN (insn)))
          || (GET_CODE (PATTERN (insn)) == PARALLEL
              && asm_noperands (insn) != -1
              && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
          || GET_CODE (PATTERN (insn)) == ASM_INPUT);
}

/* Add fake edges to the function exit for any non constant and non noreturn
   calls, volatile inline assembly in the bitmap of blocks specified by
   BLOCKS or to the whole CFG if BLOCKS is zero.  Return the number of blocks
   that were split.

   The goal is to expose cases in which entering a basic block does not imply
   that all subsequent instructions must be executed.  */

int
flow_call_edges_add (blocks)
     sbitmap blocks;
{
  int i;
  int blocks_split = 0;
  int bb_num = 0;
  basic_block *bbs;
  bool check_last_block = false;

  /* Map bb indices into basic block pointers since split_block
     will renumber the basic blocks.  */

  bbs = xmalloc (n_basic_blocks * sizeof (*bbs));

  if (! blocks)
    {
      for (i = 0; i < n_basic_blocks; i++)
        bbs[bb_num++] = BASIC_BLOCK (i);
      check_last_block = true;
    }
  else
    {
      EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
        {
          bbs[bb_num++] = BASIC_BLOCK (i);
          if (i == n_basic_blocks - 1)
            check_last_block = true;
        });
    }

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block
      && need_fake_edge_p (BASIC_BLOCK (n_basic_blocks - 1)->end))
    {
      edge e;
      for (e = BASIC_BLOCK (n_basic_blocks - 1)->succ; e; e = e->succ_next)
        if (e->dest == EXIT_BLOCK_PTR)
          break;
      insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
      commit_edge_insertions ();
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */

  for (i = 0; i < bb_num; i++)
    {
      basic_block bb = bbs[i];
      rtx insn;
      rtx prev_insn;

      for (insn = bb->end; ; insn = prev_insn)
        {
          prev_insn = PREV_INSN (insn);
          if (need_fake_edge_p (insn))
            {
              edge e;

              /* The above condition should be enough to verify that there
                 is no edge to the exit block in CFG already.  Calling
                 make_edge in such a case would make us mark that edge as
                 fake and remove it later.  */
#ifdef ENABLE_CHECKING
              if (insn == bb->end)
                for (e = bb->succ; e; e = e->succ_next)
                  if (e->dest == EXIT_BLOCK_PTR)
                    abort ();
#endif

              /* Note that the following may create a new basic block
                 and renumber the existing basic blocks.  */
              e = split_block (bb, insn);
              if (e)
                blocks_split++;

              make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
            }
          if (insn == bb->head)
            break;
        }
    }

  if (blocks_split)
    verify_flow_info ();

  free (bbs);
  return blocks_split;
}
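
/* Illustrative only: a hypothetical caller of flow_call_edges_add.  Passing
   a null sbitmap asks for the whole CFG to be processed, as described in
   the comment above the function.  */
#if 0
static void
example_expose_call_edges ()
{
  int n_split = flow_call_edges_add (NULL);

  fprintf (stderr, "%d basic blocks were split\n", n_split);
}
#endif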

/* Find unreachable blocks.  An unreachable block will have 0 in
   the reachable bit in block->flags.  A non-zero value indicates the
   block is reachable.  */

void
find_unreachable_blocks ()
{
  edge e;
  int i, n;
  basic_block *tos, *worklist;

  n = n_basic_blocks;
  tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);

  /* Clear all the reachability flags.  */

  for (i = 0; i < n; ++i)
    BASIC_BLOCK (i)->flags &= ~BB_REACHABLE;

  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */

  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      *tos++ = e->dest;

      /* Mark the block reachable.  */
      e->dest->flags |= BB_REACHABLE;
    }

  /* Iterate: find everything reachable from what we've already seen.  */

  while (tos != worklist)
    {
      basic_block b = *--tos;

      for (e = b->succ; e; e = e->succ_next)
        if (!(e->dest->flags & BB_REACHABLE))
          {
            *tos++ = e->dest;
            e->dest->flags |= BB_REACHABLE;
          }
    }

  free (worklist);
}
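
/* Illustrative only: a hypothetical reader of the BB_REACHABLE flag that
   find_unreachable_blocks leaves in block->flags.  */
#if 0
static int
example_count_unreachable_blocks ()
{
  int i, count = 0;

  find_unreachable_blocks ();
  for (i = 0; i < n_basic_blocks; i++)
    if (!(BASIC_BLOCK (i)->flags & BB_REACHABLE))
      count++;

  return count;
}
#endif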

/* Functions to access an edge list with a vector representation.
   Enough data is kept such that given an index number, the
   pred and succ that edge represents can be determined, or
   given a pred and a succ, its index number can be returned.
   This allows algorithms which consume a lot of memory to
   represent the normally full matrix of edge (pred,succ) with a
   single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
   wasted space in the client code due to sparse flow graphs.  */

/* This function initializes the edge list.  Basically the entire
   flowgraph is processed, and all edges are assigned a number,
   and the data structure is filled in.  */

struct edge_list *
create_edge_list ()
{
  struct edge_list *elist;
  edge e;
  int num_edges;
  int x;
  int block_count;

  block_count = n_basic_blocks + 2;   /* Include the entry and exit blocks.  */

  num_edges = 0;

  /* Determine the number of edges in the flow graph by counting successor
     edges on each basic block.  */
  for (x = 0; x < n_basic_blocks; x++)
    {
      basic_block bb = BASIC_BLOCK (x);

      for (e = bb->succ; e; e = e->succ_next)
        num_edges++;
    }
  /* Don't forget successors of the entry block.  */
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    num_edges++;

  elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
  elist->num_blocks = block_count;
  elist->num_edges = num_edges;
  elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);

  num_edges = 0;

  /* Follow successors of the entry block, and register these edges.  */
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      elist->index_to_edge[num_edges] = e;
      num_edges++;
    }

  for (x = 0; x < n_basic_blocks; x++)
    {
      basic_block bb = BASIC_BLOCK (x);

      /* Follow all successors of blocks, and register these edges.  */
      for (e = bb->succ; e; e = e->succ_next)
        {
          elist->index_to_edge[num_edges] = e;
          num_edges++;
        }
    }

  return elist;
}
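
/* Illustrative only: a hypothetical client of the edge list, sketched to
   show the intended protocol - build the list once, map edges to indices
   with the INDEX_EDGE accessors, then free it.  */
#if 0
static void
example_use_edge_list ()
{
  struct edge_list *elist = create_edge_list ();
  int x;

  /* Walk every registered edge by index and inspect its endpoints.  */
  for (x = 0; x < NUM_EDGES (elist); x++)
    if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
      fprintf (stderr, "edge %d reaches the exit block\n", x);

  free_edge_list (elist);
}
#endif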

/* This function frees the memory associated with an edge list.  */

void
free_edge_list (elist)
     struct edge_list *elist;
{
  if (elist)
    {
      free (elist->index_to_edge);
      free (elist);
    }
}

/* This function provides debug output showing an edge list.  */

void
print_edge_list (f, elist)
     FILE *f;
     struct edge_list *elist;
{
  int x;
  fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
           elist->num_blocks - 2, elist->num_edges);

  for (x = 0; x < elist->num_edges; x++)
    {
      fprintf (f, " %-4d - edge(", x);
      if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
        fprintf (f, "entry,");
      else
        fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);

      if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
        fprintf (f, "exit)\n");
      else
        fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
    }
}

/* This function provides an internal consistency check of an edge list,
   verifying that all edges are present, and that there are no
   extra edges.  */

void
verify_edge_list (f, elist)
     FILE *f;
     struct edge_list *elist;
{
  int x, pred, succ, index;
  edge e;

  for (x = 0; x < n_basic_blocks; x++)
    {
      basic_block bb = BASIC_BLOCK (x);

      for (e = bb->succ; e; e = e->succ_next)
        {
          pred = e->src->index;
          succ = e->dest->index;
          index = EDGE_INDEX (elist, e->src, e->dest);
          if (index == EDGE_INDEX_NO_EDGE)
            {
              fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
              continue;
            }
          if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
            fprintf (f, "*p* Pred for index %d should be %d not %d\n",
                     index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
          if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
            fprintf (f, "*p* Succ for index %d should be %d not %d\n",
                     index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
        }
    }
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      pred = e->src->index;
      succ = e->dest->index;
      index = EDGE_INDEX (elist, e->src, e->dest);
      if (index == EDGE_INDEX_NO_EDGE)
        {
          fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
          continue;
        }
      if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
        fprintf (f, "*p* Pred for index %d should be %d not %d\n",
                 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
      if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
        fprintf (f, "*p* Succ for index %d should be %d not %d\n",
                 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
    }
  /* We've verified that all the edges are in the list, now let's make sure
     there are no spurious edges in the list.  */

  for (pred = 0; pred < n_basic_blocks; pred++)
    for (succ = 0; succ < n_basic_blocks; succ++)
      {
        basic_block p = BASIC_BLOCK (pred);
        basic_block s = BASIC_BLOCK (succ);

        int found_edge = 0;

        for (e = p->succ; e; e = e->succ_next)
          if (e->dest == s)
            {
              found_edge = 1;
              break;
            }
        for (e = s->pred; e; e = e->pred_next)
          if (e->src == p)
            {
              found_edge = 1;
              break;
            }
        if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
            == EDGE_INDEX_NO_EDGE && found_edge != 0)
          fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
                   pred, succ);
        if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
            != EDGE_INDEX_NO_EDGE && found_edge == 0)
          fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
                   pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
                                           BASIC_BLOCK (succ)));
      }
  for (succ = 0; succ < n_basic_blocks; succ++)
    {
      basic_block p = ENTRY_BLOCK_PTR;
      basic_block s = BASIC_BLOCK (succ);

      int found_edge = 0;

      for (e = p->succ; e; e = e->succ_next)
        if (e->dest == s)
          {
            found_edge = 1;
            break;
          }
      for (e = s->pred; e; e = e->pred_next)
        if (e->src == p)
          {
            found_edge = 1;
            break;
          }
      if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
          == EDGE_INDEX_NO_EDGE && found_edge != 0)
        fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
                 succ);
      if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
          != EDGE_INDEX_NO_EDGE && found_edge == 0)
        fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
                 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
                                   BASIC_BLOCK (succ)));
    }
  for (pred = 0; pred < n_basic_blocks; pred++)
    {
      basic_block p = BASIC_BLOCK (pred);
      basic_block s = EXIT_BLOCK_PTR;

      int found_edge = 0;

      for (e = p->succ; e; e = e->succ_next)
        if (e->dest == s)
          {
            found_edge = 1;
            break;
          }
      for (e = s->pred; e; e = e->pred_next)
        if (e->src == p)
          {
            found_edge = 1;
            break;
          }
      if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
          == EDGE_INDEX_NO_EDGE && found_edge != 0)
        fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
                 pred);
      if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
          != EDGE_INDEX_NO_EDGE && found_edge == 0)
        fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
                 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
                                   EXIT_BLOCK_PTR));
    }
}

/* This routine will determine what, if any, edge there is between
   a specified predecessor and successor.  */

int
find_edge_index (edge_list, pred, succ)
     struct edge_list *edge_list;
     basic_block pred, succ;
{
  int x;
  for (x = 0; x < NUM_EDGES (edge_list); x++)
    {
      if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
          && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
        return x;
    }
  return (EDGE_INDEX_NO_EDGE);
}

/* Dump the list of basic blocks in the bitmap NODES.  */

void
flow_nodes_print (str, nodes, file)
     const char *str;
     const sbitmap nodes;
     FILE *file;
{
  int node;

  if (! nodes)
    return;

  fprintf (file, "%s { ", str);
  EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
  fputs ("}\n", file);
}

/* Dump the list of edges in the array EDGE_LIST.  */

void
flow_edge_list_print (str, edge_list, num_edges, file)
     const char *str;
     const edge *edge_list;
     int num_edges;
     FILE *file;
{
  int i;

  if (! edge_list)
    return;

  fprintf (file, "%s { ", str);
  for (i = 0; i < num_edges; i++)
    fprintf (file, "%d->%d ", edge_list[i]->src->index,
             edge_list[i]->dest->index);
  fputs ("}\n", file);
}

/* This routine will remove any fake successor edges for a basic block.
   When the edge is removed, it is also removed from whatever predecessor
   list it is in.  */

static void
remove_fake_successors (bb)
     basic_block bb;
{
  edge e;
  for (e = bb->succ; e;)
    {
      edge tmp = e;
      e = e->succ_next;
      if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
        remove_edge (tmp);
    }
}

/* This routine will remove all fake edges from the flow graph.  If
   we remove all fake successors, it will automatically remove all
   fake predecessors.  */

void
remove_fake_edges ()
{
  int x;

  for (x = 0; x < n_basic_blocks; x++)
    remove_fake_successors (BASIC_BLOCK (x));

  /* We've handled all successors except the entry block's.  */
  remove_fake_successors (ENTRY_BLOCK_PTR);
}

/* This function will add a fake edge between any block which has no
   successors, and the exit block.  Some data flow equations require these
   edges to exist.  */

void
add_noreturn_fake_exit_edges ()
{
  int x;

  for (x = 0; x < n_basic_blocks; x++)
    if (BASIC_BLOCK (x)->succ == NULL)
      make_single_succ_edge (BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
}
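
/* Illustrative only: the fake exit edges added above are meant to be
   temporary, so a typical (hypothetical) client brackets its analysis with
   the pair of calls sketched here.  */
#if 0
static void
example_with_fake_exit_edges ()
{
  add_noreturn_fake_exit_edges ();
  /* ... solve a dataflow problem that needs every block to reach exit ...  */
  remove_fake_edges ();
}
#endif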

/* This function adds a fake edge from any infinite loop to the
   exit block.  Some optimizations require a path from each node to
   the exit node.

   See also Morgan, Figure 3.10, pp. 82-83.

   The current implementation is ugly, not attempting to minimize the
   number of inserted fake edges.  To reduce the number of fake edges
   to insert, add fake edges from _innermost_ loops containing only
   nodes not reachable from the exit block.  */

void
connect_infinite_loops_to_exit ()
{
  basic_block unvisited_block;

  /* Perform depth-first search in the reverse graph to find nodes
     reachable from the exit block.  */
  struct depth_first_search_dsS dfs_ds;

  flow_dfs_compute_reverse_init (&dfs_ds);
  flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);

  /* Repeatedly add fake edges, updating the unreachable nodes.  */
  while (1)
    {
      unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
      if (!unvisited_block)
        break;
      make_edge (unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
      flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
    }

  flow_dfs_compute_reverse_finish (&dfs_ds);

  return;
}

/* Compute reverse top sort order.  */

void
flow_reverse_top_sort_order_compute (rts_order)
     int *rts_order;
{
  edge *stack;
  int sp;
  int postnum = 0;
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          SET_BIT (visited, dest->index);

          if (dest->succ)
            {
              /* Since the DEST node has been visited for the first
                 time, check its successors.  */
              stack[sp++] = dest->succ;
            }
          else
            rts_order[postnum++] = dest->index;
        }
      else
        {
          if (! e->succ_next && src != ENTRY_BLOCK_PTR)
            rts_order[postnum++] = src->index;

          if (e->succ_next)
            stack[sp - 1] = e->succ_next;
          else
            sp--;
        }
    }

  free (stack);
  sbitmap_free (visited);
}

/* Compute the depth first search order and store in the array
   DFS_ORDER if non-zero, marking the nodes visited in VISITED.  If
   RC_ORDER is non-zero, return the reverse completion number for each
   node.  Returns the number of nodes visited.  A depth first search
   tries to get as far away from the starting point as quickly as
   possible.  */

int
flow_depth_first_order_compute (dfs_order, rc_order)
     int *dfs_order;
     int *rc_order;
{
  edge *stack;
  int sp;
  int dfsnum = 0;
  int rcnum = n_basic_blocks - 1;
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          SET_BIT (visited, dest->index);

          if (dfs_order)
            dfs_order[dfsnum++] = dest->index;

          if (dest->succ)
            {
              /* Since the DEST node has been visited for the first
                 time, check its successors.  */
              stack[sp++] = dest->succ;
            }
          else
            {
              /* There are no successors for the DEST node so assign
                 its reverse completion number.  */
              if (rc_order)
                rc_order[rcnum--] = dest->index;
            }
        }
      else
        {
          if (! e->succ_next && src != ENTRY_BLOCK_PTR)
            {
              /* There are no more successors for the SRC node
                 so assign its reverse completion number.  */
              if (rc_order)
                rc_order[rcnum--] = src->index;
            }

          if (e->succ_next)
            stack[sp - 1] = e->succ_next;
          else
            sp--;
        }
    }

  free (stack);
  sbitmap_free (visited);

  /* The number of nodes visited should not be greater than
     n_basic_blocks.  */
  if (dfsnum > n_basic_blocks)
    abort ();

  /* There are some nodes left in the CFG that are unreachable.  */
  if (dfsnum < n_basic_blocks)
    abort ();
  return dfsnum;
}
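
/* Illustrative only: a hypothetical caller of flow_depth_first_order_compute.
   Each array must hold n_basic_blocks entries; either pointer may be null
   if that ordering is not needed.  */
#if 0
static void
example_compute_dfs_order ()
{
  int *dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
  int *rc_order = (int *) xmalloc (n_basic_blocks * sizeof (int));

  flow_depth_first_order_compute (dfs_order, rc_order);
  /* ... use the orderings ...  */

  free (dfs_order);
  free (rc_order);
}
#endif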

/* Perform a depth first search on the _reverse_ graph, i.e. following
   predecessor rather than successor edges.

   The computation is split into three pieces:

   flow_dfs_compute_reverse_init () creates the necessary data
   structures.

   flow_dfs_compute_reverse_add_bb () adds a basic block to the data
   structures.  The block will start the search.

   flow_dfs_compute_reverse_execute () continues (or starts) the
   search using the block on the top of the stack, stopping when the
   stack is empty.

   flow_dfs_compute_reverse_finish () destroys the necessary data
   structures.

   Thus, the user will probably call ..._init(), call ..._add_bb() to
   add a beginning basic block to the stack, call ..._execute(),
   possibly add another bb to the stack and again call ..._execute(),
   ..., and finally call _finish().  */
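
/* Illustrative only: the call sequence described above, in the same shape
   already used by connect_infinite_loops_to_exit earlier in this file.  */
#if 0
static void
example_reverse_dfs ()
{
  struct depth_first_search_dsS dfs_ds;
  basic_block bb;

  flow_dfs_compute_reverse_init (&dfs_ds);
  flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);

  /* Each iteration returns a block not yet reached in the reverse graph;
     seed it and continue until every block has been visited.  */
  while ((bb = flow_dfs_compute_reverse_execute (&dfs_ds)) != NULL)
    flow_dfs_compute_reverse_add_bb (&dfs_ds, bb);

  flow_dfs_compute_reverse_finish (&dfs_ds);
}
#endif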

/* Initialize the data structures used for depth-first search on the
   reverse graph.  DATA is the current depth-first search context.  */

static void
flow_dfs_compute_reverse_init (data)
     depth_first_search_ds data;
{
  /* Allocate stack for back-tracking up CFG.  */
  data->stack =
    (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
                             * sizeof (basic_block));
  data->sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  data->visited_blocks = sbitmap_alloc (n_basic_blocks - (INVALID_BLOCK + 1));

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (data->visited_blocks);

  return;
}

/* Add the specified basic block to the top of the dfs data
   structures.  When the search continues, it will start at the
   block.  */

static void
flow_dfs_compute_reverse_add_bb (data, bb)
     depth_first_search_ds data;
     basic_block bb;
{
  data->stack[data->sp++] = bb;
  return;
}

/* Continue the depth-first search through the reverse graph starting
   with the block at the stack's top and ending when the stack is
   empty.  Visited nodes are marked.  Returns an unvisited basic
   block, or NULL if there is none available.  */

static basic_block
flow_dfs_compute_reverse_execute (data)
     depth_first_search_ds data;
{
  basic_block bb;
  edge e;
  int i;

  while (data->sp > 0)
    {
      bb = data->stack[--data->sp];

      /* Mark that we have visited this node.  */
      if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1)))
        {
          SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));

          /* Perform depth-first search on adjacent vertices.  */
          for (e = bb->pred; e; e = e->pred_next)
            flow_dfs_compute_reverse_add_bb (data, e->src);
        }
    }

  /* Determine if there are unvisited basic blocks.  */
  for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0;)
    if (!TEST_BIT (data->visited_blocks, i))
      return BASIC_BLOCK (i + (INVALID_BLOCK + 1));
  return NULL;
}

/* Destroy the data structures needed for depth-first search on the
   reverse graph.  */

static void
flow_dfs_compute_reverse_finish (data)
     depth_first_search_ds data;
{
  free (data->stack);
  sbitmap_free (data->visited_blocks);
  return;
}