gcc/cfganal.c

/* Control flow graph analysis code for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* This file contains various simple utilities to analyze the CFG.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "toplev.h"

#include "obstack.h"

/* Store the data structures necessary for depth-first search.  */
struct depth_first_search_dsS {
  /* stack for backtracking during the algorithm */
  basic_block *stack;

  /* number of edges in the stack.  That is, positions 0, ..., sp-1
     have edges.  */
  unsigned int sp;

  /* record of basic blocks already seen by depth-first search */
  sbitmap visited_blocks;
};
typedef struct depth_first_search_dsS *depth_first_search_ds;

static void flow_dfs_compute_reverse_init
  PARAMS ((depth_first_search_ds));
static void flow_dfs_compute_reverse_add_bb
  PARAMS ((depth_first_search_ds, basic_block));
static basic_block flow_dfs_compute_reverse_execute
  PARAMS ((depth_first_search_ds));
static void flow_dfs_compute_reverse_finish
  PARAMS ((depth_first_search_ds));
static void remove_fake_successors PARAMS ((basic_block));
static bool need_fake_edge_p PARAMS ((rtx));

/* Return true if the block has no effect and only forwards control flow to
   its single destination.  */

bool
forwarder_block_p (bb)
     basic_block bb;
{
  rtx insn = bb->head;
  if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
      || !bb->succ || bb->succ->succ_next)
    return false;

  while (insn != bb->end)
    {
      if (INSN_P (insn) && active_insn_p (insn))
        return false;
      insn = NEXT_INSN (insn);
    }
  return (!INSN_P (insn)
          || (GET_CODE (insn) == JUMP_INSN && simplejump_p (insn))
          || !active_insn_p (insn));
}

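/* Illustrative sketch (not part of the original file): forwarder blocks
   are typically interesting to CFG cleanup passes, which can redirect
   incoming edges straight to the forwarder's single successor.  The
   helper below only reports them; its name and the dump format are
   hypothetical.  */
#if 0
static void
report_forwarder_blocks (file)
     FILE *file;
{
  int i;

  for (i = 0; i < n_basic_blocks; i++)
    {
      basic_block bb = BASIC_BLOCK (i);

      /* forwarder_block_p guarantees bb->succ exists and is unique.  */
      if (forwarder_block_p (bb))
        fprintf (file, "block %d forwards to block %d\n",
                 bb->index, bb->succ->dest->index);
    }
}
#endif
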
/* Return nonzero if we can reach target from src by falling through.  */

bool
can_fallthru (src, target)
     basic_block src, target;
{
  rtx insn = src->end;
  rtx insn2 = target->head;

  if (src->index + 1 == target->index && !active_insn_p (insn2))
    insn2 = next_active_insn (insn2);
  /* ??? Later we may add code to move jump tables offline.  */
  return next_active_insn (insn) == insn2;
}

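/* Illustrative sketch (not part of the original file): a layout or jump
   optimization pass might ask, for each pair of consecutive blocks,
   whether the first can simply fall through into the second.  The
   reporting helper is hypothetical.  */
#if 0
static void
report_fallthru_pairs (file)
     FILE *file;
{
  int i;

  for (i = 0; i < n_basic_blocks - 1; i++)
    if (can_fallthru (BASIC_BLOCK (i), BASIC_BLOCK (i + 1)))
      fprintf (file, "block %d can fall through to block %d\n", i, i + 1);
}
#endif
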
/* Mark the back edges in DFS traversal.
   Return non-zero if a loop (natural or otherwise) is present.
   Inspired by Depth_First_Search_PP described in:

     Advanced Compiler Design and Implementation
     Steven Muchnick
     Morgan Kaufmann, 1997

   and heavily borrowed from flow_depth_first_order_compute.  */

bool
mark_dfs_back_edges ()
{
  edge *stack;
  int *pre;
  int *post;
  int sp;
  int prenum = 1;
  int postnum = 1;
  sbitmap visited;
  bool found = false;

  /* Allocate the preorder and postorder number arrays.  */
  pre = (int *) xcalloc (n_basic_blocks, sizeof (int));
  post = (int *) xcalloc (n_basic_blocks, sizeof (int));

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;
      e->flags &= ~EDGE_DFS_BACK;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          SET_BIT (visited, dest->index);

          pre[dest->index] = prenum++;

          if (dest->succ)
            {
              /* Since the DEST node has been visited for the first
                 time, check its successors.  */
              stack[sp++] = dest->succ;
            }
          else
            post[dest->index] = postnum++;
        }
      else
        {
          if (dest != EXIT_BLOCK_PTR && src != ENTRY_BLOCK_PTR
              && pre[src->index] >= pre[dest->index]
              && post[dest->index] == 0)
            e->flags |= EDGE_DFS_BACK, found = true;

          if (! e->succ_next && src != ENTRY_BLOCK_PTR)
            post[src->index] = postnum++;

          if (e->succ_next)
            stack[sp - 1] = e->succ_next;
          else
            sp--;
        }
    }

  free (pre);
  free (post);
  free (stack);
  sbitmap_free (visited);

  return found;
}

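/* Illustrative sketch (not part of the original file): one way a pass
   might consume the EDGE_DFS_BACK marks left by mark_dfs_back_edges.
   The loop below simply counts the marked back edges; the helper name
   count_dfs_back_edges is hypothetical.  */
#if 0
static int
count_dfs_back_edges ()
{
  int i, count = 0;

  /* If no loop was found there is nothing to count.  */
  if (! mark_dfs_back_edges ())
    return 0;

  for (i = 0; i < n_basic_blocks; i++)
    {
      edge e;

      for (e = BASIC_BLOCK (i)->succ; e; e = e->succ_next)
        if (e->flags & EDGE_DFS_BACK)
          count++;
    }

  return count;
}
#endif
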
/* Return true if we need to add a fake edge to the exit block.
   Helper function for flow_call_edges_add.  */

static bool
need_fake_edge_p (insn)
     rtx insn;
{
  if (!INSN_P (insn))
    return false;

  if ((GET_CODE (insn) == CALL_INSN
       && !SIBLING_CALL_P (insn)
       && !find_reg_note (insn, REG_NORETURN, NULL)
       && !find_reg_note (insn, REG_ALWAYS_RETURN, NULL)
       && !CONST_OR_PURE_CALL_P (insn)))
    return true;

  return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
           && MEM_VOLATILE_P (PATTERN (insn)))
          || (GET_CODE (PATTERN (insn)) == PARALLEL
              && asm_noperands (insn) != -1
              && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
          || GET_CODE (PATTERN (insn)) == ASM_INPUT);
}

/* Add fake edges to the function exit for any non-constant and non-noreturn
   calls, and for volatile inline assembly, in the bitmap of blocks specified
   by BLOCKS, or in the whole CFG if BLOCKS is zero.  Return the number of
   blocks that were split.

   The goal is to expose cases in which entering a basic block does not imply
   that all subsequent instructions must be executed.  */

int
flow_call_edges_add (blocks)
     sbitmap blocks;
{
  int i;
  int blocks_split = 0;
  int bb_num = 0;
  basic_block *bbs;
  bool check_last_block = false;

  /* Map bb indices into basic block pointers since split_block
     will renumber the basic blocks.  */

  bbs = xmalloc (n_basic_blocks * sizeof (*bbs));

  if (! blocks)
    {
      for (i = 0; i < n_basic_blocks; i++)
        bbs[bb_num++] = BASIC_BLOCK (i);
      check_last_block = true;
    }
  else
    {
      EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
      {
        bbs[bb_num++] = BASIC_BLOCK (i);
        if (i == n_basic_blocks - 1)
          check_last_block = true;
      });
    }

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block
      && need_fake_edge_p (BASIC_BLOCK (n_basic_blocks - 1)->end))
    {
      edge e;
      for (e = BASIC_BLOCK (n_basic_blocks - 1)->succ; e; e = e->succ_next)
        if (e->dest == EXIT_BLOCK_PTR)
          break;
      insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
      commit_edge_insertions ();
    }

  /* Now add fake edges to the function exit for any non-constant
     calls since there is no way that we can determine if they will
     return or not...  */

  for (i = 0; i < bb_num; i++)
    {
      basic_block bb = bbs[i];
      rtx insn;
      rtx prev_insn;

      for (insn = bb->end; ; insn = prev_insn)
        {
          prev_insn = PREV_INSN (insn);
          if (need_fake_edge_p (insn))
            {
              edge e;

              /* The above condition should be enough to verify that there is
                 no edge to the exit block in the CFG already.  Calling
                 make_edge in such a case would make us mark that edge as
                 fake and remove it later.  */
#ifdef ENABLE_CHECKING
              if (insn == bb->end)
                for (e = bb->succ; e; e = e->succ_next)
                  if (e->dest == EXIT_BLOCK_PTR)
                    abort ();
#endif

              /* Note that the following may create a new basic block
                 and renumber the existing basic blocks.  */
              e = split_block (bb, insn);
              if (e)
                blocks_split++;

              make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
            }

          if (insn == bb->head)
            break;
        }
    }

  if (blocks_split)
    verify_flow_info ();

  free (bbs);
  return blocks_split;
}

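/* Illustrative sketch (not part of the original file): a typical caller
   adds the fake call edges for the whole CFG, runs an analysis that
   needs them, and then strips them again.  The wrapper name below is
   hypothetical.  */
#if 0
static void
analysis_with_fake_call_edges ()
{
  int blocks_split;

  /* Passing a null sbitmap asks for fake edges in every basic block.  */
  blocks_split = flow_call_edges_add (NULL);

  /* ... run an analysis that relies on calls possibly not returning ... */

  /* The fake edges must not survive into later passes.  */
  remove_fake_edges ();
}
#endif
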
/* Find unreachable blocks.  An unreachable block will have 0 in
   the reachable bit in block->flags.  A non-zero value indicates the
   block is reachable.  */

void
find_unreachable_blocks ()
{
  edge e;
  int i, n;
  basic_block *tos, *worklist;

  n = n_basic_blocks;
  tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);

  /* Clear all the reachability flags.  */

  for (i = 0; i < n; ++i)
    BASIC_BLOCK (i)->flags &= ~BB_REACHABLE;

  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */

  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      *tos++ = e->dest;

      /* Mark the block reachable.  */
      e->dest->flags |= BB_REACHABLE;
    }

  /* Iterate: find everything reachable from what we've already seen.  */

  while (tos != worklist)
    {
      basic_block b = *--tos;

      for (e = b->succ; e; e = e->succ_next)
        if (!(e->dest->flags & BB_REACHABLE))
          {
            *tos++ = e->dest;
            e->dest->flags |= BB_REACHABLE;
          }
    }

  free (worklist);
}

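/* Illustrative sketch (not part of the original file): once
   find_unreachable_blocks has run, a cleanup pass can simply test the
   BB_REACHABLE flag.  The counting helper below is hypothetical.  */
#if 0
static int
count_unreachable_blocks ()
{
  int i, unreachable = 0;

  find_unreachable_blocks ();

  for (i = 0; i < n_basic_blocks; i++)
    if (!(BASIC_BLOCK (i)->flags & BB_REACHABLE))
      unreachable++;

  return unreachable;
}
#endif
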
/* Functions to access an edge list with a vector representation.
   Enough data is kept such that given an index number, the
   pred and succ that edge represents can be determined, or
   given a pred and a succ, its index number can be returned.
   This allows algorithms which consume a lot of memory to
   represent the normally full matrix of edge (pred,succ) with a
   single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
   wasted space in the client code due to sparse flow graphs.  */

/* This function initializes the edge list.  Basically the entire
   flowgraph is processed, all edges are assigned a number,
   and the data structure is filled in.  */

struct edge_list *
create_edge_list ()
{
  struct edge_list *elist;
  edge e;
  int num_edges;
  int x;
  int block_count;

  block_count = n_basic_blocks + 2;   /* Include the entry and exit blocks.  */

  num_edges = 0;

  /* Determine the number of edges in the flow graph by counting successor
     edges on each basic block.  */
  for (x = 0; x < n_basic_blocks; x++)
    {
      basic_block bb = BASIC_BLOCK (x);

      for (e = bb->succ; e; e = e->succ_next)
        num_edges++;
    }
  /* Don't forget successors of the entry block.  */
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    num_edges++;

  elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
  elist->num_blocks = block_count;
  elist->num_edges = num_edges;
  elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);

  num_edges = 0;

  /* Follow successors of the entry block, and register these edges.  */
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      elist->index_to_edge[num_edges] = e;
      num_edges++;
    }

  for (x = 0; x < n_basic_blocks; x++)
    {
      basic_block bb = BASIC_BLOCK (x);

      /* Follow all successors of blocks, and register these edges.  */
      for (e = bb->succ; e; e = e->succ_next)
        {
          elist->index_to_edge[num_edges] = e;
          num_edges++;
        }
    }
  return elist;
}

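/* Illustrative sketch (not part of the original file): the usual life
   cycle of an edge list built with the interface described above.
   NUM_EDGES and EDGE_INDEX come from basic-block.h; the stderr dump is
   only an example and the function name is hypothetical.  */
#if 0
static void
edge_list_example ()
{
  struct edge_list *elist = create_edge_list ();
  int x;

  /* Walk every numbered edge and check that its endpoints map back to
     the same index.  */
  for (x = 0; x < NUM_EDGES (elist); x++)
    {
      edge e = elist->index_to_edge[x];

      if (EDGE_INDEX (elist, e->src, e->dest) != x)
        abort ();
    }

  print_edge_list (stderr, elist);
  free_edge_list (elist);
}
#endif
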
/* This function frees memory associated with an edge list.  */

void
free_edge_list (elist)
     struct edge_list *elist;
{
  if (elist)
    {
      free (elist->index_to_edge);
      free (elist);
    }
}

/* This function provides debug output showing an edge list.  */

void
print_edge_list (f, elist)
     FILE *f;
     struct edge_list *elist;
{
  int x;

  fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
           elist->num_blocks - 2, elist->num_edges);

  for (x = 0; x < elist->num_edges; x++)
    {
      fprintf (f, " %-4d - edge(", x);
      if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
        fprintf (f, "entry,");
      else
        fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);

      if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
        fprintf (f, "exit)\n");
      else
        fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
    }
}

/* This function provides an internal consistency check of an edge list,
   verifying that all edges are present, and that there are no
   extra edges.  */

void
verify_edge_list (f, elist)
     FILE *f;
     struct edge_list *elist;
{
  int x, pred, succ, index;
  edge e;

  for (x = 0; x < n_basic_blocks; x++)
    {
      basic_block bb = BASIC_BLOCK (x);

      for (e = bb->succ; e; e = e->succ_next)
        {
          pred = e->src->index;
          succ = e->dest->index;
          index = EDGE_INDEX (elist, e->src, e->dest);
          if (index == EDGE_INDEX_NO_EDGE)
            {
              fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
              continue;
            }
          if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
            fprintf (f, "*p* Pred for index %d should be %d not %d\n",
                     index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
          if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
            fprintf (f, "*p* Succ for index %d should be %d not %d\n",
                     index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
        }
    }

  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      pred = e->src->index;
      succ = e->dest->index;
      index = EDGE_INDEX (elist, e->src, e->dest);
      if (index == EDGE_INDEX_NO_EDGE)
        {
          fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
          continue;
        }
      if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
        fprintf (f, "*p* Pred for index %d should be %d not %d\n",
                 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
      if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
        fprintf (f, "*p* Succ for index %d should be %d not %d\n",
                 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
    }
  /* We've verified that all the edges are in the list; now let's make sure
     there are no spurious edges in the list.  */

  for (pred = 0; pred < n_basic_blocks; pred++)
    for (succ = 0; succ < n_basic_blocks; succ++)
      {
        basic_block p = BASIC_BLOCK (pred);
        basic_block s = BASIC_BLOCK (succ);

        int found_edge = 0;

        for (e = p->succ; e; e = e->succ_next)
          if (e->dest == s)
            {
              found_edge = 1;
              break;
            }
        for (e = s->pred; e; e = e->pred_next)
          if (e->src == p)
            {
              found_edge = 1;
              break;
            }
        if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
            == EDGE_INDEX_NO_EDGE && found_edge != 0)
          fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
                   pred, succ);
        if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
            != EDGE_INDEX_NO_EDGE && found_edge == 0)
          fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
                   pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
                                           BASIC_BLOCK (succ)));
      }

  for (succ = 0; succ < n_basic_blocks; succ++)
    {
      basic_block p = ENTRY_BLOCK_PTR;
      basic_block s = BASIC_BLOCK (succ);

      int found_edge = 0;

      for (e = p->succ; e; e = e->succ_next)
        if (e->dest == s)
          {
            found_edge = 1;
            break;
          }
      for (e = s->pred; e; e = e->pred_next)
        if (e->src == p)
          {
            found_edge = 1;
            break;
          }
      if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
          == EDGE_INDEX_NO_EDGE && found_edge != 0)
        fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
                 succ);
      if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
          != EDGE_INDEX_NO_EDGE && found_edge == 0)
        fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
                 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
                                   BASIC_BLOCK (succ)));
    }

  for (pred = 0; pred < n_basic_blocks; pred++)
    {
      basic_block p = BASIC_BLOCK (pred);
      basic_block s = EXIT_BLOCK_PTR;

      int found_edge = 0;

      for (e = p->succ; e; e = e->succ_next)
        if (e->dest == s)
          {
            found_edge = 1;
            break;
          }
      for (e = s->pred; e; e = e->pred_next)
        if (e->src == p)
          {
            found_edge = 1;
            break;
          }
      if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
          == EDGE_INDEX_NO_EDGE && found_edge != 0)
        fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
                 pred);
      if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
          != EDGE_INDEX_NO_EDGE && found_edge == 0)
        fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
                 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
                                   EXIT_BLOCK_PTR));
    }
}

/* This routine will determine what, if any, edge there is between
   a specified predecessor and successor.  */

int
find_edge_index (edge_list, pred, succ)
     struct edge_list *edge_list;
     basic_block pred, succ;
{
  int x;
  for (x = 0; x < NUM_EDGES (edge_list); x++)
    {
      if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
          && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
        return x;
    }
  return (EDGE_INDEX_NO_EDGE);
}

/* Dump the list of basic blocks in the bitmap NODES.  */

void
flow_nodes_print (str, nodes, file)
     const char *str;
     const sbitmap nodes;
     FILE *file;
{
  int node;

  if (! nodes)
    return;

  fprintf (file, "%s { ", str);
  EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
  fputs ("}\n", file);
}

/* Dump the list of edges in the array EDGE_LIST.  */

void
flow_edge_list_print (str, edge_list, num_edges, file)
     const char *str;
     const edge *edge_list;
     int num_edges;
     FILE *file;
{
  int i;

  if (! edge_list)
    return;

  fprintf (file, "%s { ", str);
  for (i = 0; i < num_edges; i++)
    fprintf (file, "%d->%d ", edge_list[i]->src->index,
             edge_list[i]->dest->index);
  fputs ("}\n", file);
}

/* This routine will remove any fake successor edges for a basic block.
   When the edge is removed, it is also removed from whatever predecessor
   list it is in.  */

static void
remove_fake_successors (bb)
     basic_block bb;
{
  edge e;
  for (e = bb->succ; e;)
    {
      edge tmp = e;
      e = e->succ_next;
      if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
        remove_edge (tmp);
    }
}

/* This routine will remove all fake edges from the flow graph.  If
   we remove all fake successors, it will automatically remove all
   fake predecessors.  */

void
remove_fake_edges ()
{
  int x;

  for (x = 0; x < n_basic_blocks; x++)
    remove_fake_successors (BASIC_BLOCK (x));

  /* We've handled all successors except the entry block's.  */
  remove_fake_successors (ENTRY_BLOCK_PTR);
}

/* This function will add a fake edge between any block which has no
   successors, and the exit block.  Some data flow equations require these
   edges to exist.  */

void
add_noreturn_fake_exit_edges ()
{
  int x;

  for (x = 0; x < n_basic_blocks; x++)
    if (BASIC_BLOCK (x)->succ == NULL)
      make_single_succ_edge (BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
}

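/* Illustrative sketch (not part of the original file): the usual
   pattern is to add the fake exit edges just for the duration of an
   analysis and to drop them again with remove_fake_edges afterwards.
   The wrapper name below is hypothetical.  */
#if 0
static void
analysis_with_fake_exit_edges ()
{
  add_noreturn_fake_exit_edges ();

  /* ... solve data flow equations that assume every block has a
     successor ... */

  remove_fake_edges ();
}
#endif
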
/* This function adds a fake edge from any infinite loop to the
   exit block.  Some optimizations require a path from each node to
   the exit node.

   See also Morgan, Figure 3.10, pp. 82-83.

   The current implementation is ugly, not attempting to minimize the
   number of inserted fake edges.  To reduce the number of fake edges
   to insert, add fake edges from _innermost_ loops containing only
   nodes not reachable from the exit block.  */

void
connect_infinite_loops_to_exit ()
{
  basic_block unvisited_block;

  /* Perform depth-first search in the reverse graph to find nodes
     reachable from the exit block.  */
  struct depth_first_search_dsS dfs_ds;

  flow_dfs_compute_reverse_init (&dfs_ds);
  flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);

  /* Repeatedly add fake edges, updating the unreachable nodes.  */
  while (1)
    {
      unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
      if (!unvisited_block)
        break;
      make_edge (unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
      flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
    }

  flow_dfs_compute_reverse_finish (&dfs_ds);

  return;
}

/* Compute reverse top sort order.  */

void
flow_reverse_top_sort_order_compute (rts_order)
     int *rts_order;
{
  edge *stack;
  int sp;
  int postnum = 0;
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          SET_BIT (visited, dest->index);

          if (dest->succ)
            {
              /* Since the DEST node has been visited for the first
                 time, check its successors.  */
              stack[sp++] = dest->succ;
            }
          else
            rts_order[postnum++] = dest->index;
        }
      else
        {
          if (! e->succ_next && src != ENTRY_BLOCK_PTR)
            rts_order[postnum++] = src->index;

          if (e->succ_next)
            stack[sp - 1] = e->succ_next;
          else
            sp--;
        }
    }

  free (stack);
  sbitmap_free (visited);
}

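/* Illustrative sketch (not part of the original file): callers allocate
   an array of n_basic_blocks ints, let the routine fill it with block
   indices in reverse topological (postorder) order, and then walk it.
   The sketch assumes unreachable blocks have already been removed, so
   every entry is filled; the helper name is hypothetical.  */
#if 0
static void
walk_in_reverse_top_sort_order ()
{
  int i;
  int *rts_order = (int *) xmalloc (n_basic_blocks * sizeof (int));

  flow_reverse_top_sort_order_compute (rts_order);

  for (i = 0; i < n_basic_blocks; i++)
    {
      basic_block bb = BASIC_BLOCK (rts_order[i]);

      /* ... process BB ... */
    }

  free (rts_order);
}
#endif
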
/* Compute the depth first search order and store in the array
   DFS_ORDER if non-zero, marking the nodes visited in VISITED.  If
   RC_ORDER is non-zero, return the reverse completion number for each
   node.  Returns the number of nodes visited.  A depth first search
   tries to get as far away from the starting point as quickly as
   possible.  */

int
flow_depth_first_order_compute (dfs_order, rc_order)
     int *dfs_order;
     int *rc_order;
{
  edge *stack;
  int sp;
  int dfsnum = 0;
  int rcnum = n_basic_blocks - 1;
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          SET_BIT (visited, dest->index);

          if (dfs_order)
            dfs_order[dfsnum++] = dest->index;

          if (dest->succ)
            {
              /* Since the DEST node has been visited for the first
                 time, check its successors.  */
              stack[sp++] = dest->succ;
            }
          else
            {
              /* There are no successors for the DEST node so assign
                 its reverse completion number.  */
              if (rc_order)
                rc_order[rcnum--] = dest->index;
            }
        }
      else
        {
          if (! e->succ_next && src != ENTRY_BLOCK_PTR)
            {
              /* There are no more successors for the SRC node
                 so assign its reverse completion number.  */
              if (rc_order)
                rc_order[rcnum--] = src->index;
            }

          if (e->succ_next)
            stack[sp - 1] = e->succ_next;
          else
            sp--;
        }
    }

  free (stack);
  sbitmap_free (visited);

  /* The number of nodes visited should not be greater than
     n_basic_blocks.  */
  if (dfsnum > n_basic_blocks)
    abort ();

  /* There are some nodes left in the CFG that are unreachable.  */
  if (dfsnum < n_basic_blocks)
    abort ();
  return dfsnum;
}

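/* Illustrative sketch (not part of the original file): a caller that
   wants both orderings allocates two arrays of n_basic_blocks ints;
   either pointer may also be passed as zero when only one order is
   needed.  The function name is hypothetical.  */
#if 0
static void
dfs_order_example ()
{
  int *dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
  int *rc_order = (int *) xmalloc (n_basic_blocks * sizeof (int));

  /* The routine aborts if any block is unreachable, so on return every
     slot of both arrays has been filled in.  */
  flow_depth_first_order_compute (dfs_order, rc_order);

  /* dfs_order[0] is the first block reached from the entry block;
     rc_order lists block indices in reverse completion order.  */

  free (dfs_order);
  free (rc_order);
}
#endif
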
/* Compute the depth first search order on the _reverse_ graph and
   store in the array DFS_ORDER, marking the nodes visited in VISITED.
   Returns the number of nodes visited.

   The computation is split into three pieces:

   flow_dfs_compute_reverse_init () creates the necessary data
   structures.

   flow_dfs_compute_reverse_add_bb () adds a basic block to the data
   structures.  The block will start the search.

   flow_dfs_compute_reverse_execute () continues (or starts) the
   search using the block on the top of the stack, stopping when the
   stack is empty.

   flow_dfs_compute_reverse_finish () destroys the necessary data
   structures.

   Thus, the user will probably call ..._init(), call ..._add_bb() to
   add a beginning basic block to the stack, call ..._execute(),
   possibly add another bb to the stack and again call ..._execute(),
   ..., and finally call _finish().  */

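/* Illustrative sketch (not part of the original file): the call
   sequence described above, walking backwards from the exit block and
   repeatedly re-seeding the search with whatever block is still
   unvisited.  connect_infinite_loops_to_exit above is the real in-tree
   user of this interface; the function name here is hypothetical.  */
#if 0
static void
reverse_dfs_example ()
{
  struct depth_first_search_dsS dfs_ds;
  basic_block bb;

  flow_dfs_compute_reverse_init (&dfs_ds);
  flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);

  /* Each call to ..._execute () exhausts the stack and hands back one
     still-unvisited block (or NULL); pushing that block continues the
     search from there.  */
  while ((bb = flow_dfs_compute_reverse_execute (&dfs_ds)) != NULL)
    flow_dfs_compute_reverse_add_bb (&dfs_ds, bb);

  flow_dfs_compute_reverse_finish (&dfs_ds);
}
#endif
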
/* Initialize the data structures used for depth-first search on the
   reverse graph.  DATA is the current depth-first search context; its
   stack starts out empty and no blocks are marked visited.  */

static void
flow_dfs_compute_reverse_init (data)
     depth_first_search_ds data;
{
  /* Allocate stack for back-tracking up CFG.  */
  data->stack =
    (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
                             * sizeof (basic_block));
  data->sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  data->visited_blocks = sbitmap_alloc (n_basic_blocks - (INVALID_BLOCK + 1));

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (data->visited_blocks);

  return;
}

/* Add the specified basic block to the top of the dfs data
   structures.  When the search continues, it will start at the
   block.  */

static void
flow_dfs_compute_reverse_add_bb (data, bb)
     depth_first_search_ds data;
     basic_block bb;
{
  data->stack[data->sp++] = bb;
  return;
}

/* Continue the depth-first search through the reverse graph starting
   with the block at the stack's top and ending when the stack is
   empty.  Visited nodes are marked.  Returns an unvisited basic
   block, or NULL if there is none available.  */

static basic_block
flow_dfs_compute_reverse_execute (data)
     depth_first_search_ds data;
{
  basic_block bb;
  edge e;
  int i;

  while (data->sp > 0)
    {
      bb = data->stack[--data->sp];

      /* Mark that we have visited this node.  */
      if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1)))
        {
          SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));

          /* Perform depth-first search on adjacent vertices.  */
          for (e = bb->pred; e; e = e->pred_next)
            flow_dfs_compute_reverse_add_bb (data, e->src);
        }
    }

  /* Determine if there are unvisited basic blocks.  */
  for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0;)
    if (!TEST_BIT (data->visited_blocks, i))
      return BASIC_BLOCK (i + (INVALID_BLOCK + 1));
  return NULL;
}

/* Destroy the data structures needed for depth-first search on the
   reverse graph.  */

static void
flow_dfs_compute_reverse_finish (data)
     depth_first_search_ds data;
{
  free (data->stack);
  sbitmap_free (data->visited_blocks);
  return;
}