/* Control flow graph analysis code for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* This file contains various simple utilities to analyze the CFG.  */
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
/* Store the data structures necessary for depth-first search.  */
struct depth_first_search_dsS {
  /* stack for backtracking during the algorithm */
  basic_block *stack;

  /* number of edges in the stack.  That is, positions 0, ..., sp-1
     have edges.  */
  unsigned int sp;

  /* record of basic blocks already seen by depth-first search */
  sbitmap visited_blocks;
};
typedef struct depth_first_search_dsS *depth_first_search_ds;
static void flow_dfs_compute_reverse_init	PARAMS ((depth_first_search_ds));
static void flow_dfs_compute_reverse_add_bb	PARAMS ((depth_first_search_ds,
							 basic_block));
static basic_block flow_dfs_compute_reverse_execute
						PARAMS ((depth_first_search_ds));
static void flow_dfs_compute_reverse_finish	PARAMS ((depth_first_search_ds));
static void remove_fake_successors		PARAMS ((basic_block));
static bool need_fake_edge_p			PARAMS ((rtx));
/* Return true if the block has no effect and only forwards control flow to
   its single destination.  */

bool
forwarder_block_p (bb)
     basic_block bb;
{
  rtx insn = bb->head;

  if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
      || !bb->succ || bb->succ->succ_next)
    return false;

  while (insn != bb->end)
    {
      if (active_insn_p (insn))
	return false;

      insn = NEXT_INSN (insn);
    }

  return (!active_insn_p (insn)
	  || (GET_CODE (insn) == JUMP_INSN && onlyjump_p (insn)));
}
/* Return nonzero if we can reach target from src by falling through.  */

bool
can_fallthru (src, target)
     basic_block src, target;
{
  rtx insn = src->end;
  rtx insn2 = target->head;

  if (src->index + 1 == target->index && !active_insn_p (insn2))
    insn2 = next_active_insn (insn2);

  /* ??? Later we may add code to move jump tables offline.  */
  return next_active_insn (insn) == insn2;
}
/* Identify critical edges and set the bits appropriately.  */

void
mark_critical_edges ()
{
  int i, n = n_basic_blocks;
  basic_block bb;

  /* We begin with the entry block.  This is not terribly important now,
     but could be if a front end (Fortran) implemented alternate entry
     points.  */
  bb = ENTRY_BLOCK_PTR;
  i = -1;

  while (1)
    {
      edge e;

      /* (1) Critical edges must have a source with multiple successors.  */
      if (bb->succ && bb->succ->succ_next)
	{
	  for (e = bb->succ; e; e = e->succ_next)
	    {
	      /* (2) Critical edges must have a destination with multiple
		 predecessors.  Note that we know there is at least one
		 predecessor -- the edge we followed to get here.  */
	      if (e->dest->pred->pred_next)
		e->flags |= EDGE_CRITICAL;
	      else
		e->flags &= ~EDGE_CRITICAL;
	    }
	}
      else
	{
	  for (e = bb->succ; e; e = e->succ_next)
	    e->flags &= ~EDGE_CRITICAL;
	}

      if (++i >= n)
	break;

      bb = BASIC_BLOCK (i);
    }
}
/* Mark the back edges in DFS traversal.
   Return non-zero if a loop (natural or otherwise) is present.
   Inspired by Depth_First_Search_PP described in:

     Advanced Compiler Design and Implementation
     Steven Muchnick
     Morgan Kaufmann, 1997

   and heavily borrowed from flow_depth_first_order_compute.  */
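
/* An illustrative sketch (not part of the original sources): in a CFG

	bb0 -> bb1 -> bb2
	        ^______|

   the depth-first walk from the entry numbers bb1 before bb2, so when the
   edge bb2->bb1 is examined, bb1 has a preorder number no larger than bb2's
   and no postorder number yet.  That is exactly the test used below --
   pre[src->index] >= pre[dest->index] and post[dest->index] == 0 -- which
   sets EDGE_DFS_BACK on the edge.  */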
bool
mark_dfs_back_edges ()
{
  edge *stack;
  int *pre;
  int *post;
  int sp;
  int prenum = 1;
  int postnum = 1;
  sbitmap visited;
  bool found = false;

  /* Allocate the preorder and postorder number arrays.  */
  pre = (int *) xcalloc (n_basic_blocks, sizeof (int));
  post = (int *) xcalloc (n_basic_blocks, sizeof (int));

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;
      e->flags &= ~EDGE_DFS_BACK;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  SET_BIT (visited, dest->index);

	  pre[dest->index] = prenum++;

	  if (dest->succ)
	    {
	      /* Since the DEST node has been visited for the first
		 time, check its successors.  */
	      stack[sp++] = dest->succ;
	    }
	  else
	    post[dest->index] = postnum++;
	}
      else
	{
	  if (dest != EXIT_BLOCK_PTR && src != ENTRY_BLOCK_PTR
	      && pre[src->index] >= pre[dest->index]
	      && post[dest->index] == 0)
	    e->flags |= EDGE_DFS_BACK, found = true;

	  if (! e->succ_next && src != ENTRY_BLOCK_PTR)
	    post[src->index] = postnum++;

	  if (e->succ_next)
	    stack[sp - 1] = e->succ_next;
	  else
	    sp--;
	}
    }

  free (pre);
  free (post);
  free (stack);
  sbitmap_free (visited);

  return found;
}
/* Return true if we need to add a fake edge to the exit block.
   Helper function for flow_call_edges_add.  */

static bool
need_fake_edge_p (insn)
     rtx insn;
{
  if (!INSN_P (insn))
    return false;

  if ((GET_CODE (insn) == CALL_INSN
       && !SIBLING_CALL_P (insn)
       && !find_reg_note (insn, REG_NORETURN, NULL)
       && !find_reg_note (insn, REG_ALWAYS_RETURN, NULL)
       && !CONST_OR_PURE_CALL_P (insn)))
    return true;

  return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
	   && MEM_VOLATILE_P (PATTERN (insn)))
	  || (GET_CODE (PATTERN (insn)) == PARALLEL
	      && asm_noperands (insn) != -1
	      && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
	  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
}
/* Add fake edges to the function exit for any non-constant and non-noreturn
   calls, volatile inline assembly in the bitmap of blocks specified by
   BLOCKS or to the whole CFG if BLOCKS is zero.  Return the number of blocks
   that were split.

   The goal is to expose cases in which entering a basic block does not imply
   that all subsequent instructions must be executed.  */
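
/* A small illustration (a sketch, not part of the original sources): if a
   block ends in

	x = foo ();	<-- foo might call exit () or longjmp ()
	y = x + 1;

   then entering the block does not guarantee that the second statement is
   ever executed.  flow_call_edges_add therefore splits the block after the
   call and adds a fake edge from the call's block to EXIT_BLOCK_PTR, so that
   the block profiler and data flow problems see the possible early exit.  */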
int
flow_call_edges_add (blocks)
     sbitmap blocks;
{
  int i;
  int blocks_split = 0;
  int bb_num = 0;
  basic_block *bbs;
  bool check_last_block = false;

  /* Map bb indices into basic block pointers since split_block
     will renumber the basic blocks.  */

  bbs = xmalloc (n_basic_blocks * sizeof (*bbs));

  if (! blocks)
    {
      for (i = 0; i < n_basic_blocks; i++)
	bbs[bb_num++] = BASIC_BLOCK (i);

      check_last_block = true;
    }
  else
    {
      EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
      {
	bbs[bb_num++] = BASIC_BLOCK (i);
	if (i == n_basic_blocks - 1)
	  check_last_block = true;
      });
    }

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block
      && need_fake_edge_p (BASIC_BLOCK (n_basic_blocks - 1)->end))
    {
      edge e;

      for (e = BASIC_BLOCK (n_basic_blocks - 1)->succ; e; e = e->succ_next)
	if (e->dest == EXIT_BLOCK_PTR)
	  break;

      insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
      commit_edge_insertions ();
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */

  for (i = 0; i < bb_num; i++)
    {
      basic_block bb = bbs[i];
      rtx insn;
      rtx prev_insn;

      for (insn = bb->end; ; insn = prev_insn)
	{
	  prev_insn = PREV_INSN (insn);
	  if (need_fake_edge_p (insn))
	    {
	      edge e;

	      /* The above condition should be enough to verify that there
		 is no edge to the exit block in the CFG already.  Calling
		 make_edge in such a case would cause us to mark that edge
		 as fake and remove it later.  */

#ifdef ENABLE_CHECKING
	      if (insn == bb->end)
		for (e = bb->succ; e; e = e->succ_next)
		  if (e->dest == EXIT_BLOCK_PTR)
		    abort ();
#endif

	      /* Note that the following may create a new basic block
		 and renumber the existing basic blocks.  */
	      e = split_block (bb, insn);
	      if (e)
		blocks_split++;

	      make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
	    }

	  if (insn == bb->head)
	    break;
	}
    }

  if (blocks_split)
    verify_flow_info ();

  free (bbs);

  return blocks_split;
}
/* Find unreachable blocks.  An unreachable block will have 0 in
   the reachable bit in block->flags.  A non-zero value indicates the
   block is reachable.  */

void
find_unreachable_blocks ()
{
  edge e;
  int i, n;
  basic_block *tos, *worklist;

  n = n_basic_blocks;
  tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);

  /* Clear all the reachability flags.  */

  for (i = 0; i < n; ++i)
    BASIC_BLOCK (i)->flags &= ~BB_REACHABLE;

  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */

  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      *tos++ = e->dest;

      /* Mark the block reachable.  */
      e->dest->flags |= BB_REACHABLE;
    }

  /* Iterate: find everything reachable from what we've already seen.  */

  while (tos != worklist)
    {
      basic_block b = *--tos;

      for (e = b->succ; e; e = e->succ_next)
	if (!(e->dest->flags & BB_REACHABLE))
	  {
	    *tos++ = e->dest;
	    e->dest->flags |= BB_REACHABLE;
	  }
    }

  free (worklist);
}
/* Functions to access an edge list with a vector representation.
   Enough data is kept such that given an index number, the
   pred and succ that edge represents can be determined, or
   given a pred and a succ, its index number can be returned.
   This allows algorithms which consume a lot of memory to
   represent the normally full matrix of edge (pred,succ) with a
   single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
   wasted space in the client code due to sparse flow graphs.  */
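
/* Typical client usage, as a hedged sketch (illustrative only; bb1 and bb2
   stand for arbitrary basic blocks in the current CFG):

	struct edge_list *el = create_edge_list ();
	int ix = EDGE_INDEX (el, bb1, bb2);

	if (ix != EDGE_INDEX_NO_EDGE)
	  {
	    edge e = INDEX_EDGE (el, ix);
	    ... e->src is bb1 and e->dest is bb2 ...
	  }

	free_edge_list (el);  */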
/* This function initializes the edge list.  Basically the entire
   flowgraph is processed, and all edges are assigned a number,
   and the data structure is filled in.  */

struct edge_list *
create_edge_list ()
{
  struct edge_list *elist;
  edge e;
  int num_edges;
  int x;
  int block_count;

  block_count = n_basic_blocks + 2;	/* Include the entry and exit blocks.  */

  num_edges = 0;

  /* Determine the number of edges in the flow graph by counting successor
     edges on each basic block.  */
  for (x = 0; x < n_basic_blocks; x++)
    {
      basic_block bb = BASIC_BLOCK (x);

      for (e = bb->succ; e; e = e->succ_next)
	num_edges++;
    }

  /* Don't forget successors of the entry block.  */
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    num_edges++;

  elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
  elist->num_blocks = block_count;
  elist->num_edges = num_edges;
  elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);

  num_edges = 0;

  /* Follow successors of the entry block, and register these edges.  */
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      elist->index_to_edge[num_edges] = e;
      num_edges++;
    }

  for (x = 0; x < n_basic_blocks; x++)
    {
      basic_block bb = BASIC_BLOCK (x);

      /* Follow all successors of blocks, and register these edges.  */
      for (e = bb->succ; e; e = e->succ_next)
	{
	  elist->index_to_edge[num_edges] = e;
	  num_edges++;
	}
    }

  return elist;
}
/* This function frees memory associated with an edge list.  */

void
free_edge_list (elist)
     struct edge_list *elist;
{
  if (elist)
    {
      free (elist->index_to_edge);
      free (elist);
    }
}
/* This function provides debug output showing an edge list.  */

void
print_edge_list (f, elist)
     FILE *f;
     struct edge_list *elist;
{
  int x;

  fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
	   elist->num_blocks - 2, elist->num_edges);

  for (x = 0; x < elist->num_edges; x++)
    {
      fprintf (f, " %-4d - edge(", x);
      if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
	fprintf (f, "entry,");
      else
	fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);

      if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
	fprintf (f, "exit)\n");
      else
	fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
    }
}
/* This function provides an internal consistency check of an edge list,
   verifying that all edges are present, and that there are no
   extra edges.  */

void
verify_edge_list (f, elist)
     FILE *f;
     struct edge_list *elist;
{
  int x, pred, succ, index;
  edge e;

  for (x = 0; x < n_basic_blocks; x++)
    {
      basic_block bb = BASIC_BLOCK (x);

      for (e = bb->succ; e; e = e->succ_next)
	{
	  pred = e->src->index;
	  succ = e->dest->index;
	  index = EDGE_INDEX (elist, e->src, e->dest);
	  if (index == EDGE_INDEX_NO_EDGE)
	    {
	      fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
	      continue;
	    }

	  if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
	    fprintf (f, "*p* Pred for index %d should be %d not %d\n",
		     index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
	  if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
	    fprintf (f, "*p* Succ for index %d should be %d not %d\n",
		     index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
	}
    }

  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      pred = e->src->index;
      succ = e->dest->index;
      index = EDGE_INDEX (elist, e->src, e->dest);
      if (index == EDGE_INDEX_NO_EDGE)
	{
	  fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
	  continue;
	}

      if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
	fprintf (f, "*p* Pred for index %d should be %d not %d\n",
		 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
      if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
	fprintf (f, "*p* Succ for index %d should be %d not %d\n",
		 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
    }

  /* We've verified that all the edges are in the list, now let's make sure
     there are no spurious edges in the list.  */

  for (pred = 0; pred < n_basic_blocks; pred++)
    for (succ = 0; succ < n_basic_blocks; succ++)
      {
	basic_block p = BASIC_BLOCK (pred);
	basic_block s = BASIC_BLOCK (succ);
	int found_edge = 0;

	for (e = p->succ; e; e = e->succ_next)
	  if (e->dest == s)
	    {
	      found_edge = 1;
	      break;
	    }

	for (e = s->pred; e; e = e->pred_next)
	  if (e->src == p)
	    {
	      found_edge = 1;
	      break;
	    }

	if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
	    == EDGE_INDEX_NO_EDGE && found_edge != 0)
	  fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
		   pred, succ);
	if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
	    != EDGE_INDEX_NO_EDGE && found_edge == 0)
	  fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
		   pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
					   BASIC_BLOCK (succ)));
      }

  for (succ = 0; succ < n_basic_blocks; succ++)
    {
      basic_block p = ENTRY_BLOCK_PTR;
      basic_block s = BASIC_BLOCK (succ);
      int found_edge = 0;

      for (e = p->succ; e; e = e->succ_next)
	if (e->dest == s)
	  {
	    found_edge = 1;
	    break;
	  }

      for (e = s->pred; e; e = e->pred_next)
	if (e->src == p)
	  {
	    found_edge = 1;
	    break;
	  }

      if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
	  == EDGE_INDEX_NO_EDGE && found_edge != 0)
	fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
		 succ);
      if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
	  != EDGE_INDEX_NO_EDGE && found_edge == 0)
	fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
		 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
				   BASIC_BLOCK (succ)));
    }

  for (pred = 0; pred < n_basic_blocks; pred++)
    {
      basic_block p = BASIC_BLOCK (pred);
      basic_block s = EXIT_BLOCK_PTR;
      int found_edge = 0;

      for (e = p->succ; e; e = e->succ_next)
	if (e->dest == s)
	  {
	    found_edge = 1;
	    break;
	  }

      for (e = s->pred; e; e = e->pred_next)
	if (e->src == p)
	  {
	    found_edge = 1;
	    break;
	  }

      if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
	  == EDGE_INDEX_NO_EDGE && found_edge != 0)
	fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
		 pred);
      if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
	  != EDGE_INDEX_NO_EDGE && found_edge == 0)
	fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
		 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
				   EXIT_BLOCK_PTR));
    }
}
/* This routine will determine what, if any, edge there is between
   a specified predecessor and successor.  */

int
find_edge_index (edge_list, pred, succ)
     struct edge_list *edge_list;
     basic_block pred, succ;
{
  int x;

  for (x = 0; x < NUM_EDGES (edge_list); x++)
    if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
	&& INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
      return x;

  return (EDGE_INDEX_NO_EDGE);
}
/* Dump the list of basic blocks in the bitmap NODES.  */

void
flow_nodes_print (str, nodes, file)
     const char *str;
     const sbitmap nodes;
     FILE *file;
{
  int node;

  if (! nodes)
    return;

  fprintf (file, "%s { ", str);
  EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
  fputs ("}\n", file);
}
/* Dump the list of edges in the array EDGE_LIST.  */

void
flow_edge_list_print (str, edge_list, num_edges, file)
     const char *str;
     const edge *edge_list;
     int num_edges;
     FILE *file;
{
  int i;

  if (! edge_list)
    return;

  fprintf (file, "%s { ", str);
  for (i = 0; i < num_edges; i++)
    fprintf (file, "%d->%d ", edge_list[i]->src->index,
	     edge_list[i]->dest->index);
  fputs ("}\n", file);
}
/* This routine will remove any fake successor edges for a basic block.
   When the edge is removed, it is also removed from whatever predecessor
   list it is in.  */

static void
remove_fake_successors (bb)
     basic_block bb;
{
  edge e;

  for (e = bb->succ; e;)
    {
      edge tmp = e;

      e = e->succ_next;
      if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
	remove_edge (tmp);
    }
}
/* This routine will remove all fake edges from the flow graph.  If
   we remove all fake successors, it will automatically remove all
   fake predecessors.  */

void
remove_fake_edges ()
{
  int x;

  for (x = 0; x < n_basic_blocks; x++)
    remove_fake_successors (BASIC_BLOCK (x));

  /* We've handled all successors except the entry block's.  */
  remove_fake_successors (ENTRY_BLOCK_PTR);
}
/* This function will add a fake edge between any block which has no
   successors, and the exit block.  Some data flow equations require these
   edges to exist.  */

void
add_noreturn_fake_exit_edges ()
{
  int x;

  for (x = 0; x < n_basic_blocks; x++)
    if (BASIC_BLOCK (x)->succ == NULL)
      make_single_succ_edge (BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
}
/* This function adds a fake edge between any infinite loops to the
   exit block.  Some optimizations require a path from each node to
   the exit node.

   See also Morgan, Figure 3.10, pp. 82-83.

   The current implementation is ugly, not attempting to minimize the
   number of inserted fake edges.  To reduce the number of fake edges
   to insert, add fake edges from _innermost_ loops containing only
   nodes not reachable from the exit block.  */
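
/* For instance (an illustrative sketch, not from the original sources):
   a function that ends in

	for (;;)
	  do_something ();	<-- do_something () is hypothetical

   has a basic block with no path to the exit; the reverse depth-first
   search below never reaches that block from EXIT_BLOCK_PTR, so a fake
   edge from it to the exit block is added, giving data flow problems the
   required path to the exit.  */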
void
connect_infinite_loops_to_exit ()
{
  basic_block unvisited_block;

  /* Perform depth-first search in the reverse graph to find nodes
     reachable from the exit block.  */
  struct depth_first_search_dsS dfs_ds;

  flow_dfs_compute_reverse_init (&dfs_ds);
  flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);

  /* Repeatedly add fake edges, updating the unreachable nodes.  */
  while (1)
    {
      unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
      if (!unvisited_block)
	break;

      make_edge (unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
      flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
    }

  flow_dfs_compute_reverse_finish (&dfs_ds);
}
/* Compute reverse top sort order.  */

void
flow_reverse_top_sort_order_compute (rts_order)
     int *rts_order;
{
  edge *stack;
  int sp;
  int postnum = 0;
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  SET_BIT (visited, dest->index);

	  if (dest->succ)
	    {
	      /* Since the DEST node has been visited for the first
		 time, check its successors.  */
	      stack[sp++] = dest->succ;
	    }
	  else
	    rts_order[postnum++] = dest->index;
	}
      else
	{
	  if (! e->succ_next && src != ENTRY_BLOCK_PTR)
	    rts_order[postnum++] = src->index;

	  if (e->succ_next)
	    stack[sp - 1] = e->succ_next;
	  else
	    sp--;
	}
    }

  free (stack);
  sbitmap_free (visited);
}
/* Compute the depth first search order and store in the array
   DFS_ORDER if non-zero, marking the nodes visited in VISITED.  If
   RC_ORDER is non-zero, return the reverse completion number for each
   node.  Returns the number of nodes visited.  A depth first search
   tries to get as far away from the starting point as quickly as
   possible.  */

int
flow_depth_first_order_compute (dfs_order, rc_order)
     int *dfs_order;
     int *rc_order;
{
  edge *stack;
  int sp;
  int dfsnum = 0;
  int rcnum = n_basic_blocks - 1;
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  SET_BIT (visited, dest->index);

	  if (dfs_order)
	    dfs_order[dfsnum++] = dest->index;

	  if (dest->succ)
	    {
	      /* Since the DEST node has been visited for the first
		 time, check its successors.  */
	      stack[sp++] = dest->succ;
	    }
	  else
	    {
	      /* There are no successors for the DEST node so assign
		 its reverse completion number.  */
	      if (rc_order)
		rc_order[rcnum--] = dest->index;
	    }
	}
      else
	{
	  if (! e->succ_next && src != ENTRY_BLOCK_PTR)
	    {
	      /* There are no more successors for the SRC node
		 so assign its reverse completion number.  */
	      if (rc_order)
		rc_order[rcnum--] = src->index;
	    }

	  if (e->succ_next)
	    stack[sp - 1] = e->succ_next;
	  else
	    sp--;
	}
    }

  free (stack);
  sbitmap_free (visited);

  /* The number of nodes visited should not be greater than
     n_basic_blocks.  */
  if (dfsnum > n_basic_blocks)
    abort ();

  /* There are some nodes left in the CFG that are unreachable.  */
  if (dfsnum < n_basic_blocks)
    abort ();

  return dfsnum;
}
/* Compute the depth first search order on the _reverse_ graph and
   store in the array DFS_ORDER, marking the nodes visited in VISITED.
   Returns the number of nodes visited.

   The computation is split into three pieces:

   flow_dfs_compute_reverse_init () creates the necessary data
   structures.

   flow_dfs_compute_reverse_add_bb () adds a basic block to the data
   structures.  The block will start the search.

   flow_dfs_compute_reverse_execute () continues (or starts) the
   search using the block on the top of the stack, stopping when the
   stack is empty.

   flow_dfs_compute_reverse_finish () destroys the necessary data
   structures.

   Thus, the user will probably call ..._init(), call ..._add_bb() to
   add a beginning basic block to the stack, call ..._execute(),
   possibly add another bb to the stack and again call ..._execute(),
   ..., and finally call _finish().  */
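
/* A usage sketch (hedged; it mirrors connect_infinite_loops_to_exit above,
   the in-file client of this interface):

	struct depth_first_search_dsS dfs_ds;
	basic_block bb;

	flow_dfs_compute_reverse_init (&dfs_ds);
	flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
	while ((bb = flow_dfs_compute_reverse_execute (&dfs_ds)) != NULL)
	  {
	    ... handle the unvisited block BB ...
	    flow_dfs_compute_reverse_add_bb (&dfs_ds, bb);
	  }
	flow_dfs_compute_reverse_finish (&dfs_ds);  */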
/* Initialize the data structures used for depth-first search on the
   reverse graph.  DATA is the current depth-first search context.  */

static void
flow_dfs_compute_reverse_init (data)
     depth_first_search_ds data;
{
  /* Allocate stack for back-tracking up CFG.  */
  data->stack =
    (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
			     * sizeof (basic_block));
  data->sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  data->visited_blocks = sbitmap_alloc (n_basic_blocks - (INVALID_BLOCK + 1));

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (data->visited_blocks);

  return;
}
/* Add the specified basic block to the top of the dfs data
   structures.  When the search continues, it will start at the
   block.  */

static void
flow_dfs_compute_reverse_add_bb (data, bb)
     depth_first_search_ds data;
     basic_block bb;
{
  data->stack[data->sp++] = bb;
  return;
}
/* Continue the depth-first search through the reverse graph starting
   with the block at the stack's top and ending when the stack is
   empty.  Visited nodes are marked.  Returns an unvisited basic
   block, or NULL if there is none available.  */

static basic_block
flow_dfs_compute_reverse_execute (data)
     depth_first_search_ds data;
{
  basic_block bb;
  edge e;
  int i;

  while (data->sp > 0)
    {
      bb = data->stack[--data->sp];

      /* Mark that we have visited this node.  */
      if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1)))
	{
	  SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));

	  /* Perform depth-first search on adjacent vertices.  */
	  for (e = bb->pred; e; e = e->pred_next)
	    flow_dfs_compute_reverse_add_bb (data, e->src);
	}
    }

  /* Determine if there are unvisited basic blocks.  */
  for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0;)
    if (!TEST_BIT (data->visited_blocks, i))
      return BASIC_BLOCK (i + (INVALID_BLOCK + 1));

  return NULL;
}
/* Destroy the data structures needed for depth-first search on the
   reverse graph.  */

static void
flow_dfs_compute_reverse_finish (data)
     depth_first_search_ds data;
{
  free (data->stack);
  sbitmap_free (data->visited_blocks);
  return;
}