2002-12-18 Aldy Hernandez <aldyh@redhat.com>
[official-gcc.git] / gcc / cfganal.c
blob170ba447315aaa1258f8f032279f45a48706cb7d
1 /* Control flow graph analysis code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /* This file contains various simple utilities to analyze the CFG. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "rtl.h"
28 #include "hard-reg-set.h"
29 #include "basic-block.h"
30 #include "insn-config.h"
31 #include "recog.h"
32 #include "toplev.h"
33 #include "tm_p.h"
35 /* Store the data structures necessary for depth-first search. */
36 struct depth_first_search_dsS {
37 /* stack for backtracking during the algorithm */
38 basic_block *stack;
40 /* number of edges in the stack. That is, positions 0, ..., sp-1
41 have edges. */
42 unsigned int sp;
44 /* record of basic blocks already seen by depth-first search */
45 sbitmap visited_blocks;
47 typedef struct depth_first_search_dsS *depth_first_search_ds;
49 static void flow_dfs_compute_reverse_init
50 PARAMS ((depth_first_search_ds));
51 static void flow_dfs_compute_reverse_add_bb
52 PARAMS ((depth_first_search_ds, basic_block));
53 static basic_block flow_dfs_compute_reverse_execute
54 PARAMS ((depth_first_search_ds));
55 static void flow_dfs_compute_reverse_finish
56 PARAMS ((depth_first_search_ds));
57 static void remove_fake_successors PARAMS ((basic_block));
58 static bool need_fake_edge_p PARAMS ((rtx));
59 static bool flow_active_insn_p PARAMS ((rtx));
61 /* Like active_insn_p, except keep the return value clobber around
62 even after reload. */
64 static bool
65 flow_active_insn_p (insn)
66 rtx insn;
68 if (active_insn_p (insn))
69 return true;
71 /* A clobber of the function return value exists for buggy
72 programs that fail to return a value. Its effect is to
73 keep the return value from being live across the entire
74 function. If we allow it to be skipped, we introduce the
75 possibility for register livetime aborts. */
76 if (GET_CODE (PATTERN (insn)) == CLOBBER
77 && GET_CODE (XEXP (PATTERN (insn), 0)) == REG
78 && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
79 return true;
81 return false;
84 /* Return true if the block has no effect and only forwards control flow to
85 its single destination. */
87 bool
88 forwarder_block_p (bb)
89 basic_block bb;
91 rtx insn;
93 if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
94 || !bb->succ || bb->succ->succ_next)
95 return false;
97 for (insn = bb->head; insn != bb->end; insn = NEXT_INSN (insn))
98 if (INSN_P (insn) && flow_active_insn_p (insn))
99 return false;
101 return (!INSN_P (insn)
102 || (GET_CODE (insn) == JUMP_INSN && simplejump_p (insn))
103 || !flow_active_insn_p (insn));
106 /* Return nonzero if we can reach target from src by falling through. */
108 bool
109 can_fallthru (src, target)
110 basic_block src, target;
112 rtx insn = src->end;
113 rtx insn2 = target->head;
115 if (src->next_bb != target)
116 return 0;
118 if (!active_insn_p (insn2))
119 insn2 = next_active_insn (insn2);
121 /* ??? Later we may add code to move jump tables offline. */
122 return next_active_insn (insn) == insn2;
125 /* Mark the back edges in DFS traversal.
126 Return nonzero if a loop (natural or otherwise) is present.
127 Inspired by Depth_First_Search_PP described in:
129 Advanced Compiler Design and Implementation
130 Steven Muchnick
131 Morgan Kaufmann, 1997
133 and heavily borrowed from flow_depth_first_order_compute. */
135 bool
136 mark_dfs_back_edges ()
138 edge *stack;
139 int *pre;
140 int *post;
141 int sp;
142 int prenum = 1;
143 int postnum = 1;
144 sbitmap visited;
145 bool found = false;
147 /* Allocate the preorder and postorder number arrays. */
148 pre = (int *) xcalloc (last_basic_block, sizeof (int));
149 post = (int *) xcalloc (last_basic_block, sizeof (int));
151 /* Allocate stack for back-tracking up CFG. */
152 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
153 sp = 0;
155 /* Allocate bitmap to track nodes that have been visited. */
156 visited = sbitmap_alloc (last_basic_block);
158 /* None of the nodes in the CFG have been visited yet. */
159 sbitmap_zero (visited);
161 /* Push the first edge on to the stack. */
162 stack[sp++] = ENTRY_BLOCK_PTR->succ;
164 while (sp)
166 edge e;
167 basic_block src;
168 basic_block dest;
170 /* Look at the edge on the top of the stack. */
171 e = stack[sp - 1];
172 src = e->src;
173 dest = e->dest;
174 e->flags &= ~EDGE_DFS_BACK;
176 /* Check if the edge destination has been visited yet. */
177 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
179 /* Mark that we have visited the destination. */
180 SET_BIT (visited, dest->index);
182 pre[dest->index] = prenum++;
183 if (dest->succ)
185 /* Since the DEST node has been visited for the first
186 time, check its successors. */
187 stack[sp++] = dest->succ;
189 else
190 post[dest->index] = postnum++;
192 else
194 if (dest != EXIT_BLOCK_PTR && src != ENTRY_BLOCK_PTR
195 && pre[src->index] >= pre[dest->index]
196 && post[dest->index] == 0)
197 e->flags |= EDGE_DFS_BACK, found = true;
199 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
200 post[src->index] = postnum++;
202 if (e->succ_next)
203 stack[sp - 1] = e->succ_next;
204 else
205 sp--;
209 free (pre);
210 free (post);
211 free (stack);
212 sbitmap_free (visited);
214 return found;
217 /* Set the flag EDGE_CAN_FALLTHRU for edges that can be fallthru. */
219 void
220 set_edge_can_fallthru_flag ()
222 basic_block bb;
224 FOR_EACH_BB (bb)
226 edge e;
228 /* The FALLTHRU edge is also CAN_FALLTHRU edge. */
229 for (e = bb->succ; e; e = e->succ_next)
230 if (e->flags & EDGE_FALLTHRU)
231 e->flags |= EDGE_CAN_FALLTHRU;
233 /* If the BB ends with an invertable condjump all (2) edges are
234 CAN_FALLTHRU edges. */
235 if (!bb->succ || !bb->succ->succ_next || bb->succ->succ_next->succ_next)
236 continue;
237 if (!any_condjump_p (bb->end))
238 continue;
239 if (!invert_jump (bb->end, JUMP_LABEL (bb->end), 0))
240 continue;
241 invert_jump (bb->end, JUMP_LABEL (bb->end), 0);
242 bb->succ->flags |= EDGE_CAN_FALLTHRU;
243 bb->succ->succ_next->flags |= EDGE_CAN_FALLTHRU;
247 /* Return true if we need to add fake edge to exit.
248 Helper function for the flow_call_edges_add. */
250 static bool
251 need_fake_edge_p (insn)
252 rtx insn;
254 if (!INSN_P (insn))
255 return false;
257 if ((GET_CODE (insn) == CALL_INSN
258 && !SIBLING_CALL_P (insn)
259 && !find_reg_note (insn, REG_NORETURN, NULL)
260 && !find_reg_note (insn, REG_ALWAYS_RETURN, NULL)
261 && !CONST_OR_PURE_CALL_P (insn)))
262 return true;
264 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
265 && MEM_VOLATILE_P (PATTERN (insn)))
266 || (GET_CODE (PATTERN (insn)) == PARALLEL
267 && asm_noperands (insn) != -1
268 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
269 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
272 /* Add fake edges to the function exit for any non constant and non noreturn
273 calls, volatile inline assembly in the bitmap of blocks specified by
274 BLOCKS or to the whole CFG if BLOCKS is zero. Return the number of blocks
275 that were split.
277 The goal is to expose cases in which entering a basic block does not imply
278 that all subsequent instructions must be executed. */
281 flow_call_edges_add (blocks)
282 sbitmap blocks;
284 int i;
285 int blocks_split = 0;
286 int last_bb = last_basic_block;
287 bool check_last_block = false;
289 if (n_basic_blocks == 0)
290 return 0;
292 if (! blocks)
293 check_last_block = true;
294 else
295 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
297 /* In the last basic block, before epilogue generation, there will be
298 a fallthru edge to EXIT. Special care is required if the last insn
299 of the last basic block is a call because make_edge folds duplicate
300 edges, which would result in the fallthru edge also being marked
301 fake, which would result in the fallthru edge being removed by
302 remove_fake_edges, which would result in an invalid CFG.
304 Moreover, we can't elide the outgoing fake edge, since the block
305 profiler needs to take this into account in order to solve the minimal
306 spanning tree in the case that the call doesn't return.
308 Handle this by adding a dummy instruction in a new last basic block. */
309 if (check_last_block)
311 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
312 rtx insn = bb->end;
314 /* Back up past insns that must be kept in the same block as a call. */
315 while (insn != bb->head
316 && keep_with_call_p (insn))
317 insn = PREV_INSN (insn);
319 if (need_fake_edge_p (insn))
321 edge e;
323 for (e = bb->succ; e; e = e->succ_next)
324 if (e->dest == EXIT_BLOCK_PTR)
325 break;
327 insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
328 commit_edge_insertions ();
332 /* Now add fake edges to the function exit for any non constant
333 calls since there is no way that we can determine if they will
334 return or not... */
336 for (i = 0; i < last_bb; i++)
338 basic_block bb = BASIC_BLOCK (i);
339 rtx insn;
340 rtx prev_insn;
342 if (!bb)
343 continue;
345 if (blocks && !TEST_BIT (blocks, i))
346 continue;
348 for (insn = bb->end; ; insn = prev_insn)
350 prev_insn = PREV_INSN (insn);
351 if (need_fake_edge_p (insn))
353 edge e;
354 rtx split_at_insn = insn;
356 /* Don't split the block between a call and an insn that should
357 remain in the same block as the call. */
358 if (GET_CODE (insn) == CALL_INSN)
359 while (split_at_insn != bb->end
360 && keep_with_call_p (NEXT_INSN (split_at_insn)))
361 split_at_insn = NEXT_INSN (split_at_insn);
363 /* The handling above of the final block before the epilogue
364 should be enough to verify that there is no edge to the exit
365 block in CFG already. Calling make_edge in such case would
366 cause us to mark that edge as fake and remove it later. */
368 #ifdef ENABLE_CHECKING
369 if (split_at_insn == bb->end)
370 for (e = bb->succ; e; e = e->succ_next)
371 if (e->dest == EXIT_BLOCK_PTR)
372 abort ();
373 #endif
375 /* Note that the following may create a new basic block
376 and renumber the existing basic blocks. */
377 if (split_at_insn != bb->end)
379 e = split_block (bb, split_at_insn);
380 if (e)
381 blocks_split++;
384 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
387 if (insn == bb->head)
388 break;
392 if (blocks_split)
393 verify_flow_info ();
395 return blocks_split;
398 /* Find unreachable blocks. An unreachable block will have 0 in
399 the reachable bit in block->flags. A nonzero value indicates the
400 block is reachable. */
402 void
403 find_unreachable_blocks ()
405 edge e;
406 basic_block *tos, *worklist, bb;
408 tos = worklist =
409 (basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);
411 /* Clear all the reachability flags. */
413 FOR_EACH_BB (bb)
414 bb->flags &= ~BB_REACHABLE;
416 /* Add our starting points to the worklist. Almost always there will
417 be only one. It isn't inconceivable that we might one day directly
418 support Fortran alternate entry points. */
420 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
422 *tos++ = e->dest;
424 /* Mark the block reachable. */
425 e->dest->flags |= BB_REACHABLE;
428 /* Iterate: find everything reachable from what we've already seen. */
430 while (tos != worklist)
432 basic_block b = *--tos;
434 for (e = b->succ; e; e = e->succ_next)
435 if (!(e->dest->flags & BB_REACHABLE))
437 *tos++ = e->dest;
438 e->dest->flags |= BB_REACHABLE;
442 free (worklist);
445 /* Functions to access an edge list with a vector representation.
446 Enough data is kept such that given an index number, the
447 pred and succ that edge represents can be determined, or
448 given a pred and a succ, its index number can be returned.
449 This allows algorithms which consume a lot of memory to
450 represent the normally full matrix of edge (pred,succ) with a
451 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
452 wasted space in the client code due to sparse flow graphs. */
454 /* This functions initializes the edge list. Basically the entire
455 flowgraph is processed, and all edges are assigned a number,
456 and the data structure is filled in. */
458 struct edge_list *
459 create_edge_list ()
461 struct edge_list *elist;
462 edge e;
463 int num_edges;
464 int block_count;
465 basic_block bb;
467 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
469 num_edges = 0;
471 /* Determine the number of edges in the flow graph by counting successor
472 edges on each basic block. */
473 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
475 for (e = bb->succ; e; e = e->succ_next)
476 num_edges++;
479 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
480 elist->num_blocks = block_count;
481 elist->num_edges = num_edges;
482 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
484 num_edges = 0;
486 /* Follow successors of blocks, and register these edges. */
487 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
488 for (e = bb->succ; e; e = e->succ_next)
489 elist->index_to_edge[num_edges++] = e;
491 return elist;
494 /* This function free's memory associated with an edge list. */
496 void
497 free_edge_list (elist)
498 struct edge_list *elist;
500 if (elist)
502 free (elist->index_to_edge);
503 free (elist);
507 /* This function provides debug output showing an edge list. */
509 void
510 print_edge_list (f, elist)
511 FILE *f;
512 struct edge_list *elist;
514 int x;
516 fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
517 elist->num_blocks - 2, elist->num_edges);
519 for (x = 0; x < elist->num_edges; x++)
521 fprintf (f, " %-4d - edge(", x);
522 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
523 fprintf (f, "entry,");
524 else
525 fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
527 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
528 fprintf (f, "exit)\n");
529 else
530 fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
534 /* This function provides an internal consistency check of an edge list,
535 verifying that all edges are present, and that there are no
536 extra edges. */
538 void
539 verify_edge_list (f, elist)
540 FILE *f;
541 struct edge_list *elist;
543 int pred, succ, index;
544 edge e;
545 basic_block bb, p, s;
547 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
549 for (e = bb->succ; e; e = e->succ_next)
551 pred = e->src->index;
552 succ = e->dest->index;
553 index = EDGE_INDEX (elist, e->src, e->dest);
554 if (index == EDGE_INDEX_NO_EDGE)
556 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
557 continue;
560 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
561 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
562 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
563 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
564 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
565 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
569 /* We've verified that all the edges are in the list, now lets make sure
570 there are no spurious edges in the list. */
572 FOR_BB_BETWEEN (p, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
573 FOR_BB_BETWEEN (s, ENTRY_BLOCK_PTR->next_bb, NULL, next_bb)
575 int found_edge = 0;
577 for (e = p->succ; e; e = e->succ_next)
578 if (e->dest == s)
580 found_edge = 1;
581 break;
584 for (e = s->pred; e; e = e->pred_next)
585 if (e->src == p)
587 found_edge = 1;
588 break;
591 if (EDGE_INDEX (elist, p, s)
592 == EDGE_INDEX_NO_EDGE && found_edge != 0)
593 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
594 p->index, s->index);
595 if (EDGE_INDEX (elist, p, s)
596 != EDGE_INDEX_NO_EDGE && found_edge == 0)
597 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
598 p->index, s->index, EDGE_INDEX (elist, p, s));
602 /* This routine will determine what, if any, edge there is between
603 a specified predecessor and successor. */
606 find_edge_index (edge_list, pred, succ)
607 struct edge_list *edge_list;
608 basic_block pred, succ;
610 int x;
612 for (x = 0; x < NUM_EDGES (edge_list); x++)
613 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
614 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
615 return x;
617 return (EDGE_INDEX_NO_EDGE);
620 /* Dump the list of basic blocks in the bitmap NODES. */
622 void
623 flow_nodes_print (str, nodes, file)
624 const char *str;
625 const sbitmap nodes;
626 FILE *file;
628 int node;
630 if (! nodes)
631 return;
633 fprintf (file, "%s { ", str);
634 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
635 fputs ("}\n", file);
638 /* Dump the list of edges in the array EDGE_LIST. */
640 void
641 flow_edge_list_print (str, edge_list, num_edges, file)
642 const char *str;
643 const edge *edge_list;
644 int num_edges;
645 FILE *file;
647 int i;
649 if (! edge_list)
650 return;
652 fprintf (file, "%s { ", str);
653 for (i = 0; i < num_edges; i++)
654 fprintf (file, "%d->%d ", edge_list[i]->src->index,
655 edge_list[i]->dest->index);
657 fputs ("}\n", file);
661 /* This routine will remove any fake successor edges for a basic block.
662 When the edge is removed, it is also removed from whatever predecessor
663 list it is in. */
665 static void
666 remove_fake_successors (bb)
667 basic_block bb;
669 edge e;
671 for (e = bb->succ; e;)
673 edge tmp = e;
675 e = e->succ_next;
676 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
677 remove_edge (tmp);
681 /* This routine will remove all fake edges from the flow graph. If
682 we remove all fake successors, it will automatically remove all
683 fake predecessors. */
685 void
686 remove_fake_edges ()
688 basic_block bb;
690 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
691 remove_fake_successors (bb);
694 /* This function will add a fake edge between any block which has no
695 successors, and the exit block. Some data flow equations require these
696 edges to exist. */
698 void
699 add_noreturn_fake_exit_edges ()
701 basic_block bb;
703 FOR_EACH_BB (bb)
704 if (bb->succ == NULL)
705 make_single_succ_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
708 /* This function adds a fake edge between any infinite loops to the
709 exit block. Some optimizations require a path from each node to
710 the exit node.
712 See also Morgan, Figure 3.10, pp. 82-83.
714 The current implementation is ugly, not attempting to minimize the
715 number of inserted fake edges. To reduce the number of fake edges
716 to insert, add fake edges from _innermost_ loops containing only
717 nodes not reachable from the exit block. */
719 void
720 connect_infinite_loops_to_exit ()
722 basic_block unvisited_block;
723 struct depth_first_search_dsS dfs_ds;
725 /* Perform depth-first search in the reverse graph to find nodes
726 reachable from the exit block. */
727 flow_dfs_compute_reverse_init (&dfs_ds);
728 flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
730 /* Repeatedly add fake edges, updating the unreachable nodes. */
731 while (1)
733 unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
734 if (!unvisited_block)
735 break;
737 make_edge (unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
738 flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
741 flow_dfs_compute_reverse_finish (&dfs_ds);
742 return;
745 /* Compute reverse top sort order */
747 void
748 flow_reverse_top_sort_order_compute (rts_order)
749 int *rts_order;
751 edge *stack;
752 int sp;
753 int postnum = 0;
754 sbitmap visited;
756 /* Allocate stack for back-tracking up CFG. */
757 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
758 sp = 0;
760 /* Allocate bitmap to track nodes that have been visited. */
761 visited = sbitmap_alloc (last_basic_block);
763 /* None of the nodes in the CFG have been visited yet. */
764 sbitmap_zero (visited);
766 /* Push the first edge on to the stack. */
767 stack[sp++] = ENTRY_BLOCK_PTR->succ;
769 while (sp)
771 edge e;
772 basic_block src;
773 basic_block dest;
775 /* Look at the edge on the top of the stack. */
776 e = stack[sp - 1];
777 src = e->src;
778 dest = e->dest;
780 /* Check if the edge destination has been visited yet. */
781 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
783 /* Mark that we have visited the destination. */
784 SET_BIT (visited, dest->index);
786 if (dest->succ)
787 /* Since the DEST node has been visited for the first
788 time, check its successors. */
789 stack[sp++] = dest->succ;
790 else
791 rts_order[postnum++] = dest->index;
793 else
795 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
796 rts_order[postnum++] = src->index;
798 if (e->succ_next)
799 stack[sp - 1] = e->succ_next;
800 else
801 sp--;
805 free (stack);
806 sbitmap_free (visited);
809 /* Compute the depth first search order and store in the array
810 DFS_ORDER if nonzero, marking the nodes visited in VISITED. If
811 RC_ORDER is nonzero, return the reverse completion number for each
812 node. Returns the number of nodes visited. A depth first search
813 tries to get as far away from the starting point as quickly as
814 possible. */
817 flow_depth_first_order_compute (dfs_order, rc_order)
818 int *dfs_order;
819 int *rc_order;
821 edge *stack;
822 int sp;
823 int dfsnum = 0;
824 int rcnum = n_basic_blocks - 1;
825 sbitmap visited;
827 /* Allocate stack for back-tracking up CFG. */
828 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
829 sp = 0;
831 /* Allocate bitmap to track nodes that have been visited. */
832 visited = sbitmap_alloc (last_basic_block);
834 /* None of the nodes in the CFG have been visited yet. */
835 sbitmap_zero (visited);
837 /* Push the first edge on to the stack. */
838 stack[sp++] = ENTRY_BLOCK_PTR->succ;
840 while (sp)
842 edge e;
843 basic_block src;
844 basic_block dest;
846 /* Look at the edge on the top of the stack. */
847 e = stack[sp - 1];
848 src = e->src;
849 dest = e->dest;
851 /* Check if the edge destination has been visited yet. */
852 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
854 /* Mark that we have visited the destination. */
855 SET_BIT (visited, dest->index);
857 if (dfs_order)
858 dfs_order[dfsnum] = dest->index;
860 dfsnum++;
862 if (dest->succ)
863 /* Since the DEST node has been visited for the first
864 time, check its successors. */
865 stack[sp++] = dest->succ;
866 else if (rc_order)
867 /* There are no successors for the DEST node so assign
868 its reverse completion number. */
869 rc_order[rcnum--] = dest->index;
871 else
873 if (! e->succ_next && src != ENTRY_BLOCK_PTR
874 && rc_order)
875 /* There are no more successors for the SRC node
876 so assign its reverse completion number. */
877 rc_order[rcnum--] = src->index;
879 if (e->succ_next)
880 stack[sp - 1] = e->succ_next;
881 else
882 sp--;
886 free (stack);
887 sbitmap_free (visited);
889 /* The number of nodes visited should not be greater than
890 n_basic_blocks. */
891 if (dfsnum > n_basic_blocks)
892 abort ();
894 /* There are some nodes left in the CFG that are unreachable. */
895 if (dfsnum < n_basic_blocks)
896 abort ();
898 return dfsnum;
/* Node of the depth-first spanning tree built by
   flow_preorder_transversal_compute.  */
struct dfst_node
{
    unsigned nnodes;		/* Number of children currently recorded.  */
    struct dfst_node **node;	/* Array of child nodes.  */
    struct dfst_node *up;	/* Parent node.  */
};
908 /* Compute a preorder transversal ordering such that a sub-tree which
909 is the source of a cross edge appears before the sub-tree which is
910 the destination of the cross edge. This allows for easy detection
911 of all the entry blocks for a loop.
913 The ordering is compute by:
915 1) Generating a depth first spanning tree.
917 2) Walking the resulting tree from right to left. */
919 void
920 flow_preorder_transversal_compute (pot_order)
921 int *pot_order;
923 edge e;
924 edge *stack;
925 int i;
926 int max_successors;
927 int sp;
928 sbitmap visited;
929 struct dfst_node *node;
930 struct dfst_node *dfst;
931 basic_block bb;
933 /* Allocate stack for back-tracking up CFG. */
934 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
935 sp = 0;
937 /* Allocate the tree. */
938 dfst = (struct dfst_node *) xcalloc (last_basic_block,
939 sizeof (struct dfst_node));
941 FOR_EACH_BB (bb)
943 max_successors = 0;
944 for (e = bb->succ; e; e = e->succ_next)
945 max_successors++;
947 dfst[bb->index].node
948 = (max_successors
949 ? (struct dfst_node **) xcalloc (max_successors,
950 sizeof (struct dfst_node *))
951 : NULL);
954 /* Allocate bitmap to track nodes that have been visited. */
955 visited = sbitmap_alloc (last_basic_block);
957 /* None of the nodes in the CFG have been visited yet. */
958 sbitmap_zero (visited);
960 /* Push the first edge on to the stack. */
961 stack[sp++] = ENTRY_BLOCK_PTR->succ;
963 while (sp)
965 basic_block src;
966 basic_block dest;
968 /* Look at the edge on the top of the stack. */
969 e = stack[sp - 1];
970 src = e->src;
971 dest = e->dest;
973 /* Check if the edge destination has been visited yet. */
974 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
976 /* Mark that we have visited the destination. */
977 SET_BIT (visited, dest->index);
979 /* Add the destination to the preorder tree. */
980 if (src != ENTRY_BLOCK_PTR)
982 dfst[src->index].node[dfst[src->index].nnodes++]
983 = &dfst[dest->index];
984 dfst[dest->index].up = &dfst[src->index];
987 if (dest->succ)
988 /* Since the DEST node has been visited for the first
989 time, check its successors. */
990 stack[sp++] = dest->succ;
993 else if (e->succ_next)
994 stack[sp - 1] = e->succ_next;
995 else
996 sp--;
999 free (stack);
1000 sbitmap_free (visited);
1002 /* Record the preorder transversal order by
1003 walking the tree from right to left. */
1005 i = 0;
1006 node = &dfst[ENTRY_BLOCK_PTR->next_bb->index];
1007 pot_order[i++] = 0;
1009 while (node)
1011 if (node->nnodes)
1013 node = node->node[--node->nnodes];
1014 pot_order[i++] = node - dfst;
1016 else
1017 node = node->up;
1020 /* Free the tree. */
1022 for (i = 0; i < last_basic_block; i++)
1023 if (dfst[i].node)
1024 free (dfst[i].node);
1026 free (dfst);
1029 /* Compute the depth first search order on the _reverse_ graph and
1030 store in the array DFS_ORDER, marking the nodes visited in VISITED.
1031 Returns the number of nodes visited.
1033 The computation is split into three pieces:
1035 flow_dfs_compute_reverse_init () creates the necessary data
1036 structures.
1038 flow_dfs_compute_reverse_add_bb () adds a basic block to the data
1039 structures. The block will start the search.
1041 flow_dfs_compute_reverse_execute () continues (or starts) the
1042 search using the block on the top of the stack, stopping when the
1043 stack is empty.
1045 flow_dfs_compute_reverse_finish () destroys the necessary data
1046 structures.
1048 Thus, the user will probably call ..._init(), call ..._add_bb() to
1049 add a beginning basic block to the stack, call ..._execute(),
1050 possibly add another bb to the stack and again call ..._execute(),
1051 ..., and finally call _finish(). */
1053 /* Initialize the data structures used for depth-first search on the
1054 reverse graph. If INITIALIZE_STACK is nonzero, the exit block is
1055 added to the basic block stack. DATA is the current depth-first
1056 search context. If INITIALIZE_STACK is nonzero, there is an
1057 element on the stack. */
1059 static void
1060 flow_dfs_compute_reverse_init (data)
1061 depth_first_search_ds data;
1063 /* Allocate stack for back-tracking up CFG. */
1064 data->stack = (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
1065 * sizeof (basic_block));
1066 data->sp = 0;
1068 /* Allocate bitmap to track nodes that have been visited. */
1069 data->visited_blocks = sbitmap_alloc (last_basic_block - (INVALID_BLOCK + 1));
1071 /* None of the nodes in the CFG have been visited yet. */
1072 sbitmap_zero (data->visited_blocks);
1074 return;
1077 /* Add the specified basic block to the top of the dfs data
1078 structures. When the search continues, it will start at the
1079 block. */
1081 static void
1082 flow_dfs_compute_reverse_add_bb (data, bb)
1083 depth_first_search_ds data;
1084 basic_block bb;
1086 data->stack[data->sp++] = bb;
1087 SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
1090 /* Continue the depth-first search through the reverse graph starting with the
1091 block at the stack's top and ending when the stack is empty. Visited nodes
1092 are marked. Returns an unvisited basic block, or NULL if there is none
1093 available. */
1095 static basic_block
1096 flow_dfs_compute_reverse_execute (data)
1097 depth_first_search_ds data;
1099 basic_block bb;
1100 edge e;
1102 while (data->sp > 0)
1104 bb = data->stack[--data->sp];
1106 /* Perform depth-first search on adjacent vertices. */
1107 for (e = bb->pred; e; e = e->pred_next)
1108 if (!TEST_BIT (data->visited_blocks,
1109 e->src->index - (INVALID_BLOCK + 1)))
1110 flow_dfs_compute_reverse_add_bb (data, e->src);
1113 /* Determine if there are unvisited basic blocks. */
1114 FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR, NULL, prev_bb)
1115 if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1)))
1116 return bb;
1118 return NULL;
1121 /* Destroy the data structures needed for depth-first search on the
1122 reverse graph. */
1124 static void
1125 flow_dfs_compute_reverse_finish (data)
1126 depth_first_search_ds data;
1128 free (data->stack);
1129 sbitmap_free (data->visited_blocks);
1132 /* Performs dfs search from BB over vertices satisfying PREDICATE;
1133 if REVERSE, go against direction of edges. Returns number of blocks
1134 found and their list in RSLT. RSLT can contain at most RSLT_MAX items. */
1136 dfs_enumerate_from (bb, reverse, predicate, rslt, rslt_max, data)
1137 basic_block bb;
1138 int reverse;
1139 bool (*predicate) PARAMS ((basic_block, void *));
1140 basic_block *rslt;
1141 int rslt_max;
1142 void *data;
1144 basic_block *st, lbb;
1145 int sp = 0, tv = 0;
1147 st = xcalloc (rslt_max, sizeof (basic_block));
1148 rslt[tv++] = st[sp++] = bb;
1149 bb->flags |= BB_VISITED;
1150 while (sp)
1152 edge e;
1153 lbb = st[--sp];
1154 if (reverse)
1156 for (e = lbb->pred; e; e = e->pred_next)
1157 if (!(e->src->flags & BB_VISITED) && predicate (e->src, data))
1159 if (tv == rslt_max)
1160 abort ();
1161 rslt[tv++] = st[sp++] = e->src;
1162 e->src->flags |= BB_VISITED;
1165 else
1167 for (e = lbb->succ; e; e = e->succ_next)
1168 if (!(e->dest->flags & BB_VISITED) && predicate (e->dest, data))
1170 if (tv == rslt_max)
1171 abort ();
1172 rslt[tv++] = st[sp++] = e->dest;
1173 e->dest->flags |= BB_VISITED;
1177 free (st);
1178 for (sp = 0; sp < tv; sp++)
1179 rslt[sp]->flags &= ~BB_VISITED;
1180 return tv;