re PR target/8343 ([m68k] [3.2 regression] m68k-elf/rtems ICE at instantiate_virtual_...
[official-gcc.git] / gcc / cfganal.c
bloba7878178fbd7fbb31266f9dd47830fced72199f0
1 /* Control flow graph analysis code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /* This file contains various simple utilities to analyze the CFG. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "rtl.h"
28 #include "hard-reg-set.h"
29 #include "basic-block.h"
30 #include "insn-config.h"
31 #include "recog.h"
32 #include "toplev.h"
33 #include "tm_p.h"
 35 /* Store the data structures necessary for depth-first search. */
 36 struct depth_first_search_dsS {
 37 /* stack for backtracking during the algorithm */
 38 basic_block *stack;
 40 /* number of edges in the stack. That is, positions 0, ..., sp-1
 41 have edges. */
 42 unsigned int sp;
 44 /* record of basic blocks already seen by depth-first search */
 45 sbitmap visited_blocks;
/* Opaque handle passed between the flow_dfs_compute_reverse_* routines
   below; callers allocate the struct and hand a pointer around. */
 47 typedef struct depth_first_search_dsS *depth_first_search_ds;
/* Forward declarations of file-local helpers.  PARAMS is the old
   pre-ISO prototype macro used throughout GCC of this vintage. */
 49 static void flow_dfs_compute_reverse_init
 50 PARAMS ((depth_first_search_ds));
 51 static void flow_dfs_compute_reverse_add_bb
 52 PARAMS ((depth_first_search_ds, basic_block));
 53 static basic_block flow_dfs_compute_reverse_execute
 54 PARAMS ((depth_first_search_ds));
 55 static void flow_dfs_compute_reverse_finish
 56 PARAMS ((depth_first_search_ds));
 57 static void remove_fake_successors PARAMS ((basic_block));
 58 static bool need_fake_edge_p PARAMS ((rtx));
 59 static bool flow_active_insn_p PARAMS ((rtx));
 61 /* Like active_insn_p, except keep the return value clobber around
 62 even after reload. */
 64 static bool
 65 flow_active_insn_p (insn)
 66 rtx insn;
 68 if (active_insn_p (insn))
 69 return true;
 71 /* A clobber of the function return value exists for buggy
 72 programs that fail to return a value. Its effect is to
 73 keep the return value from being live across the entire
 74 function. If we allow it to be skipped, we introduce the
 75 possibility for register livetime aborts. */
/* Treat (clobber (reg:FUNCTION_VALUE)) as active even though
   active_insn_p rejected it above.  */
 76 if (GET_CODE (PATTERN (insn)) == CLOBBER
 77 && GET_CODE (XEXP (PATTERN (insn), 0)) == REG
 78 && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
 79 return true;
 81 return false;
 84 /* Return true if the block has no effect and only forwards control flow to
 85 its single destination. */
 87 bool
 88 forwarder_block_p (bb)
 89 basic_block bb;
 91 rtx insn;
/* ENTRY/EXIT never forward; a forwarder must have exactly one
   successor edge (succ non-null, succ->succ_next null).  */
 93 if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
 94 || !bb->succ || bb->succ->succ_next)
 95 return false;
/* Any active insn before the last one disqualifies the block.  */
 97 for (insn = bb->head; insn != bb->end; insn = NEXT_INSN (insn))
 98 if (INSN_P (insn) && flow_active_insn_p (insn))
 99 return false;
/* The final insn may only be inactive or a simple unconditional jump.  */
 101 return (!INSN_P (insn)
 102 || (GET_CODE (insn) == JUMP_INSN && simplejump_p (insn))
 103 || !flow_active_insn_p (insn));
 106 /* Return nonzero if we can reach target from src by falling through. */
 108 bool
 109 can_fallthru (src, target)
 110 basic_block src, target;
 112 rtx insn = src->end;
 113 rtx insn2 = target->head;
/* Fallthru requires TARGET to be the textually next block.  */
 115 if (src->next_bb != target)
 116 return 0;
 118 if (!active_insn_p (insn2))
 119 insn2 = next_active_insn (insn2);
 121 /* ??? Later we may add code to move jump tables offline. */
/* True iff no active insn (e.g. a barrier or jump) intervenes between
   the end of SRC and the first active insn of TARGET.  */
 122 return next_active_insn (insn) == insn2;
 125 /* Mark the back edges in DFS traversal.
 126 Return nonzero if a loop (natural or otherwise) is present.
 127 Inspired by Depth_First_Search_PP described in:
 129 Advanced Compiler Design and Implementation
 130 Steven Muchnick
 131 Morgan Kaufmann, 1997
 133 and heavily borrowed from flow_depth_first_order_compute. */
 135 bool
 136 mark_dfs_back_edges ()
 138 edge *stack;
 139 int *pre;
 140 int *post;
 141 int sp;
 142 int prenum = 1;
 143 int postnum = 1;
 144 sbitmap visited;
 145 bool found = false;
 147 /* Allocate the preorder and postorder number arrays. */
/* xcalloc zero-fills, so post[x] == 0 below means "subtree of X not
   yet completely processed".  */
 148 pre = (int *) xcalloc (last_basic_block, sizeof (int));
 149 post = (int *) xcalloc (last_basic_block, sizeof (int));
 151 /* Allocate stack for back-tracking up CFG. */
 152 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
 153 sp = 0;
 155 /* Allocate bitmap to track nodes that have been visited. */
 156 visited = sbitmap_alloc (last_basic_block);
 158 /* None of the nodes in the CFG have been visited yet. */
 159 sbitmap_zero (visited);
 161 /* Push the first edge on to the stack. */
 162 stack[sp++] = ENTRY_BLOCK_PTR->succ;
 164 while (sp)
 166 edge e;
 167 basic_block src;
 168 basic_block dest;
 170 /* Look at the edge on the top of the stack. */
 171 e = stack[sp - 1];
 172 src = e->src;
 173 dest = e->dest;
/* Clear any stale back-edge marking; it is recomputed below.  */
 174 e->flags &= ~EDGE_DFS_BACK;
 176 /* Check if the edge destination has been visited yet. */
 177 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
 179 /* Mark that we have visited the destination. */
 180 SET_BIT (visited, dest->index);
 182 pre[dest->index] = prenum++;
 183 if (dest->succ)
 185 /* Since the DEST node has been visited for the first
 186 time, check its successors. */
 187 stack[sp++] = dest->succ;
 189 else
 190 post[dest->index] = postnum++;
 192 else
/* E is a back edge iff DEST is an ancestor of SRC in the DFS tree:
   DEST was pre-numbered no later than SRC and DEST's subtree is
   still open (post number not yet assigned).  */
 194 if (dest != EXIT_BLOCK_PTR && src != ENTRY_BLOCK_PTR
 195 && pre[src->index] >= pre[dest->index]
 196 && post[dest->index] == 0)
 197 e->flags |= EDGE_DFS_BACK, found = true;
 199 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
 200 post[src->index] = postnum++;
/* Advance to the next sibling edge, or backtrack.  */
 202 if (e->succ_next)
 203 stack[sp - 1] = e->succ_next;
 204 else
 205 sp--;
 209 free (pre);
 210 free (post);
 211 free (stack);
 212 sbitmap_free (visited);
 214 return found;
 217 /* Set the flag EDGE_CAN_FALLTHRU for edges that can be fallthru. */
 219 void
 220 set_edge_can_fallthru_flag ()
 222 basic_block bb;
 224 FOR_EACH_BB (bb)
 226 edge e;
 228 /* The FALLTHRU edge is also CAN_FALLTHRU edge. */
 229 for (e = bb->succ; e; e = e->succ_next)
 230 if (e->flags & EDGE_FALLTHRU)
 231 e->flags |= EDGE_CAN_FALLTHRU;
 233 /* If the BB ends with an invertable condjump all (2) edges are
 234 CAN_FALLTHRU edges. */
/* Require exactly two successor edges.  */
 235 if (!bb->succ || !bb->succ->succ_next || bb->succ->succ_next->succ_next)
 236 continue;
 237 if (!any_condjump_p (bb->end))
 238 continue;
/* invert_jump both tests invertibility and performs the inversion;
   the second call below undoes it, leaving the insn as it was.  */
 239 if (!invert_jump (bb->end, JUMP_LABEL (bb->end), 0))
 240 continue;
 241 invert_jump (bb->end, JUMP_LABEL (bb->end), 0);
 242 bb->succ->flags |= EDGE_CAN_FALLTHRU;
 243 bb->succ->succ_next->flags |= EDGE_CAN_FALLTHRU;
 247 /* Return true if we need to add fake edge to exit.
 248 Helper function for the flow_call_edges_add. */
 250 static bool
 251 need_fake_edge_p (insn)
 252 rtx insn;
 254 if (!INSN_P (insn))
 255 return false;
/* A call needs a fake edge unless it provably returns normally:
   sibling calls, noreturn/always-return notes and const/pure calls
   are exempt.  */
 257 if ((GET_CODE (insn) == CALL_INSN
 258 && !SIBLING_CALL_P (insn)
 259 && !find_reg_note (insn, REG_NORETURN, NULL)
 260 && !find_reg_note (insn, REG_ALWAYS_RETURN, NULL)
 261 && !CONST_OR_PURE_CALL_P (insn)))
 262 return true;
/* Volatile inline assembly (standalone, in a PARALLEL, or ASM_INPUT)
   may also fail to fall through.  */
 264 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
 265 && MEM_VOLATILE_P (PATTERN (insn)))
 266 || (GET_CODE (PATTERN (insn)) == PARALLEL
 267 && asm_noperands (insn) != -1
 268 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
 269 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
 272 /* Add fake edges to the function exit for any non constant and non noreturn
 273 calls, volatile inline assembly in the bitmap of blocks specified by
 274 BLOCKS or to the whole CFG if BLOCKS is zero. Return the number of blocks
 275 that were split.
 277 The goal is to expose cases in which entering a basic block does not imply
 278 that all subsequent instructions must be executed. */
 281 flow_call_edges_add (blocks)
 282 sbitmap blocks;
 284 int i;
 285 int blocks_split = 0;
 286 int last_bb = last_basic_block;
 287 bool check_last_block = false;
 289 if (n_basic_blocks == 0)
 290 return 0;
 292 if (! blocks)
 293 check_last_block = true;
 294 else
 295 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
 297 /* In the last basic block, before epilogue generation, there will be
 298 a fallthru edge to EXIT. Special care is required if the last insn
 299 of the last basic block is a call because make_edge folds duplicate
 300 edges, which would result in the fallthru edge also being marked
 301 fake, which would result in the fallthru edge being removed by
 302 remove_fake_edges, which would result in an invalid CFG.
 304 Moreover, we can't elide the outgoing fake edge, since the block
 305 profiler needs to take this into account in order to solve the minimal
 306 spanning tree in the case that the call doesn't return.
 308 Handle this by adding a dummy instruction in a new last basic block. */
 309 if (check_last_block)
 311 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
 312 rtx insn = bb->end;
 314 /* Back up past insns that must be kept in the same block as a call. */
 315 while (insn != bb->head
 316 && keep_with_call_p (insn))
 317 insn = PREV_INSN (insn);
/* If a fake edge would be needed here, force a no-op (USE const0)
   onto the edge to EXIT so the fallthru edge survives.  */
 319 if (need_fake_edge_p (insn))
 321 edge e;
 323 for (e = bb->succ; e; e = e->succ_next)
 324 if (e->dest == EXIT_BLOCK_PTR)
 326 insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
 327 commit_edge_insertions ();
 328 break;
 333 /* Now add fake edges to the function exit for any non constant
 334 calls since there is no way that we can determine if they will
 335 return or not... */
 337 for (i = 0; i < last_bb; i++)
 339 basic_block bb = BASIC_BLOCK (i);
 340 rtx insn;
 341 rtx prev_insn;
/* Blocks may have been deleted; BASIC_BLOCK can be null.  */
 343 if (!bb)
 344 continue;
 346 if (blocks && !TEST_BIT (blocks, i))
 347 continue;
/* Walk the block's insns backwards; PREV_INSN is captured before any
   split so the walk is unaffected by renumbering.  */
 349 for (insn = bb->end; ; insn = prev_insn)
 351 prev_insn = PREV_INSN (insn);
 352 if (need_fake_edge_p (insn))
 354 edge e;
 355 rtx split_at_insn = insn;
 357 /* Don't split the block between a call and an insn that should
 358 remain in the same block as the call. */
 359 if (GET_CODE (insn) == CALL_INSN)
 360 while (split_at_insn != bb->end
 361 && keep_with_call_p (NEXT_INSN (split_at_insn)))
 362 split_at_insn = NEXT_INSN (split_at_insn);
 364 /* The handling above of the final block before the epilogue
 365 should be enough to verify that there is no edge to the exit
 366 block in CFG already. Calling make_edge in such case would
 367 cause us to mark that edge as fake and remove it later. */
 369 #ifdef ENABLE_CHECKING
 370 if (split_at_insn == bb->end)
 371 for (e = bb->succ; e; e = e->succ_next)
 372 if (e->dest == EXIT_BLOCK_PTR)
 373 abort ();
 374 #endif
 376 /* Note that the following may create a new basic block
 377 and renumber the existing basic blocks. */
 378 if (split_at_insn != bb->end)
 380 e = split_block (bb, split_at_insn);
 381 if (e)
 382 blocks_split++;
 385 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
 388 if (insn == bb->head)
 389 break;
/* If any block was split, re-check CFG consistency.  */
 393 if (blocks_split)
 394 verify_flow_info ();
 396 return blocks_split;
 399 /* Find unreachable blocks. An unreachable block will have 0 in
 400 the reachable bit in block->flags. A nonzero value indicates the
 401 block is reachable. */
 403 void
 404 find_unreachable_blocks ()
 406 edge e;
 407 basic_block *tos, *worklist, bb;
/* TOS is the stack top within WORKLIST; the stack can hold at most
   one entry per basic block since a block is pushed only once.  */
 409 tos = worklist =
 410 (basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);
 412 /* Clear all the reachability flags. */
 414 FOR_EACH_BB (bb)
 415 bb->flags &= ~BB_REACHABLE;
 417 /* Add our starting points to the worklist. Almost always there will
 418 be only one. It isn't inconceivable that we might one day directly
 419 support Fortran alternate entry points. */
 421 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
 423 *tos++ = e->dest;
 425 /* Mark the block reachable. */
 426 e->dest->flags |= BB_REACHABLE;
 429 /* Iterate: find everything reachable from what we've already seen. */
 431 while (tos != worklist)
 433 basic_block b = *--tos;
/* Push each successor exactly once, marking it as it is pushed.  */
 435 for (e = b->succ; e; e = e->succ_next)
 436 if (!(e->dest->flags & BB_REACHABLE))
 438 *tos++ = e->dest;
 439 e->dest->flags |= BB_REACHABLE;
 443 free (worklist);
 446 /* Functions to access an edge list with a vector representation.
 447 Enough data is kept such that given an index number, the
 448 pred and succ that edge represents can be determined, or
 449 given a pred and a succ, its index number can be returned.
 450 This allows algorithms which consume a lot of memory to
 451 represent the normally full matrix of edge (pred,succ) with a
 452 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
 453 wasted space in the client code due to sparse flow graphs. */
 455 /* This functions initializes the edge list. Basically the entire
 456 flowgraph is processed, and all edges are assigned a number,
 457 and the data structure is filled in. */
 459 struct edge_list *
 460 create_edge_list ()
 462 struct edge_list *elist;
 463 edge e;
 464 int num_edges;
 465 int block_count;
 466 basic_block bb;
 468 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
 470 num_edges = 0;
 472 /* Determine the number of edges in the flow graph by counting successor
 473 edges on each basic block. */
/* First pass: count edges, iterating from ENTRY through all real
   blocks (EXIT has no successors so it is excluded).  */
 474 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
 476 for (e = bb->succ; e; e = e->succ_next)
 477 num_edges++;
 480 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
 481 elist->num_blocks = block_count;
 482 elist->num_edges = num_edges;
 483 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
 485 num_edges = 0;
 487 /* Follow successors of blocks, and register these edges. */
/* Second pass: same iteration order, so indices are stable.  */
 488 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
 489 for (e = bb->succ; e; e = e->succ_next)
 490 elist->index_to_edge[num_edges++] = e;
 492 return elist;
 495 /* This function free's memory associated with an edge list. */
 497 void
 498 free_edge_list (elist)
 499 struct edge_list *elist;
/* Accepts a null ELIST as a no-op.  */
 501 if (elist)
 503 free (elist->index_to_edge);
 504 free (elist);
 508 /* This function provides debug output showing an edge list. */
 510 void
 511 print_edge_list (f, elist)
 512 FILE *f;
 513 struct edge_list *elist;
 515 int x;
/* num_blocks includes the entry and exit blocks, hence the "- 2".  */
 517 fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
 518 elist->num_blocks - 2, elist->num_edges);
 520 for (x = 0; x < elist->num_edges; x++)
 522 fprintf (f, " %-4d - edge(", x);
 523 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
 524 fprintf (f, "entry,");
 525 else
 526 fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
 528 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
 529 fprintf (f, "exit)\n");
 530 else
 531 fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
 535 /* This function provides an internal consistency check of an edge list,
 536 verifying that all edges are present, and that there are no
 537 extra edges. */
 539 void
 540 verify_edge_list (f, elist)
 541 FILE *f;
 542 struct edge_list *elist;
 544 int pred, succ, index;
 545 edge e;
 546 basic_block bb, p, s;
/* Pass 1: every CFG edge must have an index, and the index must map
   back to the same pred/succ pair.  */
 548 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
 550 for (e = bb->succ; e; e = e->succ_next)
 552 pred = e->src->index;
 553 succ = e->dest->index;
 554 index = EDGE_INDEX (elist, e->src, e->dest);
 555 if (index == EDGE_INDEX_NO_EDGE)
 557 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
 558 continue;
 561 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
 562 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
 563 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
 564 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
 565 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
 566 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
 570 /* We've verified that all the edges are in the list, now lets make sure
 571 there are no spurious edges in the list. */
/* Pass 2: O(n^2) over all (pred, succ) block pairs -- debug-only
   code, so the cost is accepted.  An edge "found" in either the
   successor or the predecessor list must agree with EDGE_INDEX.  */
 573 FOR_BB_BETWEEN (p, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
 574 FOR_BB_BETWEEN (s, ENTRY_BLOCK_PTR->next_bb, NULL, next_bb)
 576 int found_edge = 0;
 578 for (e = p->succ; e; e = e->succ_next)
 579 if (e->dest == s)
 581 found_edge = 1;
 582 break;
 585 for (e = s->pred; e; e = e->pred_next)
 586 if (e->src == p)
 588 found_edge = 1;
 589 break;
 592 if (EDGE_INDEX (elist, p, s)
 593 == EDGE_INDEX_NO_EDGE && found_edge != 0)
 594 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
 595 p->index, s->index);
 596 if (EDGE_INDEX (elist, p, s)
 597 != EDGE_INDEX_NO_EDGE && found_edge == 0)
 598 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
 599 p->index, s->index, EDGE_INDEX (elist, p, s));
 603 /* This routine will determine what, if any, edge there is between
 604 a specified predecessor and successor. */
 607 find_edge_index (edge_list, pred, succ)
 608 struct edge_list *edge_list;
 609 basic_block pred, succ;
 611 int x;
/* Linear scan of the edge vector; returns the index of the first
   matching (PRED, SUCC) pair, or EDGE_INDEX_NO_EDGE if absent.  */
 613 for (x = 0; x < NUM_EDGES (edge_list); x++)
 614 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
 615 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
 616 return x;
 618 return (EDGE_INDEX_NO_EDGE);
 621 /* Dump the list of basic blocks in the bitmap NODES. */
 623 void
 624 flow_nodes_print (str, nodes, file)
 625 const char *str;
 626 const sbitmap nodes;
 627 FILE *file;
 629 int node;
/* A null bitmap prints nothing at all.  */
 631 if (! nodes)
 632 return;
 634 fprintf (file, "%s { ", str);
 635 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
 636 fputs ("}\n", file);
 639 /* Dump the list of edges in the array EDGE_LIST. */
 641 void
 642 flow_edge_list_print (str, edge_list, num_edges, file)
 643 const char *str;
 644 const edge *edge_list;
 645 int num_edges;
 646 FILE *file;
 648 int i;
/* A null array prints nothing at all.  */
 650 if (! edge_list)
 651 return;
 653 fprintf (file, "%s { ", str);
 654 for (i = 0; i < num_edges; i++)
 655 fprintf (file, "%d->%d ", edge_list[i]->src->index,
 656 edge_list[i]->dest->index);
 658 fputs ("}\n", file);
 662 /* This routine will remove any fake successor edges for a basic block.
 663 When the edge is removed, it is also removed from whatever predecessor
 664 list it is in. */
 666 static void
 667 remove_fake_successors (bb)
 668 basic_block bb;
 670 edge e;
/* Advance E before possibly removing TMP, since remove_edge frees
   the edge being removed.  */
 672 for (e = bb->succ; e;)
 674 edge tmp = e;
 676 e = e->succ_next;
 677 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
 678 remove_edge (tmp);
 682 /* This routine will remove all fake edges from the flow graph. If
 683 we remove all fake successors, it will automatically remove all
 684 fake predecessors. */
 686 void
 687 remove_fake_edges ()
 689 basic_block bb;
/* Include ENTRY in the walk; EXIT has no successors to clean.  */
 691 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
 692 remove_fake_successors (bb);
 695 /* This function will add a fake edge between any block which has no
 696 successors, and the exit block. Some data flow equations require these
 697 edges to exist. */
 699 void
 700 add_noreturn_fake_exit_edges ()
 702 basic_block bb;
 704 FOR_EACH_BB (bb)
 705 if (bb->succ == NULL)
 706 make_single_succ_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
 709 /* This function adds a fake edge between any infinite loops to the
 710 exit block. Some optimizations require a path from each node to
 711 the exit node.
 713 See also Morgan, Figure 3.10, pp. 82-83.
 715 The current implementation is ugly, not attempting to minimize the
 716 number of inserted fake edges. To reduce the number of fake edges
 717 to insert, add fake edges from _innermost_ loops containing only
 718 nodes not reachable from the exit block. */
 720 void
 721 connect_infinite_loops_to_exit ()
 723 basic_block unvisited_block;
 724 struct depth_first_search_dsS dfs_ds;
 726 /* Perform depth-first search in the reverse graph to find nodes
 727 reachable from the exit block. */
 728 flow_dfs_compute_reverse_init (&dfs_ds);
 729 flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
 731 /* Repeatedly add fake edges, updating the unreachable nodes. */
/* Each iteration connects one not-yet-reached block to EXIT, then
   resumes the reverse DFS from it; terminates when every block can
   reach EXIT.  */
 732 while (1)
 734 unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
 735 if (!unvisited_block)
 736 break;
 738 make_edge (unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
 739 flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
 742 flow_dfs_compute_reverse_finish (&dfs_ds);
 743 return;
 746 /* Compute reverse top sort order */
/* Fills RTS_ORDER (caller-allocated, at least n_basic_blocks slots)
   with block indices in order of DFS completion, i.e. a reverse
   topological sort when back edges are ignored.  */
 748 void
 749 flow_reverse_top_sort_order_compute (rts_order)
 750 int *rts_order;
 752 edge *stack;
 753 int sp;
 754 int postnum = 0;
 755 sbitmap visited;
 757 /* Allocate stack for back-tracking up CFG. */
 758 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
 759 sp = 0;
 761 /* Allocate bitmap to track nodes that have been visited. */
 762 visited = sbitmap_alloc (last_basic_block);
 764 /* None of the nodes in the CFG have been visited yet. */
 765 sbitmap_zero (visited);
 767 /* Push the first edge on to the stack. */
 768 stack[sp++] = ENTRY_BLOCK_PTR->succ;
 770 while (sp)
 772 edge e;
 773 basic_block src;
 774 basic_block dest;
 776 /* Look at the edge on the top of the stack. */
 777 e = stack[sp - 1];
 778 src = e->src;
 779 dest = e->dest;
 781 /* Check if the edge destination has been visited yet. */
 782 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
 784 /* Mark that we have visited the destination. */
 785 SET_BIT (visited, dest->index);
 787 if (dest->succ)
 788 /* Since the DEST node has been visited for the first
 789 time, check its successors. */
 790 stack[sp++] = dest->succ;
 791 else
/* Leaf node: it completes immediately.  */
 792 rts_order[postnum++] = dest->index;
 794 else
/* Last outgoing edge of SRC processed: SRC's subtree is complete.  */
 796 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
 797 rts_order[postnum++] = src->index;
 799 if (e->succ_next)
 800 stack[sp - 1] = e->succ_next;
 801 else
 802 sp--;
 806 free (stack);
 807 sbitmap_free (visited);
 810 /* Compute the depth first search order and store in the array
 811 DFS_ORDER if nonzero, marking the nodes visited in VISITED. If
 812 RC_ORDER is nonzero, return the reverse completion number for each
 813 node. Returns the number of nodes visited. A depth first search
 814 tries to get as far away from the starting point as quickly as
 815 possible. */
 818 flow_depth_first_order_compute (dfs_order, rc_order)
 819 int *dfs_order;
 820 int *rc_order;
 822 edge *stack;
 823 int sp;
 824 int dfsnum = 0;
/* rc_order is filled from the back, so completion order comes out
   reversed.  */
 825 int rcnum = n_basic_blocks - 1;
 826 sbitmap visited;
 828 /* Allocate stack for back-tracking up CFG. */
 829 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
 830 sp = 0;
 832 /* Allocate bitmap to track nodes that have been visited. */
 833 visited = sbitmap_alloc (last_basic_block);
 835 /* None of the nodes in the CFG have been visited yet. */
 836 sbitmap_zero (visited);
 838 /* Push the first edge on to the stack. */
 839 stack[sp++] = ENTRY_BLOCK_PTR->succ;
 841 while (sp)
 843 edge e;
 844 basic_block src;
 845 basic_block dest;
 847 /* Look at the edge on the top of the stack. */
 848 e = stack[sp - 1];
 849 src = e->src;
 850 dest = e->dest;
 852 /* Check if the edge destination has been visited yet. */
 853 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
 855 /* Mark that we have visited the destination. */
 856 SET_BIT (visited, dest->index);
 858 if (dfs_order)
 859 dfs_order[dfsnum] = dest->index;
 861 dfsnum++;
 863 if (dest->succ)
 864 /* Since the DEST node has been visited for the first
 865 time, check its successors. */
 866 stack[sp++] = dest->succ;
 867 else if (rc_order)
 868 /* There are no successors for the DEST node so assign
 869 its reverse completion number. */
 870 rc_order[rcnum--] = dest->index;
 872 else
 874 if (! e->succ_next && src != ENTRY_BLOCK_PTR
 875 && rc_order)
 876 /* There are no more successors for the SRC node
 877 so assign its reverse completion number. */
 878 rc_order[rcnum--] = src->index;
 880 if (e->succ_next)
 881 stack[sp - 1] = e->succ_next;
 882 else
 883 sp--;
 887 free (stack);
 888 sbitmap_free (visited);
 890 /* The number of nodes visited should not be greater than
 891 n_basic_blocks. */
 892 if (dfsnum > n_basic_blocks)
 893 abort ();
 895 /* There are some nodes left in the CFG that are unreachable. */
/* Callers are expected to have removed unreachable blocks first;
   finding any here is a hard error.  */
 896 if (dfsnum < n_basic_blocks)
 897 abort ();
 899 return dfsnum;
/* A node of the depth-first spanning tree built by
   flow_preorder_transversal_compute: NNODES children in NODE[],
   with UP pointing at the parent.  */
 902 struct dfst_node
 904 unsigned nnodes;
 905 struct dfst_node **node;
 906 struct dfst_node *up;
 909 /* Compute a preorder transversal ordering such that a sub-tree which
 910 is the source of a cross edge appears before the sub-tree which is
 911 the destination of the cross edge. This allows for easy detection
 912 of all the entry blocks for a loop.
 914 The ordering is compute by:
 916 1) Generating a depth first spanning tree.
 918 2) Walking the resulting tree from right to left. */
 920 void
 921 flow_preorder_transversal_compute (pot_order)
 922 int *pot_order;
 924 edge e;
 925 edge *stack;
 926 int i;
 927 int max_successors;
 928 int sp;
 929 sbitmap visited;
 930 struct dfst_node *node;
 931 struct dfst_node *dfst;
 932 basic_block bb;
 934 /* Allocate stack for back-tracking up CFG. */
 935 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
 936 sp = 0;
 938 /* Allocate the tree. */
 939 dfst = (struct dfst_node *) xcalloc (last_basic_block,
 940 sizeof (struct dfst_node));
/* Size each node's child array by the block's successor count; a
   spanning-tree node can never have more children than CFG
   successors.  */
 942 FOR_EACH_BB (bb)
 944 max_successors = 0;
 945 for (e = bb->succ; e; e = e->succ_next)
 946 max_successors++;
 948 dfst[bb->index].node
 949 = (max_successors
 950 ? (struct dfst_node **) xcalloc (max_successors,
 951 sizeof (struct dfst_node *))
 952 : NULL);
 955 /* Allocate bitmap to track nodes that have been visited. */
 956 visited = sbitmap_alloc (last_basic_block);
 958 /* None of the nodes in the CFG have been visited yet. */
 959 sbitmap_zero (visited);
 961 /* Push the first edge on to the stack. */
 962 stack[sp++] = ENTRY_BLOCK_PTR->succ;
 964 while (sp)
 966 basic_block src;
 967 basic_block dest;
 969 /* Look at the edge on the top of the stack. */
 970 e = stack[sp - 1];
 971 src = e->src;
 972 dest = e->dest;
 974 /* Check if the edge destination has been visited yet. */
 975 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
 977 /* Mark that we have visited the destination. */
 978 SET_BIT (visited, dest->index);
 980 /* Add the destination to the preorder tree. */
 981 if (src != ENTRY_BLOCK_PTR)
 983 dfst[src->index].node[dfst[src->index].nnodes++]
 984 = &dfst[dest->index];
 985 dfst[dest->index].up = &dfst[src->index];
 988 if (dest->succ)
 989 /* Since the DEST node has been visited for the first
 990 time, check its successors. */
 991 stack[sp++] = dest->succ;
 994 else if (e->succ_next)
 995 stack[sp - 1] = e->succ_next;
 996 else
 997 sp--;
 1000 free (stack);
 1001 sbitmap_free (visited);
 1003 /* Record the preorder transversal order by
 1004 walking the tree from right to left. */
/* Start at the first real block (slot 0 of POT_ORDER); children are
   popped from the back of each node's array, giving the
   right-to-left walk.  Node indices are recovered by pointer
   arithmetic (node - dfst).  */
 1006 i = 0;
 1007 node = &dfst[ENTRY_BLOCK_PTR->next_bb->index];
 1008 pot_order[i++] = 0;
 1010 while (node)
 1012 if (node->nnodes)
 1014 node = node->node[--node->nnodes];
 1015 pot_order[i++] = node - dfst;
 1017 else
 1018 node = node->up;
 1021 /* Free the tree. */
 1023 for (i = 0; i < last_basic_block; i++)
 1024 if (dfst[i].node)
 1025 free (dfst[i].node);
 1027 free (dfst);
 1030 /* Compute the depth first search order on the _reverse_ graph and
 1031 store in the array DFS_ORDER, marking the nodes visited in VISITED.
 1032 Returns the number of nodes visited.
 1034 The computation is split into three pieces:
 1036 flow_dfs_compute_reverse_init () creates the necessary data
 1037 structures.
 1039 flow_dfs_compute_reverse_add_bb () adds a basic block to the data
 1040 structures. The block will start the search.
 1042 flow_dfs_compute_reverse_execute () continues (or starts) the
 1043 search using the block on the top of the stack, stopping when the
 1044 stack is empty.
 1046 flow_dfs_compute_reverse_finish () destroys the necessary data
 1047 structures.
 1049 Thus, the user will probably call ..._init(), call ..._add_bb() to
 1050 add a beginning basic block to the stack, call ..._execute(),
 1051 possibly add another bb to the stack and again call ..._execute(),
 1052 ..., and finally call _finish(). */
 1054 /* Initialize the data structures used for depth-first search on the
 1055 reverse graph. If INITIALIZE_STACK is nonzero, the exit block is
 1056 added to the basic block stack. DATA is the current depth-first
 1057 search context. If INITIALIZE_STACK is nonzero, there is an
 1058 element on the stack. */
 1060 static void
 1061 flow_dfs_compute_reverse_init (data)
 1062 depth_first_search_ds data;
 1064 /* Allocate stack for back-tracking up CFG. */
/* The "- (INVALID_BLOCK + 1)" bias makes room for (and offsets by)
   the negative-indexed ENTRY/EXIT pseudo blocks; the same bias is
   applied to every bitmap index in the reverse-DFS routines.  */
 1065 data->stack = (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
 1066 * sizeof (basic_block));
 1067 data->sp = 0;
 1069 /* Allocate bitmap to track nodes that have been visited. */
 1070 data->visited_blocks = sbitmap_alloc (last_basic_block - (INVALID_BLOCK + 1));
 1072 /* None of the nodes in the CFG have been visited yet. */
 1073 sbitmap_zero (data->visited_blocks);
 1075 return;
 1078 /* Add the specified basic block to the top of the dfs data
 1079 structures. When the search continues, it will start at the
 1080 block. */
 1082 static void
 1083 flow_dfs_compute_reverse_add_bb (data, bb)
 1084 depth_first_search_ds data;
 1085 basic_block bb;
 1087 data->stack[data->sp++] = bb;
/* Bias the index as in flow_dfs_compute_reverse_init.  */
 1088 SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
 1091 /* Continue the depth-first search through the reverse graph starting with the
 1092 block at the stack's top and ending when the stack is empty. Visited nodes
 1093 are marked. Returns an unvisited basic block, or NULL if there is none
 1094 available. */
 1096 static basic_block
 1097 flow_dfs_compute_reverse_execute (data)
 1098 depth_first_search_ds data;
 1100 basic_block bb;
 1101 edge e;
/* Walk predecessor edges (the "reverse" graph), pushing each
   not-yet-visited source block.  */
 1103 while (data->sp > 0)
 1105 bb = data->stack[--data->sp];
 1107 /* Perform depth-first search on adjacent vertices. */
 1108 for (e = bb->pred; e; e = e->pred_next)
 1109 if (!TEST_BIT (data->visited_blocks,
 1110 e->src->index - (INVALID_BLOCK + 1)))
 1111 flow_dfs_compute_reverse_add_bb (data, e->src);
 1114 /* Determine if there are unvisited basic blocks. */
/* Scan blocks backwards from EXIT and report the first block the
   reverse DFS never reached, if any.  */
 1115 FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR, NULL, prev_bb)
 1116 if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1)))
 1117 return bb;
 1119 return NULL;
 1122 /* Destroy the data structures needed for depth-first search on the
 1123 reverse graph. */
 1125 static void
 1126 flow_dfs_compute_reverse_finish (data)
 1127 depth_first_search_ds data;
 1129 free (data->stack);
 1130 sbitmap_free (data->visited_blocks);
 1133 /* Performs dfs search from BB over vertices satisfying PREDICATE;
 1134 if REVERSE, go against direction of edges. Returns number of blocks
 1135 found and their list in RSLT. RSLT can contain at most RSLT_MAX items. */
 1137 dfs_enumerate_from (bb, reverse, predicate, rslt, rslt_max, data)
 1138 basic_block bb;
 1139 int reverse;
 1140 bool (*predicate) PARAMS ((basic_block, void *));
 1141 basic_block *rslt;
 1142 int rslt_max;
 1143 void *data;
 1145 basic_block *st, lbb;
 1146 int sp = 0, tv = 0;
/* ST is the work stack; TV counts blocks stored in RSLT.  BB itself
   is always included, without consulting PREDICATE.  */
 1148 st = xcalloc (rslt_max, sizeof (basic_block));
 1149 rslt[tv++] = st[sp++] = bb;
/* BB_VISITED is used as scratch during the walk and cleared again
   before returning.  */
 1150 bb->flags |= BB_VISITED;
 1151 while (sp)
 1153 edge e;
 1154 lbb = st[--sp];
 1155 if (reverse)
 1157 for (e = lbb->pred; e; e = e->pred_next)
 1158 if (!(e->src->flags & BB_VISITED) && predicate (e->src, data))
/* Overflowing RSLT_MAX is a caller bug.  */
 1160 if (tv == rslt_max)
 1161 abort ();
 1162 rslt[tv++] = st[sp++] = e->src;
 1163 e->src->flags |= BB_VISITED;
 1166 else
 1168 for (e = lbb->succ; e; e = e->succ_next)
 1169 if (!(e->dest->flags & BB_VISITED) && predicate (e->dest, data))
 1171 if (tv == rslt_max)
 1172 abort ();
 1173 rslt[tv++] = st[sp++] = e->dest;
 1174 e->dest->flags |= BB_VISITED;
 1178 free (st);
/* Clear the scratch flag on everything we collected.  */
 1179 for (sp = 0; sp < tv; sp++)
 1180 rslt[sp]->flags &= ~BB_VISITED;
 1181 return tv;