1 /* Control flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "output.h"
32 #include "errors.h"
33 #include "flags.h"
34 #include "function.h"
35 #include "expr.h"
36 #include "ggc.h"
37 #include "langhooks.h"
38 #include "diagnostic.h"
39 #include "tree-flow.h"
40 #include "timevar.h"
41 #include "tree-dump.h"
42 #include "tree-pass.h"
43 #include "toplev.h"
44 #include "except.h"
45 #include "cfgloop.h"
46 #include "cfglayout.h"
47 #include "hashtab.h"
49 /* This file contains functions for building the Control Flow Graph (CFG)
50 for a function tree. */
52 /* Local declarations. */
54 /* Initial capacity for the basic block array. */
55 static const int initial_cfg_capacity = 20;
57 /* Mapping of labels to their associated blocks. This can greatly speed up
58 building of the CFG in code with lots of gotos. */
59 static GTY(()) varray_type label_to_block_map;
61 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
62 which use a particular edge. The CASE_LABEL_EXPRs are chained together
63 via their TREE_CHAIN field, which we clear after we're done with the
64 hash table to prevent problems with duplication of SWITCH_EXPRs.
66 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
67 update the case vector in response to edge redirections.
69 Right now this table is set up and torn down at key points in the
70 compilation process. It would be nice if we could make the table
71 more persistent. The key is getting notification of changes to
72 the CFG (particularly edge removal, creation and redirection). */
74 struct edge_to_cases_elt
76 /* The edge itself. Necessary for hashing and equality tests. */
77 edge e;
79 /* The case labels associated with this edge. We link these up via
80 their TREE_CHAIN field, then we wipe out the TREE_CHAIN fields
81 when we destroy the hash table. This prevents problems when copying
82 SWITCH_EXPRs. */
83 tree case_labels;
86 static htab_t edge_to_cases;
88 /* CFG statistics. */
89 struct cfg_stats_d
91 long num_merged_labels;
94 static struct cfg_stats_d cfg_stats;
96 /* Nonzero if we found a computed goto while building basic blocks. */
97 static bool found_computed_goto;
99 /* Basic blocks and flowgraphs. */
100 static basic_block create_bb (void *, void *, basic_block);
101 static void create_block_annotation (basic_block);
102 static void free_blocks_annotations (void);
103 static void clear_blocks_annotations (void);
104 static void make_blocks (tree);
105 static void factor_computed_gotos (void);
107 /* Edges. */
108 static void make_edges (void);
109 static void make_ctrl_stmt_edges (basic_block);
110 static void make_exit_edges (basic_block);
111 static void make_cond_expr_edges (basic_block);
112 static void make_switch_expr_edges (basic_block);
113 static void make_goto_expr_edges (basic_block);
114 static edge tree_redirect_edge_and_branch (edge, basic_block);
115 static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
116 static void split_critical_edges (void);
118 /* Various helpers. */
119 static inline bool stmt_starts_bb_p (tree, tree);
120 static int tree_verify_flow_info (void);
121 static void tree_make_forwarder_block (edge);
122 static bool thread_jumps (void);
123 static bool tree_forwarder_block_p (basic_block);
124 static void tree_cfg2vcg (FILE *);
126 /* Flowgraph optimization and cleanup. */
127 static void tree_merge_blocks (basic_block, basic_block);
128 static bool tree_can_merge_blocks_p (basic_block, basic_block);
129 static void remove_bb (basic_block);
130 static bool cleanup_control_flow (void);
131 static bool cleanup_control_expr_graph (basic_block, block_stmt_iterator);
132 static edge find_taken_edge_cond_expr (basic_block, tree);
133 static edge find_taken_edge_switch_expr (basic_block, tree);
134 static tree find_case_label_for_value (tree, tree);
135 static bool phi_alternatives_equal (basic_block, edge, edge);
138 /*---------------------------------------------------------------------------
139 Create basic blocks
140 ---------------------------------------------------------------------------*/
142 /* Entry point to the CFG builder for trees. TP points to the list of
143 statements to be added to the flowgraph. */
145 static void
146 build_tree_cfg (tree *tp)
148 /* Register specific tree functions. */
149 tree_register_cfg_hooks ();
151 /* Initialize rbi_pool. */
152 alloc_rbi_pool ();
154 /* Initialize the basic block array. */
155 init_flow ();
156 profile_status = PROFILE_ABSENT;
157 n_basic_blocks = 0;
158 last_basic_block = 0;
159 VARRAY_BB_INIT (basic_block_info, initial_cfg_capacity, "basic_block_info");
160 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
162 /* Build a mapping of labels to their associated blocks. */
163 VARRAY_BB_INIT (label_to_block_map, initial_cfg_capacity,
164 "label to block map");
166 ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
167 EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
169 found_computed_goto = 0;
170 make_blocks (*tp);
172 /* Computed gotos are hell to deal with, especially if there are
173 lots of them with a large number of destinations. So we factor
174 them to a common computed goto location before we build the
175 edge list. After we convert back to normal form, we will un-factor
176 the computed gotos since factoring introduces an unwanted jump. */
177 if (found_computed_goto)
178 factor_computed_gotos ();
180 /* Make sure there is always at least one block, even if it's empty. */
181 if (n_basic_blocks == 0)
182 create_empty_bb (ENTRY_BLOCK_PTR);
184 create_block_annotation (ENTRY_BLOCK_PTR);
185 create_block_annotation (EXIT_BLOCK_PTR);
187 /* Adjust the size of the array. */
188 VARRAY_GROW (basic_block_info, n_basic_blocks);
190 /* To speed up statement iterator walks, we first purge dead labels. */
191 cleanup_dead_labels ();
193 /* Group case nodes to reduce the number of edges.
194 We do this after cleaning up dead labels because otherwise we miss
195 a lot of obvious case merging opportunities. */
196 group_case_labels ();
198 /* Create the edges of the flowgraph. */
199 make_edges ();
201 /* Debugging dumps. */
203 /* Write the flowgraph to a VCG file. */
205 int local_dump_flags;
206 FILE *dump_file = dump_begin (TDI_vcg, &local_dump_flags);
207 if (dump_file)
209 tree_cfg2vcg (dump_file);
210 dump_end (TDI_vcg, dump_file);
214 /* Dump a textual representation of the flowgraph. */
215 if (dump_file)
216 dump_tree_cfg (dump_file, dump_flags);
219 static void
220 execute_build_cfg (void)
222 build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
225 struct tree_opt_pass pass_build_cfg =
227 "cfg", /* name */
228 NULL, /* gate */
229 execute_build_cfg, /* execute */
230 NULL, /* sub */
231 NULL, /* next */
232 0, /* static_pass_number */
233 TV_TREE_CFG, /* tv_id */
234 PROP_gimple_leh, /* properties_required */
235 PROP_cfg, /* properties_provided */
236 0, /* properties_destroyed */
237 0, /* todo_flags_start */
238 TODO_verify_stmts, /* todo_flags_finish */
239 0 /* letter */
242 /* Search the CFG for any computed gotos. If found, factor them to a
243 common computed goto site. Also record the location of that site so
244 that we can un-factor the gotos after we have converted back to
245 normal form. */
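/* Roughly, the factoring below rewrites each

	goto *EXPR;

   into

	gotovar = EXPR;
	goto <factored-label>;

   where a single shared block of the form

	<factored-label>:
	  goto *gotovar;

   is created for all of the original computed gotos.  */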
247 static void
248 factor_computed_gotos (void)
250 basic_block bb;
251 tree factored_label_decl = NULL;
252 tree var = NULL;
253 tree factored_computed_goto_label = NULL;
254 tree factored_computed_goto = NULL;
256 /* We know there are one or more computed gotos in this function.
257 Examine the last statement in each basic block to see if the block
258 ends with a computed goto. */
260 FOR_EACH_BB (bb)
262 block_stmt_iterator bsi = bsi_last (bb);
263 tree last;
265 if (bsi_end_p (bsi))
266 continue;
267 last = bsi_stmt (bsi);
269 /* Ignore the computed goto we create when we factor the original
270 computed gotos. */
271 if (last == factored_computed_goto)
272 continue;
274 /* If the last statement is a computed goto, factor it. */
275 if (computed_goto_p (last))
277 tree assignment;
279 /* The first time we find a computed goto we need to create
280 the factored goto block and the variable each original
281 computed goto will use for its goto destination. */
282 if (! factored_computed_goto)
284 basic_block new_bb = create_empty_bb (bb);
285 block_stmt_iterator new_bsi = bsi_start (new_bb);
287 /* Create the destination of the factored goto. Each original
288 computed goto will put its desired destination into this
289 variable and jump to the label we create immediately
290 below. */
291 var = create_tmp_var (ptr_type_node, "gotovar");
293 /* Build a label for the new block which will contain the
294 factored computed goto. */
295 factored_label_decl = create_artificial_label ();
296 factored_computed_goto_label
297 = build1 (LABEL_EXPR, void_type_node, factored_label_decl);
298 bsi_insert_after (&new_bsi, factored_computed_goto_label,
299 BSI_NEW_STMT);
301 /* Build our new computed goto. */
302 factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
303 bsi_insert_after (&new_bsi, factored_computed_goto,
304 BSI_NEW_STMT);
307 /* Copy the original computed goto's destination into VAR. */
308 assignment = build (MODIFY_EXPR, ptr_type_node,
309 var, GOTO_DESTINATION (last));
310 bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);
312 /* And re-vector the computed goto to the new destination. */
313 GOTO_DESTINATION (last) = factored_label_decl;
319 /* Create annotations for a single basic block. */
321 static void
322 create_block_annotation (basic_block bb)
324 /* Verify that the tree_annotations field is clear. */
325 gcc_assert (!bb->tree_annotations);
326 bb->tree_annotations = ggc_alloc_cleared (sizeof (struct bb_ann_d));
330 /* Free the annotations for all the basic blocks. */
332 static void free_blocks_annotations (void)
334 clear_blocks_annotations ();
338 /* Clear the annotations for all the basic blocks. */
340 static void
341 clear_blocks_annotations (void)
343 basic_block bb;
345 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
346 bb->tree_annotations = NULL;
350 /* Build a flowgraph for the statement_list STMT_LIST. */
352 static void
353 make_blocks (tree stmt_list)
355 tree_stmt_iterator i = tsi_start (stmt_list);
356 tree stmt = NULL;
357 bool start_new_block = true;
358 bool first_stmt_of_list = true;
359 basic_block bb = ENTRY_BLOCK_PTR;
361 while (!tsi_end_p (i))
363 tree prev_stmt;
365 prev_stmt = stmt;
366 stmt = tsi_stmt (i);
368 /* If the statement starts a new basic block or if we have determined
369 in a previous pass that we need to create a new block for STMT, do
370 so now. */
371 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
373 if (!first_stmt_of_list)
374 stmt_list = tsi_split_statement_list_before (&i);
375 bb = create_basic_block (stmt_list, NULL, bb);
376 start_new_block = false;
379 /* Now add STMT to BB and create the subgraphs for special statement
380 codes. */
381 set_bb_for_stmt (stmt, bb);
383 if (computed_goto_p (stmt))
384 found_computed_goto = true;
386 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
387 next iteration. */
388 if (stmt_ends_bb_p (stmt))
389 start_new_block = true;
391 tsi_next (&i);
392 first_stmt_of_list = false;
397 /* Create and return a new empty basic block after bb AFTER. */
399 static basic_block
400 create_bb (void *h, void *e, basic_block after)
402 basic_block bb;
404 gcc_assert (!e);
406 /* Create and initialize a new basic block. Since alloc_block uses
407 ggc_alloc_cleared to allocate a basic block, we do not have to
408 clear the newly allocated basic block here. */
409 bb = alloc_block ();
411 bb->index = last_basic_block;
412 bb->flags = BB_NEW;
413 bb->stmt_list = h ? h : alloc_stmt_list ();
415 /* Add the new block to the linked list of blocks. */
416 link_block (bb, after);
418 /* Grow the basic block array if needed. */
419 if ((size_t) last_basic_block == VARRAY_SIZE (basic_block_info))
421 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
422 VARRAY_GROW (basic_block_info, new_size);
425 /* Add the newly created block to the array. */
426 BASIC_BLOCK (last_basic_block) = bb;
428 create_block_annotation (bb);
430 n_basic_blocks++;
431 last_basic_block++;
433 initialize_bb_rbi (bb);
434 return bb;
438 /*---------------------------------------------------------------------------
439 Edge creation
440 ---------------------------------------------------------------------------*/
442 /* Join all the blocks in the flowgraph. */
444 static void
445 make_edges (void)
447 basic_block bb;
449 /* Create an edge from entry to the first block with executable
450 statements in it. */
451 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
453 /* Traverse basic block array placing edges. */
454 FOR_EACH_BB (bb)
456 tree first = first_stmt (bb);
457 tree last = last_stmt (bb);
459 if (first)
461 /* Edges for statements that always alter flow control. */
462 if (is_ctrl_stmt (last))
463 make_ctrl_stmt_edges (bb);
465 /* Edges for statements that sometimes alter flow control. */
466 if (is_ctrl_altering_stmt (last))
467 make_exit_edges (bb);
470 /* Finally, if no edges were created above, this is a regular
471 basic block that only needs a fallthru edge. */
472 if (EDGE_COUNT (bb->succs) == 0)
473 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
476 /* We do not care about fake edges, so remove any that the CFG
477 builder inserted for completeness. */
478 remove_fake_exit_edges ();
480 /* Clean up the graph and warn for unreachable code. */
481 cleanup_tree_cfg ();
485 /* Create edges for control statement at basic block BB. */
487 static void
488 make_ctrl_stmt_edges (basic_block bb)
490 tree last = last_stmt (bb);
492 gcc_assert (last);
493 switch (TREE_CODE (last))
495 case GOTO_EXPR:
496 make_goto_expr_edges (bb);
497 break;
499 case RETURN_EXPR:
500 make_edge (bb, EXIT_BLOCK_PTR, 0);
501 break;
503 case COND_EXPR:
504 make_cond_expr_edges (bb);
505 break;
507 case SWITCH_EXPR:
508 make_switch_expr_edges (bb);
509 break;
511 case RESX_EXPR:
512 make_eh_edges (last);
513 /* Yet another NORETURN hack. */
514 if (EDGE_COUNT (bb->succs) == 0)
515 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
516 break;
518 default:
519 gcc_unreachable ();
524 /* Create exit edges for statements in block BB that alter the flow of
525 control. Statements that alter the control flow are 'goto', 'return'
526 and calls to non-returning functions. */
528 static void
529 make_exit_edges (basic_block bb)
531 tree last = last_stmt (bb), op;
533 gcc_assert (last);
534 switch (TREE_CODE (last))
536 case CALL_EXPR:
537 /* If this function receives a nonlocal goto, then we need to
538 make edges from this call site to all the nonlocal goto
539 handlers. */
540 if (TREE_SIDE_EFFECTS (last)
541 && current_function_has_nonlocal_label)
542 make_goto_expr_edges (bb);
544 /* If this statement has reachable exception handlers, then
545 create abnormal edges to them. */
546 make_eh_edges (last);
548 /* Some calls are known not to return. For such calls we create
549 a fake edge.
551 We really need to revamp how we build edges so that it's not
552 such a bloody pain to avoid creating edges for this case since
553 all we do is remove these edges when we're done building the
554 CFG. */
555 if (call_expr_flags (last) & ECF_NORETURN)
557 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
558 return;
561 /* Don't forget the fall-thru edge. */
562 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
563 break;
565 case MODIFY_EXPR:
566 /* A MODIFY_EXPR may have a CALL_EXPR on its RHS and the CALL_EXPR
567 may have an abnormal edge. Search the RHS for this case and
568 create any required edges. */
569 op = get_call_expr_in (last);
570 if (op && TREE_SIDE_EFFECTS (op)
571 && current_function_has_nonlocal_label)
572 make_goto_expr_edges (bb);
574 make_eh_edges (last);
575 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
576 break;
578 default:
579 gcc_unreachable ();
584 /* Create the edges for a COND_EXPR starting at block BB.
585 At this point, both clauses must contain only simple gotos. */
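/* That is, the COND_EXPR is expected to have the GIMPLE form

	if (COND)
	  goto <then-label>;
	else
	  goto <else-label>;

   so the true and false edges below simply target the blocks that hold
   those two labels.  */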
587 static void
588 make_cond_expr_edges (basic_block bb)
590 tree entry = last_stmt (bb);
591 basic_block then_bb, else_bb;
592 tree then_label, else_label;
594 gcc_assert (entry);
595 gcc_assert (TREE_CODE (entry) == COND_EXPR);
597 /* Entry basic blocks for each component. */
598 then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
599 else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
600 then_bb = label_to_block (then_label);
601 else_bb = label_to_block (else_label);
603 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
604 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
607 /* Hashing routine for EDGE_TO_CASES. */
609 static hashval_t
610 edge_to_cases_hash (const void *p)
612 edge e = ((struct edge_to_cases_elt *)p)->e;
614 /* Hash on the edge itself (which is a pointer). */
615 return htab_hash_pointer (e);
618 /* Equality routine for EDGE_TO_CASES; edges are unique, so testing
619 for equality is just a pointer comparison. */
621 static int
622 edge_to_cases_eq (const void *p1, const void *p2)
624 edge e1 = ((struct edge_to_cases_elt *)p1)->e;
625 edge e2 = ((struct edge_to_cases_elt *)p2)->e;
627 return e1 == e2;
630 /* Called for each element in the hash table (P) as we delete the
631 edge to cases hash table.
633 Clear all the TREE_CHAINs to prevent problems with copying of
634 SWITCH_EXPRs and structure sharing rules, then free the hash table
635 element. */
637 static void
638 edge_to_cases_cleanup (void *p)
640 struct edge_to_cases_elt *elt = p;
641 tree t, next;
643 for (t = elt->case_labels; t; t = next)
645 next = TREE_CHAIN (t);
646 TREE_CHAIN (t) = NULL;
648 free (p);
651 /* Start recording information mapping edges to case labels. */
653 static void
654 start_recording_case_labels (void)
656 gcc_assert (edge_to_cases == NULL);
658 edge_to_cases = htab_create (37,
659 edge_to_cases_hash,
660 edge_to_cases_eq,
661 edge_to_cases_cleanup);
664 /* Return nonzero if we are recording information for case labels. */
666 static bool
667 recording_case_labels_p (void)
669 return (edge_to_cases != NULL);
672 /* Stop recording information mapping edges to case labels and
673 remove any information we have recorded. */
674 static void
675 end_recording_case_labels (void)
677 htab_delete (edge_to_cases);
678 edge_to_cases = NULL;
681 /* Record that CASE_LABEL (a CASE_LABEL_EXPR) references edge E. */
683 static void
684 record_switch_edge (edge e, tree case_label)
686 struct edge_to_cases_elt *elt;
687 void **slot;
689 /* Build a hash table element so we can see if E is already
690 in the table. */
691 elt = xmalloc (sizeof (struct edge_to_cases_elt));
692 elt->e = e;
693 elt->case_labels = case_label;
695 slot = htab_find_slot (edge_to_cases, elt, INSERT);
697 if (*slot == NULL)
699 /* E was not in the hash table. Install E into the hash table. */
700 *slot = (void *)elt;
702 else
704 /* E was already in the hash table. Free ELT as we do not need it
705 anymore. */
706 free (elt);
708 /* Get the entry stored in the hash table. */
709 elt = (struct edge_to_cases_elt *) *slot;
711 /* Add it to the chain of CASE_LABEL_EXPRs referencing E. */
712 TREE_CHAIN (case_label) = elt->case_labels;
713 elt->case_labels = case_label;
717 /* If we are inside a {start,end}_recording_cases block, then return
718 a chain of CASE_LABEL_EXPRs from T which reference E.
720 Otherwise return NULL. */
722 static tree
723 get_cases_for_edge (edge e, tree t)
725 struct edge_to_cases_elt elt, *elt_p;
726 void **slot;
727 size_t i, n;
728 tree vec;
730 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
731 chains available. Return NULL so the caller can detect this case. */
732 if (!recording_case_labels_p ())
733 return NULL;
735 restart:
736 elt.e = e;
737 elt.case_labels = NULL;
738 slot = htab_find_slot (edge_to_cases, &elt, NO_INSERT);
740 if (slot)
742 elt_p = (struct edge_to_cases_elt *)*slot;
743 return elt_p->case_labels;
746 /* If we did not find E in the hash table, then this must be the first
747 time we have been queried for information about E & T. Add all the
748 elements from T to the hash table then perform the query again. */
750 vec = SWITCH_LABELS (t);
751 n = TREE_VEC_LENGTH (vec);
752 for (i = 0; i < n; i++)
754 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
755 basic_block label_bb = label_to_block (lab);
756 record_switch_edge (find_edge (e->src, label_bb), TREE_VEC_ELT (vec, i));
758 goto restart;
761 /* Create the edges for a SWITCH_EXPR starting at block BB.
762 At this point, the switch body has been lowered and the
763 SWITCH_LABELS filled in, so this is in effect a multi-way branch. */
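/* One outgoing edge is created for every element of SWITCH_LABELS,
   including the default case, which gimplification places last in the
   label vector.  */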
765 static void
766 make_switch_expr_edges (basic_block bb)
768 tree entry = last_stmt (bb);
769 size_t i, n;
770 tree vec;
772 vec = SWITCH_LABELS (entry);
773 n = TREE_VEC_LENGTH (vec);
775 for (i = 0; i < n; ++i)
777 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
778 basic_block label_bb = label_to_block (lab);
779 make_edge (bb, label_bb, 0);
784 /* Return the basic block holding label DEST. */
786 basic_block
787 label_to_block (tree dest)
789 int uid = LABEL_DECL_UID (dest);
791 /* We would die hard when faced with an undefined label. Emit a label to
792 the very first basic block. This will hopefully make even the dataflow
793 and undefined variable warnings come out right. */
794 if ((errorcount || sorrycount) && uid < 0)
796 block_stmt_iterator bsi = bsi_start (BASIC_BLOCK (0));
797 tree stmt;
799 stmt = build1 (LABEL_EXPR, void_type_node, dest);
800 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
801 uid = LABEL_DECL_UID (dest);
803 return VARRAY_BB (label_to_block_map, uid);
807 /* Create edges for a goto statement at block BB. */
809 static void
810 make_goto_expr_edges (basic_block bb)
812 tree goto_t, dest;
813 basic_block target_bb;
814 int for_call;
815 block_stmt_iterator last = bsi_last (bb);
817 goto_t = bsi_stmt (last);
819 /* If the last statement is not a GOTO (i.e., it is a RETURN_EXPR,
820 CALL_EXPR or MODIFY_EXPR), then the edge is an abnormal edge resulting
821 from a nonlocal goto. */
822 if (TREE_CODE (goto_t) != GOTO_EXPR)
824 dest = error_mark_node;
825 for_call = 1;
827 else
829 dest = GOTO_DESTINATION (goto_t);
830 for_call = 0;
832 /* A GOTO to a local label creates normal edges. */
833 if (simple_goto_p (goto_t))
835 edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
836 #ifdef USE_MAPPED_LOCATION
837 e->goto_locus = EXPR_LOCATION (goto_t);
838 #else
839 e->goto_locus = EXPR_LOCUS (goto_t);
840 #endif
841 bsi_remove (&last);
842 return;
845 /* Nothing more to do for nonlocal gotos. */
846 if (TREE_CODE (dest) == LABEL_DECL)
847 return;
849 /* Computed gotos remain. */
852 /* Look for the block starting with the destination label. In the
853 case of a computed goto, make an edge to any label block we find
854 in the CFG. */
855 FOR_EACH_BB (target_bb)
857 block_stmt_iterator bsi;
859 for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
861 tree target = bsi_stmt (bsi);
863 if (TREE_CODE (target) != LABEL_EXPR)
864 break;
866 if (
867 /* Computed GOTOs. Make an edge to every label block that has
868 been marked as a potential target for a computed goto. */
869 (FORCED_LABEL (LABEL_EXPR_LABEL (target)) && for_call == 0)
870 /* Nonlocal GOTO target. Make an edge to every label block
871 that has been marked as a potential target for a nonlocal
872 goto. */
873 || (DECL_NONLOCAL (LABEL_EXPR_LABEL (target)) && for_call == 1))
875 make_edge (bb, target_bb, EDGE_ABNORMAL);
876 break;
881 /* Degenerate case of computed goto with no labels. */
882 if (!for_call && EDGE_COUNT (bb->succs) == 0)
883 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
887 /*---------------------------------------------------------------------------
888 Flowgraph analysis
889 ---------------------------------------------------------------------------*/
891 /* Remove unreachable blocks and other miscellaneous clean up work. */
893 bool
894 cleanup_tree_cfg (void)
896 bool retval = false;
898 timevar_push (TV_TREE_CLEANUP_CFG);
900 retval = cleanup_control_flow ();
901 retval |= delete_unreachable_blocks ();
903 /* thread_jumps can redirect edges out of SWITCH_EXPRs, which can get
904 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
905 mappings around the call to thread_jumps. */
906 start_recording_case_labels ();
907 retval |= thread_jumps ();
908 end_recording_case_labels ();
910 #ifdef ENABLE_CHECKING
911 if (retval)
913 gcc_assert (!cleanup_control_flow ());
914 gcc_assert (!delete_unreachable_blocks ());
915 gcc_assert (!thread_jumps ());
917 #endif
919 /* Merging the blocks creates no new opportunities for the other
920 optimizations, so do it here. */
921 retval |= merge_seq_blocks ();
923 compact_blocks ();
925 #ifdef ENABLE_CHECKING
926 verify_flow_info ();
927 #endif
928 timevar_pop (TV_TREE_CLEANUP_CFG);
929 return retval;
933 /* Cleanup useless labels in basic blocks. This is something we wish
934 to do early because it allows us to group case labels before creating
935 the edges for the CFG, and it speeds up block statement iterators in
936 all passes later on.
937 We only run this pass once; running it more than once is probably not
938 profitable. */
940 /* A map from basic block index to the leading label of that block. */
941 static tree *label_for_bb;
943 /* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
944 static void
945 update_eh_label (struct eh_region *region)
947 tree old_label = get_eh_region_tree_label (region);
948 if (old_label)
950 tree new_label;
951 basic_block bb = label_to_block (old_label);
953 /* ??? After optimizing, there may be EH regions with labels
954 that have already been removed from the function body, so
955 there is no basic block for them. */
956 if (! bb)
957 return;
959 new_label = label_for_bb[bb->index];
960 set_eh_region_tree_label (region, new_label);
964 /* Given LABEL return the first label in the same basic block. */
965 static tree
966 main_block_label (tree label)
968 basic_block bb = label_to_block (label);
970 /* label_to_block may have inserted an undefined label into the chain. */
971 if (!label_for_bb[bb->index])
972 label_for_bb[bb->index] = label;
973 return label_for_bb[bb->index];
976 /* Cleanup redundant labels. This is a three-step process:
977 1) Find the leading label for each block.
978 2) Redirect all references to labels to the leading labels.
979 3) Cleanup all useless labels. */
981 void
982 cleanup_dead_labels (void)
984 basic_block bb;
985 label_for_bb = xcalloc (last_basic_block, sizeof (tree));
987 /* Find a suitable label for each block. We use the first user-defined
988 label if there is one, or otherwise just the first label we see. */
989 FOR_EACH_BB (bb)
991 block_stmt_iterator i;
993 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
995 tree label, stmt = bsi_stmt (i);
997 if (TREE_CODE (stmt) != LABEL_EXPR)
998 break;
1000 label = LABEL_EXPR_LABEL (stmt);
1002 /* If we have not yet seen a label for the current block,
1003 remember this one and see if there are more labels. */
1004 if (! label_for_bb[bb->index])
1006 label_for_bb[bb->index] = label;
1007 continue;
1010 /* If we did see a label for the current block already, but it
1011 is an artificially created label, replace it if the current
1012 label is a user defined label. */
1013 if (! DECL_ARTIFICIAL (label)
1014 && DECL_ARTIFICIAL (label_for_bb[bb->index]))
1016 label_for_bb[bb->index] = label;
1017 break;
1022 /* Now redirect all jumps/branches to the selected label.
1023 First do so for each block ending in a control statement. */
1024 FOR_EACH_BB (bb)
1026 tree stmt = last_stmt (bb);
1027 if (!stmt)
1028 continue;
1030 switch (TREE_CODE (stmt))
1032 case COND_EXPR:
1034 tree true_branch, false_branch;
1036 true_branch = COND_EXPR_THEN (stmt);
1037 false_branch = COND_EXPR_ELSE (stmt);
1039 GOTO_DESTINATION (true_branch)
1040 = main_block_label (GOTO_DESTINATION (true_branch));
1041 GOTO_DESTINATION (false_branch)
1042 = main_block_label (GOTO_DESTINATION (false_branch));
1044 break;
1047 case SWITCH_EXPR:
1049 size_t i;
1050 tree vec = SWITCH_LABELS (stmt);
1051 size_t n = TREE_VEC_LENGTH (vec);
1053 /* Replace all destination labels. */
1054 for (i = 0; i < n; ++i)
1056 tree elt = TREE_VEC_ELT (vec, i);
1057 tree label = main_block_label (CASE_LABEL (elt));
1058 CASE_LABEL (elt) = label;
1060 break;
1063 /* We have to handle GOTO_EXPRs until they're removed, and we don't
1064 remove them until after we've created the CFG edges. */
1065 case GOTO_EXPR:
1066 if (! computed_goto_p (stmt))
1068 GOTO_DESTINATION (stmt)
1069 = main_block_label (GOTO_DESTINATION (stmt));
1070 break;
1073 default:
1074 break;
1078 for_each_eh_region (update_eh_label);
1080 /* Finally, purge dead labels. All user-defined labels and labels that
1081 can be the target of non-local gotos are preserved. */
1082 FOR_EACH_BB (bb)
1084 block_stmt_iterator i;
1085 tree label_for_this_bb = label_for_bb[bb->index];
1087 if (! label_for_this_bb)
1088 continue;
1090 for (i = bsi_start (bb); !bsi_end_p (i); )
1092 tree label, stmt = bsi_stmt (i);
1094 if (TREE_CODE (stmt) != LABEL_EXPR)
1095 break;
1097 label = LABEL_EXPR_LABEL (stmt);
1099 if (label == label_for_this_bb
1100 || ! DECL_ARTIFICIAL (label)
1101 || DECL_NONLOCAL (label))
1102 bsi_next (&i);
1103 else
1104 bsi_remove (&i);
1108 free (label_for_bb);
1111 /* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1112 and scan the sorted vector of cases. Combine the ones jumping to the
1113 same label.
1114 E.g. three separate entries 1: 2: 3: become one entry 1..3: */
1116 void
1117 group_case_labels (void)
1119 basic_block bb;
1121 FOR_EACH_BB (bb)
1123 tree stmt = last_stmt (bb);
1124 if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
1126 tree labels = SWITCH_LABELS (stmt);
1127 int old_size = TREE_VEC_LENGTH (labels);
1128 int i, j, new_size = old_size;
1129 tree default_case = TREE_VEC_ELT (labels, old_size - 1);
1130 tree default_label;
1132 /* The default label is always the last case in a switch
1133 statement after gimplification. */
1134 default_label = CASE_LABEL (default_case);
1136 /* Look for possible opportunities to merge cases.
1137 Ignore the last element of the label vector because it
1138 must be the default case. */
1139 i = 0;
1140 while (i < old_size - 1)
1142 tree base_case, base_label, base_high, type;
1143 base_case = TREE_VEC_ELT (labels, i);
1145 gcc_assert (base_case);
1146 base_label = CASE_LABEL (base_case);
1148 /* Discard cases that have the same destination as the
1149 default case. */
1150 if (base_label == default_label)
1152 TREE_VEC_ELT (labels, i) = NULL_TREE;
1153 i++;
1154 new_size--;
1155 continue;
1158 type = TREE_TYPE (CASE_LOW (base_case));
1159 base_high = CASE_HIGH (base_case) ?
1160 CASE_HIGH (base_case) : CASE_LOW (base_case);
1161 i++;
1162 /* Try to merge case labels. Break out when we reach the end
1163 of the label vector or when we cannot merge the next case
1164 label with the current one. */
1165 while (i < old_size - 1)
1167 tree merge_case = TREE_VEC_ELT (labels, i);
1168 tree merge_label = CASE_LABEL (merge_case);
1169 tree t = int_const_binop (PLUS_EXPR, base_high,
1170 integer_one_node, 1);
1172 /* Merge the cases if they jump to the same place,
1173 and their ranges are consecutive. */
1174 if (merge_label == base_label
1175 && tree_int_cst_equal (CASE_LOW (merge_case), t))
1177 base_high = CASE_HIGH (merge_case) ?
1178 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1179 CASE_HIGH (base_case) = base_high;
1180 TREE_VEC_ELT (labels, i) = NULL_TREE;
1181 new_size--;
1182 i++;
1184 else
1185 break;
1189 /* Compress the case labels in the label vector, and adjust the
1190 length of the vector. */
1191 for (i = 0, j = 0; i < new_size; i++)
1193 while (! TREE_VEC_ELT (labels, j))
1194 j++;
1195 TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
1197 TREE_VEC_LENGTH (labels) = new_size;
1202 /* Checks whether we can merge block B into block A. */
1204 static bool
1205 tree_can_merge_blocks_p (basic_block a, basic_block b)
1207 tree stmt;
1208 block_stmt_iterator bsi;
1210 if (EDGE_COUNT (a->succs) != 1)
1211 return false;
1213 if (EDGE_SUCC (a, 0)->flags & EDGE_ABNORMAL)
1214 return false;
1216 if (EDGE_SUCC (a, 0)->dest != b)
1217 return false;
1219 if (b == EXIT_BLOCK_PTR)
1220 return false;
1222 if (EDGE_COUNT (b->preds) > 1)
1223 return false;
1225 /* If A ends by a statement causing exceptions or something similar, we
1226 cannot merge the blocks. */
1227 stmt = last_stmt (a);
1228 if (stmt && stmt_ends_bb_p (stmt))
1229 return false;
1231 /* Do not allow a block with only a non-local label to be merged. */
1232 if (stmt && TREE_CODE (stmt) == LABEL_EXPR
1233 && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
1234 return false;
1236 /* B must not have any PHI nodes at its start; most such degenerate PHI
1237 nodes should have been cleaned up by kill_redundant_phi_nodes. */
1238 if (phi_nodes (b))
1239 return false;
1241 /* Do not remove user labels. */
1242 for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
1244 stmt = bsi_stmt (bsi);
1245 if (TREE_CODE (stmt) != LABEL_EXPR)
1246 break;
1247 if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
1248 return false;
1251 return true;
1255 /* Merge block B into block A. */
1257 static void
1258 tree_merge_blocks (basic_block a, basic_block b)
1260 block_stmt_iterator bsi;
1261 tree_stmt_iterator last;
1263 if (dump_file)
1264 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1266 /* Ensure that B follows A. */
1267 move_block_after (b, a);
1269 gcc_assert (EDGE_SUCC (a, 0)->flags & EDGE_FALLTHRU);
1270 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1272 /* Remove labels from B and set bb_for_stmt to A for other statements. */
1273 for (bsi = bsi_start (b); !bsi_end_p (bsi);)
1275 if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
1276 bsi_remove (&bsi);
1277 else
1279 set_bb_for_stmt (bsi_stmt (bsi), a);
1280 bsi_next (&bsi);
1284 /* Merge the chains. */
1285 last = tsi_last (a->stmt_list);
1286 tsi_link_after (&last, b->stmt_list, TSI_NEW_STMT);
1287 b->stmt_list = NULL;
1291 /* Walk the function tree removing unnecessary statements.
1293 * Empty statement nodes are removed
1295 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1297 * Unnecessary COND_EXPRs are removed
1299 * Some unnecessary BIND_EXPRs are removed
1301 Clearly more work could be done. The trick is doing the analysis
1302 and removal fast enough to be a net improvement in compile times.
1304 Note that when we remove a control structure such as a COND_EXPR,
1305 BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1306 to ensure we eliminate all the useless code. */
1308 struct rus_data
1310 tree *last_goto;
1311 bool repeat;
1312 bool may_throw;
1313 bool may_branch;
1314 bool has_label;
1317 static void remove_useless_stmts_1 (tree *, struct rus_data *);
1319 static bool
1320 remove_useless_stmts_warn_notreached (tree stmt)
1322 if (EXPR_HAS_LOCATION (stmt))
1324 location_t loc = EXPR_LOCATION (stmt);
1325 warning ("%Hwill never be executed", &loc);
1326 return true;
1329 switch (TREE_CODE (stmt))
1331 case STATEMENT_LIST:
1333 tree_stmt_iterator i;
1334 for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
1335 if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
1336 return true;
1338 break;
1340 case COND_EXPR:
1341 if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
1342 return true;
1343 if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
1344 return true;
1345 if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
1346 return true;
1347 break;
1349 case TRY_FINALLY_EXPR:
1350 case TRY_CATCH_EXPR:
1351 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
1352 return true;
1353 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
1354 return true;
1355 break;
1357 case CATCH_EXPR:
1358 return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
1359 case EH_FILTER_EXPR:
1360 return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
1361 case BIND_EXPR:
1362 return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));
1364 default:
1365 /* Not a live container. */
1366 break;
1369 return false;
1372 static void
1373 remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
1375 tree then_clause, else_clause, cond;
1376 bool save_has_label, then_has_label, else_has_label;
1378 save_has_label = data->has_label;
1379 data->has_label = false;
1380 data->last_goto = NULL;
1382 remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);
1384 then_has_label = data->has_label;
1385 data->has_label = false;
1386 data->last_goto = NULL;
1388 remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);
1390 else_has_label = data->has_label;
1391 data->has_label = save_has_label | then_has_label | else_has_label;
1393 then_clause = COND_EXPR_THEN (*stmt_p);
1394 else_clause = COND_EXPR_ELSE (*stmt_p);
1395 cond = COND_EXPR_COND (*stmt_p);
1397 /* If neither arm does anything at all, we can remove the whole IF. */
1398 if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
1400 *stmt_p = build_empty_stmt ();
1401 data->repeat = true;
1404 /* If there are no reachable statements in an arm, then we can
1405 zap the entire conditional. */
1406 else if (integer_nonzerop (cond) && !else_has_label)
1408 if (warn_notreached)
1409 remove_useless_stmts_warn_notreached (else_clause);
1410 *stmt_p = then_clause;
1411 data->repeat = true;
1413 else if (integer_zerop (cond) && !then_has_label)
1415 if (warn_notreached)
1416 remove_useless_stmts_warn_notreached (then_clause);
1417 *stmt_p = else_clause;
1418 data->repeat = true;
1421 /* Check a couple of simple things on then/else with single stmts. */
1422 else
1424 tree then_stmt = expr_only (then_clause);
1425 tree else_stmt = expr_only (else_clause);
1427 /* Notice branches to a common destination. */
1428 if (then_stmt && else_stmt
1429 && TREE_CODE (then_stmt) == GOTO_EXPR
1430 && TREE_CODE (else_stmt) == GOTO_EXPR
1431 && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
1433 *stmt_p = then_stmt;
1434 data->repeat = true;
1437 /* If the THEN/ELSE clause merely assigns a value to a variable or
1438 parameter which is already known to contain that value, then
1439 remove the useless THEN/ELSE clause. */
1440 else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1442 if (else_stmt
1443 && TREE_CODE (else_stmt) == MODIFY_EXPR
1444 && TREE_OPERAND (else_stmt, 0) == cond
1445 && integer_zerop (TREE_OPERAND (else_stmt, 1)))
1446 COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
1448 else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
1449 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1450 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1451 && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
1453 tree stmt = (TREE_CODE (cond) == EQ_EXPR
1454 ? then_stmt : else_stmt);
1455 tree *location = (TREE_CODE (cond) == EQ_EXPR
1456 ? &COND_EXPR_THEN (*stmt_p)
1457 : &COND_EXPR_ELSE (*stmt_p));
1459 if (stmt
1460 && TREE_CODE (stmt) == MODIFY_EXPR
1461 && TREE_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
1462 && TREE_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
1463 *location = alloc_stmt_list ();
1467 /* Protect GOTOs in the arm of COND_EXPRs from being removed. They
1468 would be re-introduced during lowering. */
1469 data->last_goto = NULL;
1473 static void
1474 remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
1476 bool save_may_branch, save_may_throw;
1477 bool this_may_branch, this_may_throw;
1479 /* Collect may_branch and may_throw information for the body only. */
1480 save_may_branch = data->may_branch;
1481 save_may_throw = data->may_throw;
1482 data->may_branch = false;
1483 data->may_throw = false;
1484 data->last_goto = NULL;
1486 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1488 this_may_branch = data->may_branch;
1489 this_may_throw = data->may_throw;
1490 data->may_branch |= save_may_branch;
1491 data->may_throw |= save_may_throw;
1492 data->last_goto = NULL;
1494 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1496 /* If the body is empty, then we can emit the FINALLY block without
1497 the enclosing TRY_FINALLY_EXPR. */
1498 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
1500 *stmt_p = TREE_OPERAND (*stmt_p, 1);
1501 data->repeat = true;
1504 /* If the handler is empty, then we can emit the TRY block without
1505 the enclosing TRY_FINALLY_EXPR. */
1506 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1508 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1509 data->repeat = true;
1512 /* If the body neither throws, nor branches, then we can safely
1513 string the TRY and FINALLY blocks together. */
1514 else if (!this_may_branch && !this_may_throw)
1516 tree stmt = *stmt_p;
1517 *stmt_p = TREE_OPERAND (stmt, 0);
1518 append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
1519 data->repeat = true;
1524 static void
1525 remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
1527 bool save_may_throw, this_may_throw;
1528 tree_stmt_iterator i;
1529 tree stmt;
1531 /* Collect may_throw information for the body only. */
1532 save_may_throw = data->may_throw;
1533 data->may_throw = false;
1534 data->last_goto = NULL;
1536 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1538 this_may_throw = data->may_throw;
1539 data->may_throw = save_may_throw;
1541 /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
1542 if (!this_may_throw)
1544 if (warn_notreached)
1545 remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
1546 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1547 data->repeat = true;
1548 return;
1551 /* Process the catch clause specially. We may be able to tell that
1552 no exceptions propagate past this point. */
1554 this_may_throw = true;
1555 i = tsi_start (TREE_OPERAND (*stmt_p, 1));
1556 stmt = tsi_stmt (i);
1557 data->last_goto = NULL;
1559 switch (TREE_CODE (stmt))
1561 case CATCH_EXPR:
1562 for (; !tsi_end_p (i); tsi_next (&i))
1564 stmt = tsi_stmt (i);
1565 /* If we catch all exceptions, then the body does not
1566 propagate exceptions past this point. */
1567 if (CATCH_TYPES (stmt) == NULL)
1568 this_may_throw = false;
1569 data->last_goto = NULL;
1570 remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
1572 break;
1574 case EH_FILTER_EXPR:
1575 if (EH_FILTER_MUST_NOT_THROW (stmt))
1576 this_may_throw = false;
1577 else if (EH_FILTER_TYPES (stmt) == NULL)
1578 this_may_throw = false;
1579 remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
1580 break;
1582 default:
1583 /* Otherwise this is a cleanup. */
1584 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1586 /* If the cleanup is empty, then we can emit the TRY block without
1587 the enclosing TRY_CATCH_EXPR. */
1588 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1590 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1591 data->repeat = true;
1593 break;
1595 data->may_throw |= this_may_throw;
1599 static void
1600 remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
1602 tree block;
1604 /* First remove anything underneath the BIND_EXPR. */
1605 remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);
1607 /* If the BIND_EXPR has no variables, then we can pull everything
1608 up one level and remove the BIND_EXPR, unless this is the toplevel
1609 BIND_EXPR for the current function or an inlined function.
1611 When this situation occurs we will want to apply this
1612 optimization again. */
1613 block = BIND_EXPR_BLOCK (*stmt_p);
1614 if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
1615 && *stmt_p != DECL_SAVED_TREE (current_function_decl)
1616 && (! block
1617 || ! BLOCK_ABSTRACT_ORIGIN (block)
1618 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1619 != FUNCTION_DECL)))
1621 *stmt_p = BIND_EXPR_BODY (*stmt_p);
1622 data->repeat = true;
1627 static void
1628 remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
1630 tree dest = GOTO_DESTINATION (*stmt_p);
1632 data->may_branch = true;
1633 data->last_goto = NULL;
1635 /* Record the last goto expr, so that we can delete it if unnecessary. */
1636 if (TREE_CODE (dest) == LABEL_DECL)
1637 data->last_goto = stmt_p;
1641 static void
1642 remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
1644 tree label = LABEL_EXPR_LABEL (*stmt_p);
1646 data->has_label = true;
1648 /* We do want to jump across non-local label receiver code. */
1649 if (DECL_NONLOCAL (label))
1650 data->last_goto = NULL;
1652 else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
1654 *data->last_goto = build_empty_stmt ();
1655 data->repeat = true;
1658 /* ??? Add something here to delete unused labels. */
1662 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on the
1663 call. This allows us to eliminate redundant or useless
1664 calls to "const" functions.
1666 The gimplifier already does the same operation, but we may notice functions
1667 being const or pure only after their calls have been gimplified, so we
1668 need to update the flag. */
1670 static void
1671 update_call_expr_flags (tree call)
1673 tree decl = get_callee_fndecl (call);
1674 if (!decl)
1675 return;
1676 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1677 TREE_SIDE_EFFECTS (call) = 0;
1678 if (TREE_NOTHROW (decl))
1679 TREE_NOTHROW (call) = 1;
1683 /* T is a CALL_EXPR. Set current_function_calls_* flags. */
1685 void
1686 notice_special_calls (tree t)
1688 int flags = call_expr_flags (t);
1690 if (flags & ECF_MAY_BE_ALLOCA)
1691 current_function_calls_alloca = true;
1692 if (flags & ECF_RETURNS_TWICE)
1693 current_function_calls_setjmp = true;
1697 /* Clear flags set by notice_special_calls. Used by dead code removal
1698 to update the flags. */
1700 void
1701 clear_special_calls (void)
1703 current_function_calls_alloca = false;
1704 current_function_calls_setjmp = false;
1708 static void
1709 remove_useless_stmts_1 (tree *tp, struct rus_data *data)
1711 tree t = *tp, op;
1713 switch (TREE_CODE (t))
1715 case COND_EXPR:
1716 remove_useless_stmts_cond (tp, data);
1717 break;
1719 case TRY_FINALLY_EXPR:
1720 remove_useless_stmts_tf (tp, data);
1721 break;
1723 case TRY_CATCH_EXPR:
1724 remove_useless_stmts_tc (tp, data);
1725 break;
1727 case BIND_EXPR:
1728 remove_useless_stmts_bind (tp, data);
1729 break;
1731 case GOTO_EXPR:
1732 remove_useless_stmts_goto (tp, data);
1733 break;
1735 case LABEL_EXPR:
1736 remove_useless_stmts_label (tp, data);
1737 break;
1739 case RETURN_EXPR:
1740 fold_stmt (tp);
1741 data->last_goto = NULL;
1742 data->may_branch = true;
1743 break;
1745 case CALL_EXPR:
1746 fold_stmt (tp);
1747 data->last_goto = NULL;
1748 notice_special_calls (t);
1749 update_call_expr_flags (t);
1750 if (tree_could_throw_p (t))
1751 data->may_throw = true;
1752 break;
1754 case MODIFY_EXPR:
1755 data->last_goto = NULL;
1756 fold_stmt (tp);
1757 op = get_call_expr_in (t);
1758 if (op)
1760 update_call_expr_flags (op);
1761 notice_special_calls (op);
1763 if (tree_could_throw_p (t))
1764 data->may_throw = true;
1765 break;
1767 case STATEMENT_LIST:
1769 tree_stmt_iterator i = tsi_start (t);
1770 while (!tsi_end_p (i))
1772 t = tsi_stmt (i);
1773 if (IS_EMPTY_STMT (t))
1775 tsi_delink (&i);
1776 continue;
1779 remove_useless_stmts_1 (tsi_stmt_ptr (i), data);
1781 t = tsi_stmt (i);
1782 if (TREE_CODE (t) == STATEMENT_LIST)
1784 tsi_link_before (&i, t, TSI_SAME_STMT);
1785 tsi_delink (&i);
1787 else
1788 tsi_next (&i);
1791 break;
1792 case ASM_EXPR:
1793 fold_stmt (tp);
1794 data->last_goto = NULL;
1795 break;
1797 default:
1798 data->last_goto = NULL;
1799 break;
1803 static void
1804 remove_useless_stmts (void)
1806 struct rus_data data;
1808 clear_special_calls ();
1812 memset (&data, 0, sizeof (data));
1813 remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
1815 while (data.repeat);
1819 struct tree_opt_pass pass_remove_useless_stmts =
1821 "useless", /* name */
1822 NULL, /* gate */
1823 remove_useless_stmts, /* execute */
1824 NULL, /* sub */
1825 NULL, /* next */
1826 0, /* static_pass_number */
1827 0, /* tv_id */
1828 PROP_gimple_any, /* properties_required */
1829 0, /* properties_provided */
1830 0, /* properties_destroyed */
1831 0, /* todo_flags_start */
1832 TODO_dump_func, /* todo_flags_finish */
1833 0 /* letter */
1837 /* Remove obviously useless statements in basic block BB. */
1839 static void
1840 cfg_remove_useless_stmts_bb (basic_block bb)
1842 block_stmt_iterator bsi;
1843 tree stmt = NULL_TREE;
1844 tree cond, var = NULL_TREE, val = NULL_TREE;
1845 struct var_ann_d *ann;
1847 /* Check whether we come here from a condition, and if so, get the
1848 condition. */
1849 if (EDGE_COUNT (bb->preds) != 1
1850 || !(EDGE_PRED (bb, 0)->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
1851 return;
1853 cond = COND_EXPR_COND (last_stmt (EDGE_PRED (bb, 0)->src));
1855 if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1857 var = cond;
1858 val = (EDGE_PRED (bb, 0)->flags & EDGE_FALSE_VALUE
1859 ? boolean_false_node : boolean_true_node);
1861 else if (TREE_CODE (cond) == TRUTH_NOT_EXPR
1862 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1863 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL))
1865 var = TREE_OPERAND (cond, 0);
1866 val = (EDGE_PRED (bb, 0)->flags & EDGE_FALSE_VALUE
1867 ? boolean_true_node : boolean_false_node);
1869 else
1871 if (EDGE_PRED (bb, 0)->flags & EDGE_FALSE_VALUE)
1872 cond = invert_truthvalue (cond);
1873 if (TREE_CODE (cond) == EQ_EXPR
1874 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1875 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1876 && (TREE_CODE (TREE_OPERAND (cond, 1)) == VAR_DECL
1877 || TREE_CODE (TREE_OPERAND (cond, 1)) == PARM_DECL
1878 || TREE_CONSTANT (TREE_OPERAND (cond, 1))))
1880 var = TREE_OPERAND (cond, 0);
1881 val = TREE_OPERAND (cond, 1);
1883 else
1884 return;
1887 /* Only work for normal local variables. */
1888 ann = var_ann (var);
1889 if (!ann
1890 || ann->may_aliases
1891 || TREE_ADDRESSABLE (var))
1892 return;
1894 if (! TREE_CONSTANT (val))
1896 ann = var_ann (val);
1897 if (!ann
1898 || ann->may_aliases
1899 || TREE_ADDRESSABLE (val))
1900 return;
1903 /* Ignore floating point variables, since equality comparisons on them
1904 behave oddly (e.g. NaNs and signed zeros). */
1905 if (FLOAT_TYPE_P (TREE_TYPE (var)))
1906 return;
1908 for (bsi = bsi_start (bb); !bsi_end_p (bsi);)
1910 stmt = bsi_stmt (bsi);
1912 /* If the THEN/ELSE clause merely assigns a value to a variable/parameter
1913 which is already known to contain that value, then remove the useless
1914 THEN/ELSE clause. */
1915 if (TREE_CODE (stmt) == MODIFY_EXPR
1916 && TREE_OPERAND (stmt, 0) == var
1917 && operand_equal_p (val, TREE_OPERAND (stmt, 1), 0))
1919 bsi_remove (&bsi);
1920 continue;
1923 /* Invalidate the var if we encounter something that could modify it.
1924 Likewise for the value it was previously set to. Note that we only
1925 consider values that are either a VAR_DECL or PARM_DECL so we
1926 can test for conflict very simply. */
1927 if (TREE_CODE (stmt) == ASM_EXPR
1928 || (TREE_CODE (stmt) == MODIFY_EXPR
1929 && (TREE_OPERAND (stmt, 0) == var
1930 || TREE_OPERAND (stmt, 0) == val)))
1931 return;
1933 bsi_next (&bsi);
1938 /* A CFG-aware version of remove_useless_stmts. */
1940 void
1941 cfg_remove_useless_stmts (void)
1943 basic_block bb;
1945 #ifdef ENABLE_CHECKING
1946 verify_flow_info ();
1947 #endif
1949 FOR_EACH_BB (bb)
1951 cfg_remove_useless_stmts_bb (bb);
1956 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1958 static void
1959 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1961 tree phi;
1963 /* Since this block is no longer reachable, we can just delete all
1964 of its PHI nodes. */
1965 phi = phi_nodes (bb);
1966 while (phi)
1968 tree next = PHI_CHAIN (phi);
1969 remove_phi_node (phi, NULL_TREE, bb);
1970 phi = next;
1973 /* Remove edges to BB's successors. */
1974 while (EDGE_COUNT (bb->succs) > 0)
1975 remove_edge (EDGE_SUCC (bb, 0));
1979 /* Remove statements of basic block BB. */
1981 static void
1982 remove_bb (basic_block bb)
1984 block_stmt_iterator i;
1985 source_locus loc = 0;
1987 if (dump_file)
1989 fprintf (dump_file, "Removing basic block %d\n", bb->index);
1990 if (dump_flags & TDF_DETAILS)
1992 dump_bb (bb, dump_file, 0);
1993 fprintf (dump_file, "\n");
1997 /* Remove all the instructions in the block. */
1998 for (i = bsi_start (bb); !bsi_end_p (i);)
2000 tree stmt = bsi_stmt (i);
2001 if (TREE_CODE (stmt) == LABEL_EXPR
2002 && FORCED_LABEL (LABEL_EXPR_LABEL (stmt)))
2004 basic_block new_bb = bb->prev_bb;
2005 block_stmt_iterator new_bsi = bsi_after_labels (new_bb);
2007 bsi_remove (&i);
2008 bsi_insert_after (&new_bsi, stmt, BSI_NEW_STMT);
2010 else
2012 release_defs (stmt);
2014 set_bb_for_stmt (stmt, NULL);
2015 bsi_remove (&i);
2018 /* Don't warn for removed gotos. Gotos are often removed due to
2019 jump threading, thus resulting in bogus warnings. Not great,
2020 since this way we lose warnings for gotos in the original
2021 program that are indeed unreachable. */
2022 if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
2023 #ifdef USE_MAPPED_LOCATION
2024 loc = EXPR_LOCATION (stmt);
2025 #else
2026 loc = EXPR_LOCUS (stmt);
2027 #endif
2030 /* If requested, give a warning that the first statement in the
2031 block is unreachable. The loop above walks statements forward but
2032 only records a location the first time one is seen, so LOC refers
2033 to the first statement with a location in the block. */
2034 if (warn_notreached && loc)
2035 #ifdef USE_MAPPED_LOCATION
2036 warning ("%Hwill never be executed", &loc);
2037 #else
2038 warning ("%Hwill never be executed", loc);
2039 #endif
2041 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2044 /* Try to remove superfluous control structures. */
2046 static bool
2047 cleanup_control_flow (void)
2049 basic_block bb;
2050 block_stmt_iterator bsi;
2051 bool retval = false;
2052 tree stmt;
2054 FOR_EACH_BB (bb)
2056 bsi = bsi_last (bb);
2058 if (bsi_end_p (bsi))
2059 continue;
2061 stmt = bsi_stmt (bsi);
2062 if (TREE_CODE (stmt) == COND_EXPR
2063 || TREE_CODE (stmt) == SWITCH_EXPR)
2064 retval |= cleanup_control_expr_graph (bb, bsi);
2066 return retval;
2070 /* Disconnect an unreachable block in the control expression starting
2071 at block BB. */
2073 static bool
2074 cleanup_control_expr_graph (basic_block bb, block_stmt_iterator bsi)
2076 edge taken_edge;
2077 bool retval = false;
2078 tree expr = bsi_stmt (bsi), val;
2080 if (EDGE_COUNT (bb->succs) > 1)
2082 edge e;
2083 edge_iterator ei;
2085 switch (TREE_CODE (expr))
2087 case COND_EXPR:
2088 val = COND_EXPR_COND (expr);
2089 break;
2091 case SWITCH_EXPR:
2092 val = SWITCH_COND (expr);
2093 if (TREE_CODE (val) != INTEGER_CST)
2094 return false;
2095 break;
2097 default:
2098 gcc_unreachable ();
2101 taken_edge = find_taken_edge (bb, val);
2102 if (!taken_edge)
2103 return false;
2105 /* Remove all the edges except the one that is always executed. */
2106 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2108 if (e != taken_edge)
2110 taken_edge->probability += e->probability;
2111 taken_edge->count += e->count;
2112 remove_edge (e);
2113 retval = true;
2115 else
2116 ei_next (&ei);
2118 if (taken_edge->probability > REG_BR_PROB_BASE)
2119 taken_edge->probability = REG_BR_PROB_BASE;
2121 else
2122 taken_edge = EDGE_SUCC (bb, 0);
2124 bsi_remove (&bsi);
2125 taken_edge->flags = EDGE_FALLTHRU;
2127 /* We removed some paths from the cfg. */
2128 free_dominance_info (CDI_DOMINATORS);
2130 return retval;
2134 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2135 predicate VAL, return the edge that will be taken out of the block.
2136 If VAL does not match a unique edge, NULL is returned. */
2138 edge
2139 find_taken_edge (basic_block bb, tree val)
2141 tree stmt;
2143 stmt = last_stmt (bb);
2145 gcc_assert (stmt);
2146 gcc_assert (is_ctrl_stmt (stmt));
2147 gcc_assert (val);
2149 /* If VAL is a predicate of the form N RELOP N, where N is an
2150 SSA_NAME, we can usually determine its truth value. */
2151 if (COMPARISON_CLASS_P (val))
2152 val = fold (val);
2154 /* If VAL is not a constant, we can't determine which edge might
2155 be taken. */
2156 if (!really_constant_p (val))
2157 return NULL;
2159 if (TREE_CODE (stmt) == COND_EXPR)
2160 return find_taken_edge_cond_expr (bb, val);
2162 if (TREE_CODE (stmt) == SWITCH_EXPR)
2163 return find_taken_edge_switch_expr (bb, val);
2165 gcc_unreachable ();
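/* For example (an illustrative sketch, not part of the original sources):
   calling find_taken_edge on a block ending in a COND_EXPR with VAL equal
   to integer_zero_node yields the EDGE_FALSE_VALUE successor; on a block
   ending in a SWITCH_EXPR with an INTEGER_CST VAL of 3 it yields the edge
   to the block of the matching case label, or of the default case when no
   label matches.  */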
2169 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2170 statement, determine which of the two edges will be taken out of the
2171 block. Return NULL if either edge may be taken. */
2173 static edge
2174 find_taken_edge_cond_expr (basic_block bb, tree val)
2176 edge true_edge, false_edge;
2178 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2180 /* Try to determine which branch of the if() will be taken.
2181 If VAL is a constant but it can't be reduced to a 0 or a 1, then
2182 we don't really know which edge will be taken at runtime. This
2183 may happen when comparing addresses (e.g., if (&var1 == 4)). */
2184 if (integer_nonzerop (val))
2185 return true_edge;
2186 else if (integer_zerop (val))
2187 return false_edge;
2188 else
2189 return NULL;
2193 /* Given a constant value VAL and the entry block BB to a SWITCH_EXPR
2194 statement, determine which edge will be taken out of the block. Return
2195 NULL if any edge may be taken. */
2197 static edge
2198 find_taken_edge_switch_expr (basic_block bb, tree val)
2200 tree switch_expr, taken_case;
2201 basic_block dest_bb;
2202 edge e;
2204 if (TREE_CODE (val) != INTEGER_CST)
2205 return NULL;
2207 switch_expr = last_stmt (bb);
2208 taken_case = find_case_label_for_value (switch_expr, val);
2209 dest_bb = label_to_block (CASE_LABEL (taken_case));
2211 e = find_edge (bb, dest_bb);
2212 gcc_assert (e);
2213 return e;
2217 /* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
2218 We can make optimal use here of the fact that the case labels are
2219 sorted: We can do a binary search for a case matching VAL. */
2221 static tree
2222 find_case_label_for_value (tree switch_expr, tree val)
2224 tree vec = SWITCH_LABELS (switch_expr);
2225 size_t low, high, n = TREE_VEC_LENGTH (vec);
2226 tree default_case = TREE_VEC_ELT (vec, n - 1);
2228 for (low = -1, high = n - 1; high - low > 1; )
2230 size_t i = (high + low) / 2;
2231 tree t = TREE_VEC_ELT (vec, i);
2232 int cmp;
2234 /* Cache the result of comparing CASE_LOW and val. */
2235 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2237 if (cmp > 0)
2238 high = i;
2239 else
2240 low = i;
2242 if (CASE_HIGH (t) == NULL)
2244 /* A single-valued case label.  */
2245 if (cmp == 0)
2246 return t;
2248 else
2250 /* A case range. We can only handle integer ranges. */
2251 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2252 return t;
2256 return default_case;
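/* A worked example (illustrative only, not part of the original file):
   suppose the sorted label vector holds "case 1:", "case 5:",
   "case 10 ... 20:" and the default label.  For VAL == 12 the search
   narrows onto the range label, where CASE_LOW (10) <= 12 and
   CASE_HIGH (20) >= 12, so that CASE_LABEL_EXPR is returned; for
   VAL == 7 no label matches and the trailing default case is returned.  */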
2260 /* If all the PHI nodes in DEST have alternatives for E1 and E2 and
2261 those alternatives are equal in each of the PHI nodes, then return
2262 true, else return false. */
2264 static bool
2265 phi_alternatives_equal (basic_block dest, edge e1, edge e2)
2267 int n1 = e1->dest_idx;
2268 int n2 = e2->dest_idx;
2269 tree phi;
2271 for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
2273 tree val1 = PHI_ARG_DEF (phi, n1);
2274 tree val2 = PHI_ARG_DEF (phi, n2);
2276 gcc_assert (val1 != NULL_TREE);
2277 gcc_assert (val2 != NULL_TREE);
2279 if (!operand_equal_for_phi_arg_p (val1, val2))
2280 return false;
2283 return true;
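/* A small example of the check above (illustrative only): for a PHI node
   x_4 = PHI <x_1(E1), x_1(E2), x_7(E3)>, the alternatives for E1 and E2
   compare equal under operand_equal_for_phi_arg_p, so this PHI does not
   prevent the function from returning true; if any PHI node in DEST had
   different values for E1 and E2, the function would return false.  */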
2287 /*---------------------------------------------------------------------------
2288 Debugging functions
2289 ---------------------------------------------------------------------------*/
2291 /* Dump tree-specific information of block BB to file OUTF. */
2293 void
2294 tree_dump_bb (basic_block bb, FILE *outf, int indent)
2296 dump_generic_bb (outf, bb, indent, TDF_VOPS);
2300 /* Dump a basic block on stderr. */
2302 void
2303 debug_tree_bb (basic_block bb)
2305 dump_bb (bb, stderr, 0);
2309 /* Dump basic block with index N on stderr. */
2311 basic_block
2312 debug_tree_bb_n (int n)
2314 debug_tree_bb (BASIC_BLOCK (n));
2315 return BASIC_BLOCK (n);
2319 /* Dump the CFG on stderr.
2321 FLAGS are the same used by the tree dumping functions
2322 (see TDF_* in tree.h). */
2324 void
2325 debug_tree_cfg (int flags)
2327 dump_tree_cfg (stderr, flags);
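/* These debug_* entry points are mainly intended to be called by hand
   from a debugger while stepping through the compiler, e.g. (a sketch):

       (gdb) call debug_tree_bb_n (3)
       (gdb) call debug_tree_cfg (TDF_DETAILS)

   which dump basic block 3 and the whole CFG, respectively, to stderr.  */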
2331 /* Dump the program showing basic block boundaries on the given FILE.
2333 FLAGS are the same used by the tree dumping functions (see TDF_* in
2334 tree.h). */
2336 void
2337 dump_tree_cfg (FILE *file, int flags)
2339 if (flags & TDF_DETAILS)
2341 const char *funcname
2342 = lang_hooks.decl_printable_name (current_function_decl, 2);
2344 fputc ('\n', file);
2345 fprintf (file, ";; Function %s\n\n", funcname);
2346 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2347 n_basic_blocks, n_edges, last_basic_block);
2349 brief_dump_cfg (file);
2350 fprintf (file, "\n");
2353 if (flags & TDF_STATS)
2354 dump_cfg_stats (file);
2356 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2360 /* Dump CFG statistics on FILE. */
2362 void
2363 dump_cfg_stats (FILE *file)
2365 static long max_num_merged_labels = 0;
2366 unsigned long size, total = 0;
2367 int n_edges;
2368 basic_block bb;
2369 const char * const fmt_str = "%-30s%-13s%12s\n";
2370 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2371 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2372 const char *funcname
2373 = lang_hooks.decl_printable_name (current_function_decl, 2);
2376 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2378 fprintf (file, "---------------------------------------------------------\n");
2379 fprintf (file, fmt_str, "", " Number of ", "Memory");
2380 fprintf (file, fmt_str, "", " instances ", "used ");
2381 fprintf (file, "---------------------------------------------------------\n");
2383 size = n_basic_blocks * sizeof (struct basic_block_def);
2384 total += size;
2385 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2386 SCALE (size), LABEL (size));
2388 n_edges = 0;
2389 FOR_EACH_BB (bb)
2390 n_edges += EDGE_COUNT (bb->succs);
2391 size = n_edges * sizeof (struct edge_def);
2392 total += size;
2393 fprintf (file, fmt_str_1, "Edges", n_edges, SCALE (size), LABEL (size));
2395 size = n_basic_blocks * sizeof (struct bb_ann_d);
2396 total += size;
2397 fprintf (file, fmt_str_1, "Basic block annotations", n_basic_blocks,
2398 SCALE (size), LABEL (size));
2400 fprintf (file, "---------------------------------------------------------\n");
2401 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2402 LABEL (total));
2403 fprintf (file, "---------------------------------------------------------\n");
2404 fprintf (file, "\n");
2406 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2407 max_num_merged_labels = cfg_stats.num_merged_labels;
2409 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2410 cfg_stats.num_merged_labels, max_num_merged_labels);
2412 fprintf (file, "\n");
2416 /* Dump CFG statistics on stderr. Keep extern so that it's always
2417 linked in the final executable. */
2419 void
2420 debug_cfg_stats (void)
2422 dump_cfg_stats (stderr);
2426 /* Dump the flowgraph to a .vcg FILE. */
2428 static void
2429 tree_cfg2vcg (FILE *file)
2431 edge e;
2432 edge_iterator ei;
2433 basic_block bb;
2434 const char *funcname
2435 = lang_hooks.decl_printable_name (current_function_decl, 2);
2437 /* Write the file header. */
2438 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2439 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2440 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2442 /* Write blocks and edges. */
2443 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2445 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2446 e->dest->index);
2448 if (e->flags & EDGE_FAKE)
2449 fprintf (file, " linestyle: dotted priority: 10");
2450 else
2451 fprintf (file, " linestyle: solid priority: 100");
2453 fprintf (file, " }\n");
2455 fputc ('\n', file);
2457 FOR_EACH_BB (bb)
2459 enum tree_code head_code, end_code;
2460 const char *head_name, *end_name;
2461 int head_line = 0;
2462 int end_line = 0;
2463 tree first = first_stmt (bb);
2464 tree last = last_stmt (bb);
2466 if (first)
2468 head_code = TREE_CODE (first);
2469 head_name = tree_code_name[head_code];
2470 head_line = get_lineno (first);
2472 else
2473 head_name = "no-statement";
2475 if (last)
2477 end_code = TREE_CODE (last);
2478 end_name = tree_code_name[end_code];
2479 end_line = get_lineno (last);
2481 else
2482 end_name = "no-statement";
2484 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2485 bb->index, bb->index, head_name, head_line, end_name,
2486 end_line);
2488 FOR_EACH_EDGE (e, ei, bb->succs)
2490 if (e->dest == EXIT_BLOCK_PTR)
2491 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2492 else
2493 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2495 if (e->flags & EDGE_FAKE)
2496 fprintf (file, " priority: 10 linestyle: dotted");
2497 else
2498 fprintf (file, " priority: 100 linestyle: solid");
2500 fprintf (file, " }\n");
2503 if (bb->next_bb != EXIT_BLOCK_PTR)
2504 fputc ('\n', file);
2507 fputs ("}\n\n", file);
2512 /*---------------------------------------------------------------------------
2513 Miscellaneous helpers
2514 ---------------------------------------------------------------------------*/
2516 /* Return true if T represents a stmt that always transfers control. */
2518 bool
2519 is_ctrl_stmt (tree t)
2521 return (TREE_CODE (t) == COND_EXPR
2522 || TREE_CODE (t) == SWITCH_EXPR
2523 || TREE_CODE (t) == GOTO_EXPR
2524 || TREE_CODE (t) == RETURN_EXPR
2525 || TREE_CODE (t) == RESX_EXPR);
2529 /* Return true if T is a statement that may alter the flow of control
2530 (e.g., a call to a non-returning function). */
2532 bool
2533 is_ctrl_altering_stmt (tree t)
2535 tree call;
2537 gcc_assert (t);
2538 call = get_call_expr_in (t);
2539 if (call)
2541 /* A non-pure/const CALL_EXPR alters control flow if the current
2542 function has nonlocal labels. */
2543 if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
2544 return true;
2546 /* A CALL_EXPR also alters control flow if it does not return. */
2547 if (call_expr_flags (call) & ECF_NORETURN)
2548 return true;
2551 /* If a statement can throw, it alters control flow. */
2552 return tree_can_throw_internal (t);
2556 /* Return true if T is a computed goto. */
2558 bool
2559 computed_goto_p (tree t)
2561 return (TREE_CODE (t) == GOTO_EXPR
2562 && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
2566 /* Checks whether EXPR is a simple local goto. */
2568 bool
2569 simple_goto_p (tree expr)
2571 return (TREE_CODE (expr) == GOTO_EXPR
2572 && TREE_CODE (GOTO_DESTINATION (expr)) == LABEL_DECL);
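/* For instance (illustrative only): in "goto *ptr;" the GOTO_DESTINATION
   is a pointer-valued expression rather than a LABEL_DECL, so
   computed_goto_p returns true; in "goto lab;" the destination is the
   LABEL_DECL for "lab", so simple_goto_p returns true instead.  */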
2576 /* Return true if T should start a new basic block. PREV_T is the
2577 statement preceding T. It is used when T is a label or a case label.
2578 Labels should only start a new basic block if their previous statement
2579 wasn't a label.  Otherwise, a sequence of labels would generate
2580 unnecessary basic blocks that each contain only a single label.  */
2582 static inline bool
2583 stmt_starts_bb_p (tree t, tree prev_t)
2585 enum tree_code code;
2587 if (t == NULL_TREE)
2588 return false;
2590 /* LABEL_EXPRs start a new basic block only if the preceding
2591 statement wasn't a label of the same type. This prevents the
2592 creation of consecutive blocks that have nothing but a single
2593 label. */
2594 code = TREE_CODE (t);
2595 if (code == LABEL_EXPR)
2597 /* Nonlocal and computed GOTO targets always start a new block. */
2598 if (code == LABEL_EXPR
2599 && (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
2600 || FORCED_LABEL (LABEL_EXPR_LABEL (t))))
2601 return true;
2603 if (prev_t && TREE_CODE (prev_t) == code)
2605 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
2606 return true;
2608 cfg_stats.num_merged_labels++;
2609 return false;
2611 else
2612 return true;
2615 return false;
2619 /* Return true if T should end a basic block. */
2621 bool
2622 stmt_ends_bb_p (tree t)
2624 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2628 /* Add gotos that used to be represented implicitly in the CFG. */
2630 void
2631 disband_implicit_edges (void)
2633 basic_block bb;
2634 block_stmt_iterator last;
2635 edge e;
2636 edge_iterator ei;
2637 tree stmt, label;
2639 FOR_EACH_BB (bb)
2641 last = bsi_last (bb);
2642 stmt = last_stmt (bb);
2644 if (stmt && TREE_CODE (stmt) == COND_EXPR)
2646 /* Remove superfluous gotos from COND_EXPR branches.  Moved
2647 here from cfg_remove_useless_stmts since doing it there violates
2648 the invariants of the tree to CFG correspondence, and it fits
2649 better here where we rewrite the branches anyway.  */
2650 e = find_edge (bb, bb->next_bb);
2651 if (e)
2653 if (e->flags & EDGE_TRUE_VALUE)
2654 COND_EXPR_THEN (stmt) = build_empty_stmt ();
2655 else if (e->flags & EDGE_FALSE_VALUE)
2656 COND_EXPR_ELSE (stmt) = build_empty_stmt ();
2657 else
2658 gcc_unreachable ();
2659 e->flags |= EDGE_FALLTHRU;
2662 continue;
2665 if (stmt && TREE_CODE (stmt) == RETURN_EXPR)
2667 /* Remove the RETURN_EXPR if we may fall through to the exit
2668 instead. */
2669 gcc_assert (EDGE_COUNT (bb->succs) == 1);
2670 gcc_assert (EDGE_SUCC (bb, 0)->dest == EXIT_BLOCK_PTR);
2672 if (bb->next_bb == EXIT_BLOCK_PTR
2673 && !TREE_OPERAND (stmt, 0))
2675 bsi_remove (&last);
2676 EDGE_SUCC (bb, 0)->flags |= EDGE_FALLTHRU;
2678 continue;
2681 /* There can be no fallthru edge if the last statement is a control
2682 one. */
2683 if (stmt && is_ctrl_stmt (stmt))
2684 continue;
2686 /* Find a fallthru edge and emit the goto if necessary. */
2687 FOR_EACH_EDGE (e, ei, bb->succs)
2688 if (e->flags & EDGE_FALLTHRU)
2689 break;
2691 if (!e || e->dest == bb->next_bb)
2692 continue;
2694 gcc_assert (e->dest != EXIT_BLOCK_PTR);
2695 label = tree_block_label (e->dest);
2697 stmt = build1 (GOTO_EXPR, void_type_node, label);
2698 #ifdef USE_MAPPED_LOCATION
2699 SET_EXPR_LOCATION (stmt, e->goto_locus);
2700 #else
2701 SET_EXPR_LOCUS (stmt, e->goto_locus);
2702 #endif
2703 bsi_insert_after (&last, stmt, BSI_NEW_STMT);
2704 e->flags &= ~EDGE_FALLTHRU;
2708 /* Remove block annotations and other data structures.  */
2710 void
2711 delete_tree_cfg_annotations (void)
2713 basic_block bb;
2714 if (n_basic_blocks > 0)
2715 free_blocks_annotations ();
2717 label_to_block_map = NULL;
2718 free_rbi_pool ();
2719 FOR_EACH_BB (bb)
2720 bb->rbi = NULL;
2724 /* Return the first statement in basic block BB. */
2726 tree
2727 first_stmt (basic_block bb)
2729 block_stmt_iterator i = bsi_start (bb);
2730 return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
2734 /* Return the last statement in basic block BB. */
2736 tree
2737 last_stmt (basic_block bb)
2739 block_stmt_iterator b = bsi_last (bb);
2740 return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
2744 /* Return a pointer to the last statement in block BB. */
2746 tree *
2747 last_stmt_ptr (basic_block bb)
2749 block_stmt_iterator last = bsi_last (bb);
2750 return !bsi_end_p (last) ? bsi_stmt_ptr (last) : NULL;
2754 /* Return the last statement of an otherwise empty block. Return NULL
2755 if the block is totally empty, or if it contains more than one
2756 statement. */
2758 tree
2759 last_and_only_stmt (basic_block bb)
2761 block_stmt_iterator i = bsi_last (bb);
2762 tree last, prev;
2764 if (bsi_end_p (i))
2765 return NULL_TREE;
2767 last = bsi_stmt (i);
2768 bsi_prev (&i);
2769 if (bsi_end_p (i))
2770 return last;
2772 /* Empty statements should no longer appear in the instruction stream.
2773 Everything that might have appeared before should be deleted by
2774 remove_useless_stmts, and the optimizers should just bsi_remove
2775 instead of smashing with build_empty_stmt.
2777 Thus the only thing that should appear here in a block containing
2778 one executable statement is a label. */
2779 prev = bsi_stmt (i);
2780 if (TREE_CODE (prev) == LABEL_EXPR)
2781 return last;
2782 else
2783 return NULL_TREE;
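/* Examples (illustrative only): for a block containing just
   "L1:; a_1 = b_2 + 1;" this returns the MODIFY_EXPR, since the only
   other statement is a label; for an empty block, or a block with two
   non-label statements, it returns NULL_TREE.  */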
2787 /* Mark BB as the basic block holding statement T. */
2789 void
2790 set_bb_for_stmt (tree t, basic_block bb)
2792 if (TREE_CODE (t) == PHI_NODE)
2793 PHI_BB (t) = bb;
2794 else if (TREE_CODE (t) == STATEMENT_LIST)
2796 tree_stmt_iterator i;
2797 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2798 set_bb_for_stmt (tsi_stmt (i), bb);
2800 else
2802 stmt_ann_t ann = get_stmt_ann (t);
2803 ann->bb = bb;
2805 /* If the statement is a label, add the label to block-to-labels map
2806 so that we can speed up edge creation for GOTO_EXPRs. */
2807 if (TREE_CODE (t) == LABEL_EXPR)
2809 int uid;
2811 t = LABEL_EXPR_LABEL (t);
2812 uid = LABEL_DECL_UID (t);
2813 if (uid == -1)
2815 LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
2816 if (VARRAY_SIZE (label_to_block_map) <= (unsigned) uid)
2817 VARRAY_GROW (label_to_block_map, 3 * uid / 2);
2819 else
2820 /* We're moving an existing label. Make sure that we've
2821 removed it from the old block. */
2822 gcc_assert (!bb || !VARRAY_BB (label_to_block_map, uid));
2823 VARRAY_BB (label_to_block_map, uid) = bb;
2828 /* Find the block statement iterator that points to STMT.  */
2830 extern block_stmt_iterator
2831 bsi_for_stmt (tree stmt)
2833 block_stmt_iterator bsi;
2835 for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
2836 if (bsi_stmt (bsi) == stmt)
2837 return bsi;
2839 gcc_unreachable ();
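#if 0
/* Illustrative sketch only, not part of the original file: a typical way
   a pass could combine bsi_for_stmt with bsi_replace (defined below),
   assuming OLD_STMT is known to live in some basic block and NEW_STMT is
   a valid GIMPLE replacement for it.  */
static void
example_replace_stmt (tree old_stmt, tree new_stmt)
{
  /* Locate the iterator for OLD_STMT, then swap in NEW_STMT while
     preserving any EH region information.  */
  block_stmt_iterator bsi = bsi_for_stmt (old_stmt);
  bsi_replace (&bsi, new_stmt, true);
}
#endif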
2842 /* Insert statement (or statement list) T before the statement
2843 pointed-to by iterator I. M specifies how to update iterator I
2844 after insertion (see enum bsi_iterator_update). */
2846 void
2847 bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2849 set_bb_for_stmt (t, i->bb);
2850 tsi_link_before (&i->tsi, t, m);
2851 modify_stmt (t);
2855 /* Insert statement (or statement list) T after the statement
2856 pointed-to by iterator I. M specifies how to update iterator I
2857 after insertion (see enum bsi_iterator_update). */
2859 void
2860 bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2862 set_bb_for_stmt (t, i->bb);
2863 tsi_link_after (&i->tsi, t, m);
2864 modify_stmt (t);
2868 /* Remove the statement pointed to by iterator I. The iterator is updated
2869 to the next statement. */
2871 void
2872 bsi_remove (block_stmt_iterator *i)
2874 tree t = bsi_stmt (*i);
2875 set_bb_for_stmt (t, NULL);
2876 tsi_delink (&i->tsi);
2880 /* Move the statement at FROM so it comes right after the statement at TO. */
2882 void
2883 bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
2885 tree stmt = bsi_stmt (*from);
2886 bsi_remove (from);
2887 bsi_insert_after (to, stmt, BSI_SAME_STMT);
2891 /* Move the statement at FROM so it comes right before the statement at TO. */
2893 void
2894 bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
2896 tree stmt = bsi_stmt (*from);
2897 bsi_remove (from);
2898 bsi_insert_before (to, stmt, BSI_SAME_STMT);
2902 /* Move the statement at FROM to the end of basic block BB. */
2904 void
2905 bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
2907 block_stmt_iterator last = bsi_last (bb);
2909 /* Have to check bsi_end_p because it could be an empty block. */
2910 if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
2911 bsi_move_before (from, &last);
2912 else
2913 bsi_move_after (from, &last);
2917 /* Replace the contents of the statement pointed to by iterator BSI
2918 with STMT. If PRESERVE_EH_INFO is true, the exception handling
2919 information of the original statement is preserved. */
2921 void
2922 bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool preserve_eh_info)
2924 int eh_region;
2925 tree orig_stmt = bsi_stmt (*bsi);
2927 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
2928 set_bb_for_stmt (stmt, bsi->bb);
2930 /* Preserve EH region information from the original statement, if
2931 requested by the caller. */
2932 if (preserve_eh_info)
2934 eh_region = lookup_stmt_eh_region (orig_stmt);
2935 if (eh_region >= 0)
2936 add_stmt_to_eh_region (stmt, eh_region);
2939 *bsi_stmt_ptr (*bsi) = stmt;
2940 modify_stmt (stmt);
2944 /* Find a location on edge E to insert a statement, returned in *BSI.  Every attempt
2945 is made to place the statement in an existing basic block, but
2946 sometimes that isn't possible. When it isn't possible, the edge is
2947 split and the statement is added to the new block.
2949 In all cases, the returned *BSI points to the correct location. The
2950 return value is true if insertion should be done after the location,
2951 or false if it should be done before the location.  If a new basic block
2952 has to be created, it is stored in *NEW_BB. */
2954 static bool
2955 tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
2956 basic_block *new_bb)
2958 basic_block dest, src;
2959 tree tmp;
2961 dest = e->dest;
2962 restart:
2964 /* If the destination has one predecessor which has no PHI nodes,
2965 insert there. Except for the exit block.
2967 The requirement for no PHI nodes could be relaxed. Basically we
2968 would have to examine the PHIs to prove that none of them used
2969 the value set by the statement we want to insert on E. That
2970 hardly seems worth the effort. */
2971 if (EDGE_COUNT (dest->preds) == 1
2972 && ! phi_nodes (dest)
2973 && dest != EXIT_BLOCK_PTR)
2975 *bsi = bsi_start (dest);
2976 if (bsi_end_p (*bsi))
2977 return true;
2979 /* Make sure we insert after any leading labels. */
2980 tmp = bsi_stmt (*bsi);
2981 while (TREE_CODE (tmp) == LABEL_EXPR)
2983 bsi_next (bsi);
2984 if (bsi_end_p (*bsi))
2985 break;
2986 tmp = bsi_stmt (*bsi);
2989 if (bsi_end_p (*bsi))
2991 *bsi = bsi_last (dest);
2992 return true;
2994 else
2995 return false;
2998 /* If the source has one successor, the edge is not abnormal and
2999 the last statement does not end a basic block, insert there.
3000 Except for the entry block. */
3001 src = e->src;
3002 if ((e->flags & EDGE_ABNORMAL) == 0
3003 && EDGE_COUNT (src->succs) == 1
3004 && src != ENTRY_BLOCK_PTR)
3006 *bsi = bsi_last (src);
3007 if (bsi_end_p (*bsi))
3008 return true;
3010 tmp = bsi_stmt (*bsi);
3011 if (!stmt_ends_bb_p (tmp))
3012 return true;
3014 /* Insert code just before returning the value. We may need to decompose
3015 the return in case it contains a non-trivial operand.  */
3016 if (TREE_CODE (tmp) == RETURN_EXPR)
3018 tree op = TREE_OPERAND (tmp, 0);
3019 if (!is_gimple_val (op))
3021 gcc_assert (TREE_CODE (op) == MODIFY_EXPR);
3022 bsi_insert_before (bsi, op, BSI_NEW_STMT);
3023 TREE_OPERAND (tmp, 0) = TREE_OPERAND (op, 0);
3025 bsi_prev (bsi);
3026 return true;
3030 /* Otherwise, create a new basic block, and split this edge. */
3031 dest = split_edge (e);
3032 if (new_bb)
3033 *new_bb = dest;
3034 e = EDGE_PRED (dest, 0);
3035 goto restart;
3039 /* This routine will commit all pending edge insertions, creating any new
3040 basic blocks which are necessary. */
3042 void
3043 bsi_commit_edge_inserts (void)
3045 basic_block bb;
3046 edge e;
3047 edge_iterator ei;
3049 bsi_commit_one_edge_insert (EDGE_SUCC (ENTRY_BLOCK_PTR, 0), NULL);
3051 FOR_EACH_BB (bb)
3052 FOR_EACH_EDGE (e, ei, bb->succs)
3053 bsi_commit_one_edge_insert (e, NULL);
3057 /* Commit insertions pending at edge E. If a new block is created, set NEW_BB
3058 to this block, otherwise set it to NULL. */
3060 void
3061 bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
3063 if (new_bb)
3064 *new_bb = NULL;
3065 if (PENDING_STMT (e))
3067 block_stmt_iterator bsi;
3068 tree stmt = PENDING_STMT (e);
3070 PENDING_STMT (e) = NULL_TREE;
3072 if (tree_find_edge_insert_loc (e, &bsi, new_bb))
3073 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3074 else
3075 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3080 /* Add STMT to the pending list of edge E. No actual insertion is
3081 made until a call to bsi_commit_edge_inserts () is made. */
3083 void
3084 bsi_insert_on_edge (edge e, tree stmt)
3086 append_to_statement_list (stmt, &PENDING_STMT (e));
3089 /* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts.  If a new block has to
3090 be created, it is returned. */
3092 basic_block
3093 bsi_insert_on_edge_immediate (edge e, tree stmt)
3095 block_stmt_iterator bsi;
3096 basic_block new_bb = NULL;
3098 gcc_assert (!PENDING_STMT (e));
3100 if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
3101 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3102 else
3103 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3105 return new_bb;
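#if 0
/* Illustrative sketch only, not part of the original file: the two edge
   insertion idioms provided above.  E1, E2, STMT1 and STMT2 are
   hypothetical placeholders.  */
static void
example_deferred_edge_inserts (edge e1, edge e2, tree stmt1, tree stmt2)
{
  /* Queue the statements; nothing is inserted until the commit call,
     which may split edges and create new basic blocks as needed.  */
  bsi_insert_on_edge (e1, stmt1);
  bsi_insert_on_edge (e2, stmt2);
  bsi_commit_edge_inserts ();
}

static basic_block
example_immediate_edge_insert (edge e, tree stmt)
{
  /* Insert right away; the new block, if one had to be created, is
     returned so the caller can update its own records.  */
  return bsi_insert_on_edge_immediate (e, stmt);
}
#endif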
3108 /*---------------------------------------------------------------------------
3109 Tree specific functions for CFG manipulation
3110 ---------------------------------------------------------------------------*/
3112 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
3114 static void
3115 reinstall_phi_args (edge new_edge, edge old_edge)
3117 tree var, phi;
3119 if (!PENDING_STMT (old_edge))
3120 return;
3122 for (var = PENDING_STMT (old_edge), phi = phi_nodes (new_edge->dest);
3123 var && phi;
3124 var = TREE_CHAIN (var), phi = PHI_CHAIN (phi))
3126 tree result = TREE_PURPOSE (var);
3127 tree arg = TREE_VALUE (var);
3129 gcc_assert (result == PHI_RESULT (phi));
3131 add_phi_arg (phi, arg, new_edge);
3134 PENDING_STMT (old_edge) = NULL;
3137 /* Split a (typically critical) edge EDGE_IN. Return the new block.
3138 Abort on abnormal edges. */
3140 static basic_block
3141 tree_split_edge (edge edge_in)
3143 basic_block new_bb, after_bb, dest, src;
3144 edge new_edge, e;
3146 /* Abnormal edges cannot be split. */
3147 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
3149 src = edge_in->src;
3150 dest = edge_in->dest;
3152 /* Place the new block in the block list. Try to keep the new block
3153 near its "logical" location. This is of most help to humans looking
3154 at debugging dumps. */
3155 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
3156 after_bb = edge_in->src;
3157 else
3158 after_bb = dest->prev_bb;
3160 new_bb = create_empty_bb (after_bb);
3161 new_bb->frequency = EDGE_FREQUENCY (edge_in);
3162 new_bb->count = edge_in->count;
3163 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
3164 new_edge->probability = REG_BR_PROB_BASE;
3165 new_edge->count = edge_in->count;
3167 e = redirect_edge_and_branch (edge_in, new_bb);
3168 gcc_assert (e);
3169 reinstall_phi_args (new_edge, e);
3171 return new_bb;
3175 /* Return true when BB has label LABEL in it. */
3177 static bool
3178 has_label_p (basic_block bb, tree label)
3180 block_stmt_iterator bsi;
3182 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3184 tree stmt = bsi_stmt (bsi);
3186 if (TREE_CODE (stmt) != LABEL_EXPR)
3187 return false;
3188 if (LABEL_EXPR_LABEL (stmt) == label)
3189 return true;
3191 return false;
3195 /* Callback for walk_tree: check that all elements whose address is
3196 taken are properly marked as addressable.  */
3198 static tree
3199 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3201 tree t = *tp, x;
3203 if (TYPE_P (t))
3204 *walk_subtrees = 0;
3206 /* Check operand N for being valid GIMPLE and give error MSG if not.
3207 We check for constants explicitly since they are not considered
3208 gimple invariants if they overflowed. */
3209 #define CHECK_OP(N, MSG) \
3210 do { if (!CONSTANT_CLASS_P (TREE_OPERAND (t, N)) \
3211 && !is_gimple_val (TREE_OPERAND (t, N))) \
3212 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
3214 switch (TREE_CODE (t))
3216 case SSA_NAME:
3217 if (SSA_NAME_IN_FREE_LIST (t))
3219 error ("SSA name in freelist but still referenced");
3220 return *tp;
3222 break;
3224 case MODIFY_EXPR:
3225 x = TREE_OPERAND (t, 0);
3226 if (TREE_CODE (x) == BIT_FIELD_REF
3227 && is_gimple_reg (TREE_OPERAND (x, 0)))
3229 error ("GIMPLE register modified with BIT_FIELD_REF");
3230 return t;
3232 break;
3234 case ADDR_EXPR:
3235 /* Skip any references (they will be checked when we recurse down the
3236 tree) and ensure that any variable used as a prefix is marked
3237 addressable. */
3238 for (x = TREE_OPERAND (t, 0);
3239 handled_component_p (x);
3240 x = TREE_OPERAND (x, 0))
3243 if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
3244 return NULL;
3245 if (!TREE_ADDRESSABLE (x))
3247 error ("address taken, but ADDRESSABLE bit not set");
3248 return x;
3250 break;
3252 case COND_EXPR:
3253 x = COND_EXPR_COND (t);
3254 if (TREE_CODE (TREE_TYPE (x)) != BOOLEAN_TYPE)
3256 error ("non-boolean used in condition");
3257 return x;
3259 break;
3261 case NOP_EXPR:
3262 case CONVERT_EXPR:
3263 case FIX_TRUNC_EXPR:
3264 case FIX_CEIL_EXPR:
3265 case FIX_FLOOR_EXPR:
3266 case FIX_ROUND_EXPR:
3267 case FLOAT_EXPR:
3268 case NEGATE_EXPR:
3269 case ABS_EXPR:
3270 case BIT_NOT_EXPR:
3271 case NON_LVALUE_EXPR:
3272 case TRUTH_NOT_EXPR:
3273 CHECK_OP (0, "Invalid operand to unary operator");
3274 break;
3276 case REALPART_EXPR:
3277 case IMAGPART_EXPR:
3278 case COMPONENT_REF:
3279 case ARRAY_REF:
3280 case ARRAY_RANGE_REF:
3281 case BIT_FIELD_REF:
3282 case VIEW_CONVERT_EXPR:
3283 /* We have a nest of references.  Verify that each of the operands
3284 that determines where to reference is either a constant or a variable,
3285 verify that the base is valid, and then flag the subtrees as already
3286 checked.  */
3287 while (handled_component_p (t))
3289 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3290 CHECK_OP (2, "Invalid COMPONENT_REF offset operator");
3291 else if (TREE_CODE (t) == ARRAY_REF
3292 || TREE_CODE (t) == ARRAY_RANGE_REF)
3294 CHECK_OP (1, "Invalid array index.");
3295 if (TREE_OPERAND (t, 2))
3296 CHECK_OP (2, "Invalid array lower bound.");
3297 if (TREE_OPERAND (t, 3))
3298 CHECK_OP (3, "Invalid array stride.");
3300 else if (TREE_CODE (t) == BIT_FIELD_REF)
3302 CHECK_OP (1, "Invalid operand to BIT_FIELD_REF");
3303 CHECK_OP (2, "Invalid operand to BIT_FIELD_REF");
3306 t = TREE_OPERAND (t, 0);
3309 if (!CONSTANT_CLASS_P (t) && !is_gimple_lvalue (t))
3311 error ("Invalid reference prefix.");
3312 return t;
3314 *walk_subtrees = 0;
3315 break;
3317 case LT_EXPR:
3318 case LE_EXPR:
3319 case GT_EXPR:
3320 case GE_EXPR:
3321 case EQ_EXPR:
3322 case NE_EXPR:
3323 case UNORDERED_EXPR:
3324 case ORDERED_EXPR:
3325 case UNLT_EXPR:
3326 case UNLE_EXPR:
3327 case UNGT_EXPR:
3328 case UNGE_EXPR:
3329 case UNEQ_EXPR:
3330 case LTGT_EXPR:
3331 case PLUS_EXPR:
3332 case MINUS_EXPR:
3333 case MULT_EXPR:
3334 case TRUNC_DIV_EXPR:
3335 case CEIL_DIV_EXPR:
3336 case FLOOR_DIV_EXPR:
3337 case ROUND_DIV_EXPR:
3338 case TRUNC_MOD_EXPR:
3339 case CEIL_MOD_EXPR:
3340 case FLOOR_MOD_EXPR:
3341 case ROUND_MOD_EXPR:
3342 case RDIV_EXPR:
3343 case EXACT_DIV_EXPR:
3344 case MIN_EXPR:
3345 case MAX_EXPR:
3346 case LSHIFT_EXPR:
3347 case RSHIFT_EXPR:
3348 case LROTATE_EXPR:
3349 case RROTATE_EXPR:
3350 case BIT_IOR_EXPR:
3351 case BIT_XOR_EXPR:
3352 case BIT_AND_EXPR:
3353 CHECK_OP (0, "Invalid operand to binary operator");
3354 CHECK_OP (1, "Invalid operand to binary operator");
3355 break;
3357 default:
3358 break;
3360 return NULL;
3362 #undef CHECK_OP
3366 /* Verify STMT, return true if STMT is not in GIMPLE form.
3367 TODO: Implement type checking. */
3369 static bool
3370 verify_stmt (tree stmt, bool last_in_block)
3372 tree addr;
3374 if (!is_gimple_stmt (stmt))
3376 error ("Is not a valid GIMPLE statement.");
3377 goto fail;
3380 addr = walk_tree (&stmt, verify_expr, NULL, NULL);
3381 if (addr)
3383 debug_generic_stmt (addr);
3384 return true;
3387 /* If the statement is marked as part of an EH region, then it is
3388 expected that the statement could throw.  Verify that when
3389 optimizations simplify a statement to the point where we can prove
3390 that it cannot throw, we also update the other data structures
3391 to match.  */
3392 if (lookup_stmt_eh_region (stmt) >= 0)
3394 if (!tree_could_throw_p (stmt))
3396 error ("Statement marked for throw, but doesn%'t.");
3397 goto fail;
3399 if (!last_in_block && tree_can_throw_internal (stmt))
3401 error ("Statement marked for throw in middle of block.");
3402 goto fail;
3406 return false;
3408 fail:
3409 debug_generic_stmt (stmt);
3410 return true;
3414 /* Return true when T can be shared.  */
3416 static bool
3417 tree_node_can_be_shared (tree t)
3419 if (IS_TYPE_OR_DECL_P (t)
3420 /* We check for constants explicitly since they are not considered
3421 gimple invariants if they overflowed. */
3422 || CONSTANT_CLASS_P (t)
3423 || is_gimple_min_invariant (t)
3424 || TREE_CODE (t) == SSA_NAME
3425 || t == error_mark_node)
3426 return true;
3428 if (TREE_CODE (t) == CASE_LABEL_EXPR)
3429 return true;
3431 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3432 /* We check for constants explicitly since they are not considered
3433 gimple invariants if they overflowed. */
3434 && (CONSTANT_CLASS_P (TREE_OPERAND (t, 1))
3435 || is_gimple_min_invariant (TREE_OPERAND (t, 1))))
3436 || (TREE_CODE (t) == COMPONENT_REF
3437 || TREE_CODE (t) == REALPART_EXPR
3438 || TREE_CODE (t) == IMAGPART_EXPR))
3439 t = TREE_OPERAND (t, 0);
3441 if (DECL_P (t))
3442 return true;
3444 return false;
3448 /* Called via walk_tree.  Verify tree sharing.  */
3450 static tree
3451 verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
3453 htab_t htab = (htab_t) data;
3454 void **slot;
3456 if (tree_node_can_be_shared (*tp))
3458 *walk_subtrees = false;
3459 return NULL;
3462 slot = htab_find_slot (htab, *tp, INSERT);
3463 if (*slot)
3464 return *slot;
3465 *slot = *tp;
3467 return NULL;
3471 /* Verify the GIMPLE statement chain. */
3473 void
3474 verify_stmts (void)
3476 basic_block bb;
3477 block_stmt_iterator bsi;
3478 bool err = false;
3479 htab_t htab;
3480 tree addr;
3482 timevar_push (TV_TREE_STMT_VERIFY);
3483 htab = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
3485 FOR_EACH_BB (bb)
3487 tree phi;
3488 int i;
3490 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
3492 int phi_num_args = PHI_NUM_ARGS (phi);
3494 for (i = 0; i < phi_num_args; i++)
3496 tree t = PHI_ARG_DEF (phi, i);
3497 tree addr;
3499 /* Addressable variables do have SSA_NAMEs but they
3500 are not considered gimple values. */
3501 if (TREE_CODE (t) != SSA_NAME
3502 && TREE_CODE (t) != FUNCTION_DECL
3503 && !is_gimple_val (t))
3505 error ("PHI def is not a GIMPLE value");
3506 debug_generic_stmt (phi);
3507 debug_generic_stmt (t);
3508 err |= true;
3511 addr = walk_tree (&t, verify_expr, NULL, NULL);
3512 if (addr)
3514 debug_generic_stmt (addr);
3515 err |= true;
3518 addr = walk_tree (&t, verify_node_sharing, htab, NULL);
3519 if (addr)
3521 error ("Incorrect sharing of tree nodes");
3522 debug_generic_stmt (phi);
3523 debug_generic_stmt (addr);
3524 err |= true;
3529 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
3531 tree stmt = bsi_stmt (bsi);
3532 bsi_next (&bsi);
3533 err |= verify_stmt (stmt, bsi_end_p (bsi));
3534 addr = walk_tree (&stmt, verify_node_sharing, htab, NULL);
3535 if (addr)
3537 error ("Incorrect sharing of tree nodes");
3538 debug_generic_stmt (stmt);
3539 debug_generic_stmt (addr);
3540 err |= true;
3545 if (err)
3546 internal_error ("verify_stmts failed.");
3548 htab_delete (htab);
3549 timevar_pop (TV_TREE_STMT_VERIFY);
3553 /* Verifies that the flow information is OK. */
3555 static int
3556 tree_verify_flow_info (void)
3558 int err = 0;
3559 basic_block bb;
3560 block_stmt_iterator bsi;
3561 tree stmt;
3562 edge e;
3563 edge_iterator ei;
3565 if (ENTRY_BLOCK_PTR->stmt_list)
3567 error ("ENTRY_BLOCK has a statement list associated with it\n");
3568 err = 1;
3571 if (EXIT_BLOCK_PTR->stmt_list)
3573 error ("EXIT_BLOCK has a statement list associated with it\n");
3574 err = 1;
3577 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
3578 if (e->flags & EDGE_FALLTHRU)
3580 error ("Fallthru to exit from bb %d\n", e->src->index);
3581 err = 1;
3584 FOR_EACH_BB (bb)
3586 bool found_ctrl_stmt = false;
3588 /* Skip labels at the start of the basic block.  */
3589 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3591 if (TREE_CODE (bsi_stmt (bsi)) != LABEL_EXPR)
3592 break;
3594 if (label_to_block (LABEL_EXPR_LABEL (bsi_stmt (bsi))) != bb)
3596 tree stmt = bsi_stmt (bsi);
3597 error ("Label %s to block does not match in bb %d\n",
3598 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3599 bb->index);
3600 err = 1;
3603 if (decl_function_context (LABEL_EXPR_LABEL (bsi_stmt (bsi)))
3604 != current_function_decl)
3606 tree stmt = bsi_stmt (bsi);
3607 error ("Label %s has incorrect context in bb %d\n",
3608 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3609 bb->index);
3610 err = 1;
3614 /* Verify that the body of basic block BB is free of control flow.  */
3615 for (; !bsi_end_p (bsi); bsi_next (&bsi))
3617 tree stmt = bsi_stmt (bsi);
3619 if (found_ctrl_stmt)
3621 error ("Control flow in the middle of basic block %d\n",
3622 bb->index);
3623 err = 1;
3626 if (stmt_ends_bb_p (stmt))
3627 found_ctrl_stmt = true;
3629 if (TREE_CODE (stmt) == LABEL_EXPR)
3631 error ("Label %s in the middle of basic block %d\n",
3632 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3633 bb->index);
3634 err = 1;
3637 bsi = bsi_last (bb);
3638 if (bsi_end_p (bsi))
3639 continue;
3641 stmt = bsi_stmt (bsi);
3643 if (is_ctrl_stmt (stmt))
3645 FOR_EACH_EDGE (e, ei, bb->succs)
3646 if (e->flags & EDGE_FALLTHRU)
3648 error ("Fallthru edge after a control statement in bb %d \n",
3649 bb->index);
3650 err = 1;
3654 switch (TREE_CODE (stmt))
3656 case COND_EXPR:
3658 edge true_edge;
3659 edge false_edge;
3660 if (TREE_CODE (COND_EXPR_THEN (stmt)) != GOTO_EXPR
3661 || TREE_CODE (COND_EXPR_ELSE (stmt)) != GOTO_EXPR)
3663 error ("Structured COND_EXPR at the end of bb %d\n", bb->index);
3664 err = 1;
3667 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
3669 if (!true_edge || !false_edge
3670 || !(true_edge->flags & EDGE_TRUE_VALUE)
3671 || !(false_edge->flags & EDGE_FALSE_VALUE)
3672 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3673 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3674 || EDGE_COUNT (bb->succs) >= 3)
3676 error ("Wrong outgoing edge flags at end of bb %d\n",
3677 bb->index);
3678 err = 1;
3681 if (!has_label_p (true_edge->dest,
3682 GOTO_DESTINATION (COND_EXPR_THEN (stmt))))
3684 error ("%<then%> label does not match edge at end of bb %d\n",
3685 bb->index);
3686 err = 1;
3689 if (!has_label_p (false_edge->dest,
3690 GOTO_DESTINATION (COND_EXPR_ELSE (stmt))))
3692 error ("%<else%> label does not match edge at end of bb %d\n",
3693 bb->index);
3694 err = 1;
3697 break;
3699 case GOTO_EXPR:
3700 if (simple_goto_p (stmt))
3702 error ("Explicit goto at end of bb %d\n", bb->index);
3703 err = 1;
3705 else
3707 /* FIXME. We should double check that the labels in the
3708 destination blocks have their address taken. */
3709 FOR_EACH_EDGE (e, ei, bb->succs)
3710 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
3711 | EDGE_FALSE_VALUE))
3712 || !(e->flags & EDGE_ABNORMAL))
3714 error ("Wrong outgoing edge flags at end of bb %d\n",
3715 bb->index);
3716 err = 1;
3719 break;
3721 case RETURN_EXPR:
3722 if (EDGE_COUNT (bb->succs) != 1
3723 || (EDGE_SUCC (bb, 0)->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
3724 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3726 error ("Wrong outgoing edge flags at end of bb %d\n", bb->index);
3727 err = 1;
3729 if (EDGE_SUCC (bb, 0)->dest != EXIT_BLOCK_PTR)
3731 error ("Return edge does not point to exit in bb %d\n",
3732 bb->index);
3733 err = 1;
3735 break;
3737 case SWITCH_EXPR:
3739 tree prev;
3740 edge e;
3741 size_t i, n;
3742 tree vec;
3744 vec = SWITCH_LABELS (stmt);
3745 n = TREE_VEC_LENGTH (vec);
3747 /* Mark all the destination basic blocks. */
3748 for (i = 0; i < n; ++i)
3750 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3751 basic_block label_bb = label_to_block (lab);
3753 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
3754 label_bb->aux = (void *)1;
3757 /* Verify that the case labels are sorted. */
3758 prev = TREE_VEC_ELT (vec, 0);
3759 for (i = 1; i < n - 1; ++i)
3761 tree c = TREE_VEC_ELT (vec, i);
3762 if (! CASE_LOW (c))
3764 error ("Found default case not at end of case vector");
3765 err = 1;
3766 continue;
3768 if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
3770 error ("Case labels not sorted:\n ");
3771 print_generic_expr (stderr, prev, 0);
3772 fprintf (stderr," is greater than ");
3773 print_generic_expr (stderr, c, 0);
3774 fprintf (stderr," but comes before it.\n");
3775 err = 1;
3777 prev = c;
3779 if (CASE_LOW (TREE_VEC_ELT (vec, n - 1)))
3781 error ("No default case found at end of case vector");
3782 err = 1;
3785 FOR_EACH_EDGE (e, ei, bb->succs)
3787 if (!e->dest->aux)
3789 error ("Extra outgoing edge %d->%d\n",
3790 bb->index, e->dest->index);
3791 err = 1;
3793 e->dest->aux = (void *)2;
3794 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
3795 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3797 error ("Wrong outgoing edge flags at end of bb %d\n",
3798 bb->index);
3799 err = 1;
3803 /* Check that we have all of them. */
3804 for (i = 0; i < n; ++i)
3806 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3807 basic_block label_bb = label_to_block (lab);
3809 if (label_bb->aux != (void *)2)
3811 error ("Missing edge %i->%i",
3812 bb->index, label_bb->index);
3813 err = 1;
3817 FOR_EACH_EDGE (e, ei, bb->succs)
3818 e->dest->aux = (void *)0;
3821 default: ;
3825 if (dom_computed[CDI_DOMINATORS] >= DOM_NO_FAST_QUERY)
3826 verify_dominators (CDI_DOMINATORS);
3828 return err;
3832 /* Updates phi nodes after creating a forwarder block joined
3833 by edge FALLTHRU. */
3835 static void
3836 tree_make_forwarder_block (edge fallthru)
3838 edge e;
3839 edge_iterator ei;
3840 basic_block dummy, bb;
3841 tree phi, new_phi, var;
3843 dummy = fallthru->src;
3844 bb = fallthru->dest;
3846 if (EDGE_COUNT (bb->preds) == 1)
3847 return;
3849 /* If we redirected a branch we must create new phi nodes at the
3850 start of BB. */
3851 for (phi = phi_nodes (dummy); phi; phi = PHI_CHAIN (phi))
3853 var = PHI_RESULT (phi);
3854 new_phi = create_phi_node (var, bb);
3855 SSA_NAME_DEF_STMT (var) = new_phi;
3856 SET_PHI_RESULT (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
3857 add_phi_arg (new_phi, PHI_RESULT (phi), fallthru);
3860 /* Ensure that the PHI node chain is in the same order. */
3861 set_phi_nodes (bb, phi_reverse (phi_nodes (bb)));
3863 /* Add the arguments we have stored on edges. */
3864 FOR_EACH_EDGE (e, ei, bb->preds)
3866 if (e == fallthru)
3867 continue;
3869 flush_pending_stmts (e);
3874 /* Return true if basic block BB does nothing except pass control
3875 flow to another block and we can safely insert a label at
3876 the start of the successor block.
3878 As a precondition, we require that BB not be equal to
3879 ENTRY_BLOCK_PTR. */
3881 static bool
3882 tree_forwarder_block_p (basic_block bb)
3884 block_stmt_iterator bsi;
3886 /* BB must have a single outgoing edge. */
3887 if (EDGE_COUNT (bb->succs) != 1
3888 /* BB cannot have any PHI nodes.  This could potentially be
3889 relaxed early in compilation if we re-rewrote the variables
3890 appearing in any PHI nodes in forwarder blocks. */
3891 || phi_nodes (bb)
3892 /* BB may not be a predecessor of EXIT_BLOCK_PTR. */
3893 || EDGE_SUCC (bb, 0)->dest == EXIT_BLOCK_PTR
3894 /* BB may not have an abnormal outgoing edge. */
3895 || (EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL))
3896 return false;
3898 #if ENABLE_CHECKING
3899 gcc_assert (bb != ENTRY_BLOCK_PTR);
3900 #endif
3902 /* Now walk through the statements. We can ignore labels, anything else
3903 means this is not a forwarder block. */
3904 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3906 tree stmt = bsi_stmt (bsi);
3908 switch (TREE_CODE (stmt))
3910 case LABEL_EXPR:
3911 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
3912 return false;
3913 break;
3915 default:
3916 return false;
3920 if (find_edge (ENTRY_BLOCK_PTR, bb))
3921 return false;
3923 return true;
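/* For example (illustrative only): a block whose body is just "L1:;" and
   whose single, non-abnormal outgoing edge leads to a block other than
   EXIT_BLOCK_PTR is a forwarder, provided it has no PHI nodes; any real
   statement, a nonlocal label, a PHI node or an abnormal edge
   disqualifies it.  */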
3926 /* Thread jumps from BB. */
3928 static bool
3929 thread_jumps_from_bb (basic_block bb)
3931 edge_iterator ei;
3932 edge e;
3933 bool retval = false;
3935 /* Examine each of our block's successors to see if it is
3936 forwardable. */
3937 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3939 int freq;
3940 gcov_type count;
3941 edge last, old;
3942 basic_block dest, tmp, curr, old_dest;
3943 tree phi;
3945 /* If the edge is abnormal or its destination is not
3946 forwardable, then there's nothing to do. */
3947 if ((e->flags & EDGE_ABNORMAL)
3948 || !bb_ann (e->dest)->forwardable)
3950 ei_next (&ei);
3951 continue;
3954 /* Now walk through as many forwarder blocks as possible to find
3955 the ultimate destination we want to thread our jump to. */
3956 last = EDGE_SUCC (e->dest, 0);
3957 bb_ann (e->dest)->forwardable = 0;
3958 for (dest = EDGE_SUCC (e->dest, 0)->dest;
3959 bb_ann (dest)->forwardable;
3960 last = EDGE_SUCC (dest, 0),
3961 dest = EDGE_SUCC (dest, 0)->dest)
3962 bb_ann (dest)->forwardable = 0;
3964 /* Reset the forwardable marks to 1. */
3965 for (tmp = e->dest;
3966 tmp != dest;
3967 tmp = EDGE_SUCC (tmp, 0)->dest)
3968 bb_ann (tmp)->forwardable = 1;
3970 if (dest == e->dest)
3972 ei_next (&ei);
3973 continue;
3976 old = find_edge (bb, dest);
3977 if (old)
3979 /* If there already is an edge, check whether the values in
3980 phi nodes differ. */
3981 if (!phi_alternatives_equal (dest, last, old))
3983 /* The previous block is a forwarder.  Redirect our jump
3984 to that target instead since we know it has no PHI
3985 nodes that will need updating. */
3986 dest = last->src;
3988 /* That might mean that no forwarding at all is
3989 possible. */
3990 if (dest == e->dest)
3992 ei_next (&ei);
3993 continue;
3996 old = find_edge (bb, dest);
4000 /* Perform the redirection. */
4001 retval = true;
4002 count = e->count;
4003 freq = EDGE_FREQUENCY (e);
4004 old_dest = e->dest;
4005 e = redirect_edge_and_branch (e, dest);
4007 /* Update the profile. */
4008 if (profile_status != PROFILE_ABSENT)
4009 for (curr = old_dest;
4010 curr != dest;
4011 curr = EDGE_SUCC (curr, 0)->dest)
4013 curr->frequency -= freq;
4014 if (curr->frequency < 0)
4015 curr->frequency = 0;
4016 curr->count -= count;
4017 if (curr->count < 0)
4018 curr->count = 0;
4019 EDGE_SUCC (curr, 0)->count -= count;
4020 if (EDGE_SUCC (curr, 0)->count < 0)
4021 EDGE_SUCC (curr, 0)->count = 0;
4024 if (!old)
4026 /* Update PHI nodes. We know that the new argument should
4027 have the same value as the argument associated with LAST.
4028 Otherwise we would have changed our target block
4029 above. */
4030 int arg = last->dest_idx;
4032 for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
4034 tree def = PHI_ARG_DEF (phi, arg);
4035 gcc_assert (def != NULL_TREE);
4036 add_phi_arg (phi, def, e);
4040 /* Remove the unreachable blocks (observe that if all blocks
4041 were reachable before, only those in the path we threaded
4042 over and did not have any predecessor outside of the path
4043 become unreachable). */
4044 for (; old_dest != dest; old_dest = tmp)
4046 tmp = EDGE_SUCC (old_dest, 0)->dest;
4048 if (EDGE_COUNT (old_dest->preds) > 0)
4049 break;
4051 delete_basic_block (old_dest);
4054 /* Update the dominators. */
4055 if (dom_info_available_p (CDI_DOMINATORS))
4057 /* If the dominator of the destination was in the
4058 path, set its dominator to the start of the
4059 redirected edge. */
4060 if (get_immediate_dominator (CDI_DOMINATORS, old_dest) == NULL)
4061 set_immediate_dominator (CDI_DOMINATORS, old_dest, bb);
4063 /* Now proceed as if we had forwarded just one edge at a
4064 time.  The algorithm for forwarding edge S --> A over
4065 edge A --> B is then
4067 if (idom (B) == A
4068 && !dominated_by (S, B))
4069 idom (B) = idom (A);
4070 recount_idom (A); */
4072 for (; old_dest != dest; old_dest = tmp)
4074 basic_block dom;
4076 tmp = EDGE_SUCC (old_dest, 0)->dest;
4078 if (get_immediate_dominator (CDI_DOMINATORS, tmp) == old_dest
4079 && !dominated_by_p (CDI_DOMINATORS, bb, tmp))
4081 dom = get_immediate_dominator (CDI_DOMINATORS, old_dest);
4082 set_immediate_dominator (CDI_DOMINATORS, tmp, dom);
4085 dom = recount_dominator (CDI_DOMINATORS, old_dest);
4086 set_immediate_dominator (CDI_DOMINATORS, old_dest, dom);
4091 return retval;
4095 /* Thread jumps over empty statements.
4097 This code should _not_ thread over obviously equivalent conditions
4098 as that requires nontrivial updates to the SSA graph.
4100 As a precondition, we require that all basic blocks be reachable.
4101 That is, there should be no opportunities left for
4102 delete_unreachable_blocks. */
4104 static bool
4105 thread_jumps (void)
4107 basic_block bb;
4108 bool retval = false;
4109 basic_block *worklist = xmalloc (sizeof (basic_block) * last_basic_block);
4110 basic_block *current = worklist;
4112 FOR_EACH_BB (bb)
4114 bb_ann (bb)->forwardable = tree_forwarder_block_p (bb);
4115 bb->flags &= ~BB_VISITED;
4118 /* We pretend to have ENTRY_BLOCK_PTR in WORKLIST. This way,
4119 ENTRY_BLOCK_PTR will never be entered into WORKLIST. */
4120 ENTRY_BLOCK_PTR->flags |= BB_VISITED;
4122 /* Initialize WORKLIST by putting non-forwarder blocks that
4123 immediately precede forwarder blocks because those are the ones
4124 that we know we can thread jumps from. We use BB_VISITED to
4125 indicate whether a given basic block is in WORKLIST or not,
4126 thereby avoiding duplicates in WORKLIST. */
4127 FOR_EACH_BB (bb)
4129 edge_iterator ei;
4130 edge e;
4132 /* We are not interested in finding non-forwarder blocks
4133 directly. We want to find non-forwarder blocks as
4134 predecessors of a forwarder block. */
4135 if (!bb_ann (bb)->forwardable)
4136 continue;
4138 /* Now we know BB is a forwarder block. Visit each of its
4139 incoming edges and add to WORKLIST all non-forwarder blocks
4140 among BB's predecessors. */
4141 FOR_EACH_EDGE (e, ei, bb->preds)
4143 /* We don't want to put a duplicate into WORKLIST. */
4144 if ((e->src->flags & BB_VISITED) == 0
4145 /* We are not interested in threading jumps from a forwarder
4146 block. */
4147 && !bb_ann (e->src)->forwardable)
4149 e->src->flags |= BB_VISITED;
4150 *current++ = e->src;
4155 /* Now let's drain WORKLIST. */
4156 while (worklist != current)
4158 bb = *--current;
4160 /* BB is no longer in WORKLIST, so clear BB_VISITED. */
4161 bb->flags &= ~BB_VISITED;
4163 if (thread_jumps_from_bb (bb))
4165 retval = true;
4167 if (tree_forwarder_block_p (bb))
4169 edge_iterator ej;
4170 edge f;
4172 bb_ann (bb)->forwardable = true;
4174 /* Attempts to thread through BB may have been blocked
4175 because BB was not a forwarder block before. Now
4176 that BB is a forwarder block, we should revisit BB's
4177 predecessors. */
4178 FOR_EACH_EDGE (f, ej, bb->preds)
4180 /* We don't want to put a duplicate into WORKLIST. */
4181 if ((f->src->flags & BB_VISITED) == 0
4182 /* We are not interested in threading jumps from a
4183 forwarder block. */
4184 && !bb_ann (f->src)->forwardable)
4186 f->src->flags |= BB_VISITED;
4187 *current++ = f->src;
4194 ENTRY_BLOCK_PTR->flags &= ~BB_VISITED;
4196 free (worklist);
4198 return retval;
4202 /* Return a non-special label at the head of basic block BB.
4203 Create one if it doesn't exist. */
4205 tree
4206 tree_block_label (basic_block bb)
4208 block_stmt_iterator i, s = bsi_start (bb);
4209 bool first = true;
4210 tree label, stmt;
4212 for (i = s; !bsi_end_p (i); first = false, bsi_next (&i))
4214 stmt = bsi_stmt (i);
4215 if (TREE_CODE (stmt) != LABEL_EXPR)
4216 break;
4217 label = LABEL_EXPR_LABEL (stmt);
4218 if (!DECL_NONLOCAL (label))
4220 if (!first)
4221 bsi_move_before (&i, &s);
4222 return label;
4226 label = create_artificial_label ();
4227 stmt = build1 (LABEL_EXPR, void_type_node, label);
4228 bsi_insert_before (&s, stmt, BSI_NEW_STMT);
4229 return label;
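#if 0
/* Illustrative sketch only, not part of the original file: emitting an
   explicit jump to basic block DEST at iterator *BSI, in the same way
   disband_implicit_edges does above.  */
static void
example_emit_goto (block_stmt_iterator *bsi, basic_block dest)
{
  tree label = tree_block_label (dest);
  tree goto_stmt = build1 (GOTO_EXPR, void_type_node, label);
  bsi_insert_after (bsi, goto_stmt, BSI_NEW_STMT);
}
#endif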
4233 /* Attempt to perform edge redirection by replacing a possibly complex
4234 jump instruction by a goto or by removing the jump completely.
4235 This can apply only if all edges now point to the same block. The
4236 parameters and return values are equivalent to
4237 redirect_edge_and_branch. */
4239 static edge
4240 tree_try_redirect_by_replacing_jump (edge e, basic_block target)
4242 basic_block src = e->src;
4243 block_stmt_iterator b;
4244 tree stmt;
4246 /* We can replace or remove a complex jump only when we have exactly
4247 two edges. */
4248 if (EDGE_COUNT (src->succs) != 2
4249 /* Verify that all targets will be TARGET. Specifically, the
4250 edge that is not E must also go to TARGET. */
4251 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4252 return NULL;
4254 b = bsi_last (src);
4255 if (bsi_end_p (b))
4256 return NULL;
4257 stmt = bsi_stmt (b);
4259 if (TREE_CODE (stmt) == COND_EXPR
4260 || TREE_CODE (stmt) == SWITCH_EXPR)
4262 bsi_remove (&b);
4263 e = ssa_redirect_edge (e, target);
4264 e->flags = EDGE_FALLTHRU;
4265 return e;
4268 return NULL;
4272 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4273 edge representing the redirected branch. */
4275 static edge
4276 tree_redirect_edge_and_branch (edge e, basic_block dest)
4278 basic_block bb = e->src;
4279 block_stmt_iterator bsi;
4280 edge ret;
4281 tree label, stmt;
4283 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4284 return NULL;
4286 if (e->src != ENTRY_BLOCK_PTR
4287 && (ret = tree_try_redirect_by_replacing_jump (e, dest)))
4288 return ret;
4290 if (e->dest == dest)
4291 return NULL;
4293 label = tree_block_label (dest);
4295 bsi = bsi_last (bb);
4296 stmt = bsi_end_p (bsi) ? NULL : bsi_stmt (bsi);
4298 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
4300 case COND_EXPR:
4301 stmt = (e->flags & EDGE_TRUE_VALUE
4302 ? COND_EXPR_THEN (stmt)
4303 : COND_EXPR_ELSE (stmt));
4304 GOTO_DESTINATION (stmt) = label;
4305 break;
4307 case GOTO_EXPR:
4308 /* No non-abnormal edges should lead from a non-simple goto, and
4309 simple ones should be represented implicitly. */
4310 gcc_unreachable ();
4312 case SWITCH_EXPR:
4314 tree cases = get_cases_for_edge (e, stmt);
4316 /* If we have a list of cases associated with E, then use it
4317 as it's a lot faster than walking the entire case vector. */
4318 if (cases)
4320 edge e2 = find_edge (e->src, dest);
4321 tree last, first;
4323 first = cases;
4324 while (cases)
4326 last = cases;
4327 CASE_LABEL (cases) = label;
4328 cases = TREE_CHAIN (cases);
4331 /* If there was already an edge in the CFG, then we need
4332 to move all the cases associated with E to E2. */
4333 if (e2)
4335 tree cases2 = get_cases_for_edge (e2, stmt);
4337 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4338 TREE_CHAIN (cases2) = first;
4341 else
4343 tree vec = SWITCH_LABELS (stmt);
4344 size_t i, n = TREE_VEC_LENGTH (vec);
4346 for (i = 0; i < n; i++)
4348 tree elt = TREE_VEC_ELT (vec, i);
4350 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4351 CASE_LABEL (elt) = label;
4355 break;
4358 case RETURN_EXPR:
4359 bsi_remove (&bsi);
4360 e->flags |= EDGE_FALLTHRU;
4361 break;
4363 default:
4364 /* Otherwise it must be a fallthru edge, and we don't need to
4365 do anything besides redirecting it. */
4366 gcc_assert (e->flags & EDGE_FALLTHRU);
4367 break;
4370 /* Update/insert PHI nodes as necessary. */
4372 /* Now update the edges in the CFG. */
4373 e = ssa_redirect_edge (e, dest);
4375 return e;
4379 /* Simple wrapper, as we can always redirect fallthru edges. */
4381 static basic_block
4382 tree_redirect_edge_and_branch_force (edge e, basic_block dest)
4384 e = tree_redirect_edge_and_branch (e, dest);
4385 gcc_assert (e);
4387 return NULL;
4391 /* Splits basic block BB after statement STMT (but at least after the
4392 labels). If STMT is NULL, BB is split just after the labels. */
4394 static basic_block
4395 tree_split_block (basic_block bb, void *stmt)
4397 block_stmt_iterator bsi, bsi_tgt;
4398 tree act;
4399 basic_block new_bb;
4400 edge e;
4401 edge_iterator ei;
4403 new_bb = create_empty_bb (bb);
4405 /* Redirect the outgoing edges. */
4406 new_bb->succs = bb->succs;
4407 bb->succs = NULL;
4408 FOR_EACH_EDGE (e, ei, new_bb->succs)
4409 e->src = new_bb;
4411 if (stmt && TREE_CODE ((tree) stmt) == LABEL_EXPR)
4412 stmt = NULL;
4414 /* Move everything from BSI to the new basic block. */
4415 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4417 act = bsi_stmt (bsi);
4418 if (TREE_CODE (act) == LABEL_EXPR)
4419 continue;
4421 if (!stmt)
4422 break;
4424 if (stmt == act)
4426 bsi_next (&bsi);
4427 break;
4431 bsi_tgt = bsi_start (new_bb);
4432 while (!bsi_end_p (bsi))
4434 act = bsi_stmt (bsi);
4435 bsi_remove (&bsi);
4436 bsi_insert_after (&bsi_tgt, act, BSI_NEW_STMT);
4439 return new_bb;
4443 /* Moves basic block BB after block AFTER. */
4445 static bool
4446 tree_move_block_after (basic_block bb, basic_block after)
4448 if (bb->prev_bb == after)
4449 return true;
4451 unlink_block (bb);
4452 link_block (bb, after);
4454 return true;
4458 /* Return true if basic_block can be duplicated. */
4460 static bool
4461 tree_can_duplicate_bb_p (basic_block bb ATTRIBUTE_UNUSED)
4463 return true;
4466 /* Create a duplicate of the basic block BB. NOTE: This does not
4467 preserve SSA form. */
4469 static basic_block
4470 tree_duplicate_bb (basic_block bb)
4472 basic_block new_bb;
4473 block_stmt_iterator bsi, bsi_tgt;
4474 tree phi, val;
4475 ssa_op_iter op_iter;
4477 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
4479 /* First copy the phi nodes. We do not copy phi node arguments here,
4480 since the edges are not ready yet. Keep the chain of phi nodes in
4481 the same order, so that we can add them later. */
4482 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4484 mark_for_rewrite (PHI_RESULT (phi));
4485 create_phi_node (PHI_RESULT (phi), new_bb);
4487 set_phi_nodes (new_bb, phi_reverse (phi_nodes (new_bb)));
4489 bsi_tgt = bsi_start (new_bb);
4490 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4492 tree stmt = bsi_stmt (bsi);
4493 tree copy;
4495 if (TREE_CODE (stmt) == LABEL_EXPR)
4496 continue;
4498 /* Record the definitions. */
4499 get_stmt_operands (stmt);
4501 FOR_EACH_SSA_TREE_OPERAND (val, stmt, op_iter, SSA_OP_ALL_DEFS)
4502 mark_for_rewrite (val);
4504 copy = unshare_expr (stmt);
4506 /* Copy also the virtual operands. */
4507 get_stmt_ann (copy);
4508 copy_virtual_operands (copy, stmt);
4510 bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
4513 return new_bb;
4516 /* Basic block BB_COPY was created by code duplication. Add phi node
4517 arguments for edges going out of BB_COPY. The blocks that were
4518 duplicated have rbi->duplicated set to one. */
4520 void
4521 add_phi_args_after_copy_bb (basic_block bb_copy)
4523 basic_block bb, dest;
4524 edge e, e_copy;
4525 edge_iterator ei;
4526 tree phi, phi_copy, phi_next, def;
4528 bb = bb_copy->rbi->original;
4530 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
4532 if (!phi_nodes (e_copy->dest))
4533 continue;
4535 if (e_copy->dest->rbi->duplicated)
4536 dest = e_copy->dest->rbi->original;
4537 else
4538 dest = e_copy->dest;
4540 e = find_edge (bb, dest);
4541 if (!e)
4543 /* During loop unrolling the target of the latch edge is copied.
4544 In this case we are not looking for the edge to DEST, but for
4545 the edge to the duplicated block whose original was DEST. */
4546 FOR_EACH_EDGE (e, ei, bb->succs)
4547 if (e->dest->rbi->duplicated
4548 && e->dest->rbi->original == dest)
4549 break;
4551 gcc_assert (e != NULL);
4554 for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
4555 phi;
4556 phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
4558 phi_next = PHI_CHAIN (phi);
4560 gcc_assert (PHI_RESULT (phi) == PHI_RESULT (phi_copy));
4561 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4562 add_phi_arg (phi_copy, def, e_copy);
4567 /* Blocks in REGION_COPY array of length N_REGION were created by
4568 duplication of basic blocks. Add phi node arguments for edges
4569 going from these blocks. */
4571 void
4572 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region)
4574 unsigned i;
4576 for (i = 0; i < n_region; i++)
4577 region_copy[i]->rbi->duplicated = 1;
4579 for (i = 0; i < n_region; i++)
4580 add_phi_args_after_copy_bb (region_copy[i]);
4582 for (i = 0; i < n_region; i++)
4583 region_copy[i]->rbi->duplicated = 0;
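/* A minimal sketch of the intended calling sequence, mirroring
   tree_duplicate_sese_region below; REGION, REGION_COPY, N_REGION, EXIT,
   EXIT_COPY and LOOP stand for the caller's own data:

       copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop);
       add_phi_args_after_copy (region_copy, n_region);

   copy_bbs records the rbi->original links that add_phi_args_after_copy_bb
   relies on when looking up the corresponding edges in the original
   blocks.  */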
4586 /* Maps the old ssa name FROM_NAME to TO_NAME. */
4588 struct ssa_name_map_entry
4590 tree from_name;
4591 tree to_name;
4594 /* Hash function for ssa_name_map_entry. */
4596 static hashval_t
4597 ssa_name_map_entry_hash (const void *entry)
4599 const struct ssa_name_map_entry *en = entry;
4600 return SSA_NAME_VERSION (en->from_name);
4603 /* Equality function for ssa_name_map_entry. */
4605 static int
4606 ssa_name_map_entry_eq (const void *in_table, const void *ssa_name)
4608 const struct ssa_name_map_entry *en = in_table;
4610 return en->from_name == ssa_name;
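/* Lookups into the map pass the SSA_NAME tree itself as the key and its
   SSA_NAME_VERSION as the precomputed hash, matching the two functions
   above; see the htab_find_slot_with_hash and htab_find_with_hash calls
   below.  */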
4613 /* Allocate duplicates of the ssa names whose versions are set in the bitmap
4614 DEFINITIONS, and store the mapping in the hash table MAP. */
4616 void
4617 allocate_ssa_names (bitmap definitions, htab_t *map)
4619 tree name;
4620 struct ssa_name_map_entry *entry;
4621 PTR *slot;
4622 unsigned ver;
4623 bitmap_iterator bi;
4625 if (!*map)
4626 *map = htab_create (10, ssa_name_map_entry_hash,
4627 ssa_name_map_entry_eq, free);
4628 EXECUTE_IF_SET_IN_BITMAP (definitions, 0, ver, bi)
4630 name = ssa_name (ver);
4631 slot = htab_find_slot_with_hash (*map, name, SSA_NAME_VERSION (name),
4632 INSERT);
4633 if (*slot)
4634 entry = *slot;
4635 else
4637 entry = xmalloc (sizeof (struct ssa_name_map_entry));
4638 entry->from_name = name;
4639 *slot = entry;
4641 entry->to_name = duplicate_ssa_name (name, SSA_NAME_DEF_STMT (name));
4645 /* Rewrite the definition DEF in statement STMT to a new ssa name as specified
4646 by the mapping MAP. */
4648 static void
4649 rewrite_to_new_ssa_names_def (def_operand_p def, tree stmt, htab_t map)
4651 tree name = DEF_FROM_PTR (def);
4652 struct ssa_name_map_entry *entry;
4654 gcc_assert (TREE_CODE (name) == SSA_NAME);
4656 entry = htab_find_with_hash (map, name, SSA_NAME_VERSION (name));
4657 if (!entry)
4658 return;
4660 SET_DEF (def, entry->to_name);
4661 SSA_NAME_DEF_STMT (entry->to_name) = stmt;
4664 /* Rewrite the USE to a new ssa name as specified by the mapping MAP. */
4666 static void
4667 rewrite_to_new_ssa_names_use (use_operand_p use, htab_t map)
4669 tree name = USE_FROM_PTR (use);
4670 struct ssa_name_map_entry *entry;
4672 if (TREE_CODE (name) != SSA_NAME)
4673 return;
4675 entry = htab_find_with_hash (map, name, SSA_NAME_VERSION (name));
4676 if (!entry)
4677 return;
4679 SET_USE (use, entry->to_name);
4682 /* Rewrite the ssa names in basic block BB to new ones as specified by the
4683 mapping MAP. */
4685 void
4686 rewrite_to_new_ssa_names_bb (basic_block bb, htab_t map)
4688 unsigned i;
4689 edge e;
4690 edge_iterator ei;
4691 tree phi, stmt;
4692 block_stmt_iterator bsi;
4693 use_optype uses;
4694 vuse_optype vuses;
4695 def_optype defs;
4696 v_may_def_optype v_may_defs;
4697 v_must_def_optype v_must_defs;
4698 stmt_ann_t ann;
4700 FOR_EACH_EDGE (e, ei, bb->preds)
4701 if (e->flags & EDGE_ABNORMAL)
4702 break;
4704 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4706 rewrite_to_new_ssa_names_def (PHI_RESULT_PTR (phi), phi, map);
4707 if (e)
4708 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)) = 1;
4711 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4713 stmt = bsi_stmt (bsi);
4714 get_stmt_operands (stmt);
4715 ann = stmt_ann (stmt);
4717 uses = USE_OPS (ann);
4718 for (i = 0; i < NUM_USES (uses); i++)
4719 rewrite_to_new_ssa_names_use (USE_OP_PTR (uses, i), map);
4721 defs = DEF_OPS (ann);
4722 for (i = 0; i < NUM_DEFS (defs); i++)
4723 rewrite_to_new_ssa_names_def (DEF_OP_PTR (defs, i), stmt, map);
4725 vuses = VUSE_OPS (ann);
4726 for (i = 0; i < NUM_VUSES (vuses); i++)
4727 rewrite_to_new_ssa_names_use (VUSE_OP_PTR (vuses, i), map);
4729 v_may_defs = V_MAY_DEF_OPS (ann);
4730 for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
4732 rewrite_to_new_ssa_names_use
4733 (V_MAY_DEF_OP_PTR (v_may_defs, i), map);
4734 rewrite_to_new_ssa_names_def
4735 (V_MAY_DEF_RESULT_PTR (v_may_defs, i), stmt, map);
4738 v_must_defs = V_MUST_DEF_OPS (ann);
4739 for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
4741 rewrite_to_new_ssa_names_def
4742 (V_MUST_DEF_RESULT_PTR (v_must_defs, i), stmt, map);
4743 rewrite_to_new_ssa_names_use
4744 (V_MUST_DEF_KILL_PTR (v_must_defs, i), map);
4748 FOR_EACH_EDGE (e, ei, bb->succs)
4749 for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
4751 rewrite_to_new_ssa_names_use
4752 (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e), map);
4754 if (e->flags & EDGE_ABNORMAL)
4756 tree op = PHI_ARG_DEF_FROM_EDGE (phi, e);
4757 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op) = 1;
4762 /* Rewrite the ssa names in N_REGION blocks REGION to the new ones as specified
4763 by the mapping MAP. */
4765 void
4766 rewrite_to_new_ssa_names (basic_block *region, unsigned n_region, htab_t map)
4768 unsigned r;
4770 for (r = 0; r < n_region; r++)
4771 rewrite_to_new_ssa_names_bb (region[r], map);
4774 /* Duplicates a REGION (a set of N_REGION basic blocks) with just a single
4775 important exit edge EXIT. By important we mean that no SSA name defined
4776 inside the region is live over the other exit edges of the region. All entry
4777 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
4778 to the duplicate of the region. SSA form, dominance and loop information
4779 are updated. The new basic blocks are stored in REGION_COPY in the same
4780 order as they had in REGION, provided that REGION_COPY is not NULL.
4781 The function returns false if it is unable to copy the region,
4782 true otherwise. */
4784 bool
4785 tree_duplicate_sese_region (edge entry, edge exit,
4786 basic_block *region, unsigned n_region,
4787 basic_block *region_copy)
4789 unsigned i, n_doms, ver;
4790 bool free_region_copy = false, copying_header = false;
4791 struct loop *loop = entry->dest->loop_father;
4792 edge exit_copy;
4793 bitmap definitions;
4794 tree phi;
4795 basic_block *doms;
4796 htab_t ssa_name_map = NULL;
4797 edge redirected;
4798 bitmap_iterator bi;
4800 if (!can_copy_bbs_p (region, n_region))
4801 return false;
4803 /* Some sanity checking. Note that we do not check for all possible
4804 misuses of the function; i.e. if you ask to copy something weird,
4805 it will work, but the state of the data structures will probably
4806 not be correct. */
4808 for (i = 0; i < n_region; i++)
4810 /* We do not handle subloops, i.e. all the blocks must belong to the
4811 same loop. */
4812 if (region[i]->loop_father != loop)
4813 return false;
4815 if (region[i] != entry->dest
4816 && region[i] == loop->header)
4817 return false;
4820 loop->copy = loop;
4822 /* In case the function is used for loop header copying (which is the primary
4823 use), ensure that EXIT and its copy will be the new latch and entry edges. */
4824 if (loop->header == entry->dest)
4826 copying_header = true;
4827 loop->copy = loop->outer;
4829 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
4830 return false;
4832 for (i = 0; i < n_region; i++)
4833 if (region[i] != exit->src
4834 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
4835 return false;
4838 if (!region_copy)
4840 region_copy = xmalloc (sizeof (basic_block) * n_region);
4841 free_region_copy = true;
4844 gcc_assert (!any_marked_for_rewrite_p ());
4846 /* Record blocks outside the region that are dominated by something
4847 inside. */
4848 doms = xmalloc (sizeof (basic_block) * n_basic_blocks);
4849 n_doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region, doms);
4851 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop);
4852 definitions = marked_ssa_names ();
4854 if (copying_header)
4856 loop->header = exit->dest;
4857 loop->latch = exit->src;
4860 /* Redirect the entry and add the phi node arguments. */
4861 redirected = redirect_edge_and_branch (entry, entry->dest->rbi->copy);
4862 gcc_assert (redirected != NULL);
4863 flush_pending_stmts (entry);
4865 /* Concerning updating of dominators: we must recount dominators
4866 for the entry block and its copy. Anything that is outside of the region
4867 but was dominated by something inside needs recounting as well. */
4868 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
4869 doms[n_doms++] = entry->dest->rbi->original;
4870 iterate_fix_dominators (CDI_DOMINATORS, doms, n_doms);
4871 free (doms);
4873 /* Add the other phi node arguments. */
4874 add_phi_args_after_copy (region_copy, n_region);
4876 /* Add phi nodes for definitions at exit. TODO -- once we have immediate
4877 uses, it should be possible to emit phi nodes just for definitions that
4878 are used outside region. */
4879 EXECUTE_IF_SET_IN_BITMAP (definitions, 0, ver, bi)
4881 tree name = ssa_name (ver);
4883 phi = create_phi_node (name, exit->dest);
4884 add_phi_arg (phi, name, exit);
4885 add_phi_arg (phi, name, exit_copy);
4887 SSA_NAME_DEF_STMT (name) = phi;
4890 /* And create new definitions inside region and its copy. TODO -- once we
4891 have immediate uses, it might be better to leave definitions in region
4892 unchanged, create new ssa names for phi nodes on exit, and rewrite
4893 the uses, to avoid changing the copied region. */
4894 allocate_ssa_names (definitions, &ssa_name_map);
4895 rewrite_to_new_ssa_names (region, n_region, ssa_name_map);
4896 allocate_ssa_names (definitions, &ssa_name_map);
4897 rewrite_to_new_ssa_names (region_copy, n_region, ssa_name_map);
4898 htab_delete (ssa_name_map);
4900 if (free_region_copy)
4901 free (region_copy);
4903 unmark_all_for_rewrite ();
4904 BITMAP_XFREE (definitions);
4906 return true;
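/* A minimal usage sketch for the loop header copying case mentioned above.
   ENTRY is the edge entering the loop header, EXIT the single important
   exit edge, and BBS/N_BBS describe the region; all of these names are
   placeholders for the caller's data:

       if (!tree_duplicate_sese_region (entry, exit, bbs, n_bbs, NULL))
	 return false;

   On success ENTRY enters the copy of the region, and EXIT and its copy
   have become the new latch and entry edges of the loop.  */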
4909 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h). */
4911 void
4912 dump_function_to_file (tree fn, FILE *file, int flags)
4914 tree arg, vars, var;
4915 bool ignore_topmost_bind = false, any_var = false;
4916 basic_block bb;
4917 tree chain;
4919 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
4921 arg = DECL_ARGUMENTS (fn);
4922 while (arg)
4924 print_generic_expr (file, arg, dump_flags);
4925 if (TREE_CHAIN (arg))
4926 fprintf (file, ", ");
4927 arg = TREE_CHAIN (arg);
4929 fprintf (file, ")\n");
4931 if (flags & TDF_RAW)
4933 dump_node (fn, TDF_SLIM | flags, file);
4934 return;
4937 /* When GIMPLE is lowered, the variables are no longer available in
4938 BIND_EXPRs, so display them separately. */
4939 if (cfun && cfun->unexpanded_var_list)
4941 ignore_topmost_bind = true;
4943 fprintf (file, "{\n");
4944 for (vars = cfun->unexpanded_var_list; vars; vars = TREE_CHAIN (vars))
4946 var = TREE_VALUE (vars);
4948 print_generic_decl (file, var, flags);
4949 fprintf (file, "\n");
4951 any_var = true;
4955 if (basic_block_info)
4957 /* Make a CFG based dump. */
4958 check_bb_profile (ENTRY_BLOCK_PTR, file);
4959 if (!ignore_topmost_bind)
4960 fprintf (file, "{\n");
4962 if (any_var && n_basic_blocks)
4963 fprintf (file, "\n");
4965 FOR_EACH_BB (bb)
4966 dump_generic_bb (file, bb, 2, flags);
4968 fprintf (file, "}\n");
4969 check_bb_profile (EXIT_BLOCK_PTR, file);
4971 else
4973 int indent;
4975 /* Make a tree based dump. */
4976 chain = DECL_SAVED_TREE (fn);
4978 if (TREE_CODE (chain) == BIND_EXPR)
4980 if (ignore_topmost_bind)
4982 chain = BIND_EXPR_BODY (chain);
4983 indent = 2;
4985 else
4986 indent = 0;
4988 else
4990 if (!ignore_topmost_bind)
4991 fprintf (file, "{\n");
4992 indent = 2;
4995 if (any_var)
4996 fprintf (file, "\n");
4998 print_generic_stmt_indented (file, chain, flags, indent);
4999 if (ignore_topmost_bind)
5000 fprintf (file, "}\n");
5003 fprintf (file, "\n\n");
5007 /* Pretty printing of the loops' intermediate representation. */
5008 static void print_loop (FILE *, struct loop *, int);
5009 static void print_pred_bbs (FILE *, basic_block bb);
5010 static void print_succ_bbs (FILE *, basic_block bb);
5013 /* Print the indexes of the predecessors of basic block BB on FILE. */
5015 static void
5016 print_pred_bbs (FILE *file, basic_block bb)
5018 edge e;
5019 edge_iterator ei;
5021 FOR_EACH_EDGE (e, ei, bb->preds)
5022 fprintf (file, "bb_%d", e->src->index);
5026 /* Print the indexes of the successors of basic block BB on FILE. */
5028 static void
5029 print_succ_bbs (FILE *file, basic_block bb)
5031 edge e;
5032 edge_iterator ei;
5034 FOR_EACH_EDGE (e, ei, bb->succs)
5035 fprintf (file, "bb_%d", e->dest->index);
5039 /* Pretty print LOOP on FILE, indented INDENT spaces. */
5041 static void
5042 print_loop (FILE *file, struct loop *loop, int indent)
5044 char *s_indent;
5045 basic_block bb;
5047 if (loop == NULL)
5048 return;
5050 s_indent = (char *) alloca ((size_t) indent + 1);
5051 memset ((void *) s_indent, ' ', (size_t) indent);
5052 s_indent[indent] = '\0';
5054 /* Print the loop's header. */
5055 fprintf (file, "%sloop_%d\n", s_indent, loop->num);
5057 /* Print the loop's body. */
5058 fprintf (file, "%s{\n", s_indent);
5059 FOR_EACH_BB (bb)
5060 if (bb->loop_father == loop)
5062 /* Print the basic_block's header. */
5063 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
5064 print_pred_bbs (file, bb);
5065 fprintf (file, "}, succs = {");
5066 print_succ_bbs (file, bb);
5067 fprintf (file, "})\n");
5069 /* Print the basic_block's body. */
5070 fprintf (file, "%s {\n", s_indent);
5071 tree_dump_bb (bb, file, indent + 4);
5072 fprintf (file, "%s }\n", s_indent);
5075 print_loop (file, loop->inner, indent + 2);
5076 fprintf (file, "%s}\n", s_indent);
5077 print_loop (file, loop->next, indent);
5081 /* Follow the CFG from the entry point of the function and, on entry
5082 to a loop, pretty print the loop structure on FILE. */
5084 void
5085 print_loop_ir (FILE *file)
5087 basic_block bb;
5089 bb = BASIC_BLOCK (0);
5090 if (bb && bb->loop_father)
5091 print_loop (file, bb->loop_father, 0);
5095 /* Debug the loop structure at the tree level. */
5097 void
5098 debug_loop_ir (void)
5100 print_loop_ir (stderr);
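/* debug_loop_ir is intended to be called by hand from a debugger session,
   e.g. "call debug_loop_ir ()" under gdb, to dump the loop structure of the
   current function to stderr.  */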
5104 /* Return true if BB ends with a call, possibly followed by some
5105 instructions that must stay with the call. Return false
5106 otherwise. */
5108 static bool
5109 tree_block_ends_with_call_p (basic_block bb)
5111 block_stmt_iterator bsi = bsi_last (bb);
5112 return get_call_expr_in (bsi_stmt (bsi)) != NULL;
5116 /* Return true if BB ends with a conditional branch. Return false
5117 otherwise. */
5119 static bool
5120 tree_block_ends_with_condjump_p (basic_block bb)
5122 tree stmt = tsi_stmt (bsi_last (bb).tsi);
5123 return (TREE_CODE (stmt) == COND_EXPR);
5127 /* Return true if we need to add a fake edge to the exit block at statement T.
5128 Helper function for tree_flow_call_edges_add. */
5130 static bool
5131 need_fake_edge_p (tree t)
5133 tree call;
5135 /* NORETURN and LONGJMP calls already have an edge to exit.
5136 CONST, PURE and ALWAYS_RETURN calls do not need one.
5137 We don't currently check for CONST and PURE here, although
5138 it would be a good idea, because those attributes are
5139 figured out from the RTL in mark_constant_function, and
5140 the counter incrementation code from -fprofile-arcs
5141 leads to different results from -fbranch-probabilities. */
5142 call = get_call_expr_in (t);
5143 if (call
5144 && !(call_expr_flags (call) & (ECF_NORETURN | ECF_ALWAYS_RETURN)))
5145 return true;
5147 if (TREE_CODE (t) == ASM_EXPR
5148 && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
5149 return true;
5151 return false;
5155 /* Add fake edges to the function exit for any non-constant and
5156 non-noreturn calls, or for volatile inline assembly, in the bitmap of
5157 blocks specified by BLOCKS, or in the whole CFG if BLOCKS is zero.
5158 Return the number of blocks that were split.
5160 The goal is to expose cases in which entering a basic block does
5161 not imply that all subsequent instructions must be executed. */
5163 static int
5164 tree_flow_call_edges_add (sbitmap blocks)
5166 int i;
5167 int blocks_split = 0;
5168 int last_bb = last_basic_block;
5169 bool check_last_block = false;
5171 if (n_basic_blocks == 0)
5172 return 0;
5174 if (! blocks)
5175 check_last_block = true;
5176 else
5177 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
5179 /* In the last basic block, before epilogue generation, there will be
5180 a fallthru edge to EXIT. Special care is required if the last statement
5181 of the last basic block is a call, because make_edge folds duplicate
5182 edges: the fallthru edge would also be marked fake, and would then be
5183 removed by remove_fake_edges, which would leave behind an invalid
5184 CFG.
5186 Moreover, we can't elide the outgoing fake edge, since the block
5187 profiler needs to take this into account in order to solve the minimal
5188 spanning tree in the case that the call doesn't return.
5190 Handle this by adding a dummy instruction in a new last basic block. */
5191 if (check_last_block)
5193 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
5194 block_stmt_iterator bsi = bsi_last (bb);
5195 tree t = NULL_TREE;
5196 if (!bsi_end_p (bsi))
5197 t = bsi_stmt (bsi);
5199 if (need_fake_edge_p (t))
5201 edge e;
5203 e = find_edge (bb, EXIT_BLOCK_PTR);
5204 if (e)
5206 bsi_insert_on_edge (e, build_empty_stmt ());
5207 bsi_commit_edge_inserts ();
5212 /* Now add fake edges to the function exit for any non-constant
5213 calls, since there is no way that we can determine if they will
5214 return or not... */
5215 for (i = 0; i < last_bb; i++)
5217 basic_block bb = BASIC_BLOCK (i);
5218 block_stmt_iterator bsi;
5219 tree stmt, last_stmt;
5221 if (!bb)
5222 continue;
5224 if (blocks && !TEST_BIT (blocks, i))
5225 continue;
5227 bsi = bsi_last (bb);
5228 if (!bsi_end_p (bsi))
5230 last_stmt = bsi_stmt (bsi);
5233 stmt = bsi_stmt (bsi);
5234 if (need_fake_edge_p (stmt))
5236 edge e;
5237 /* The handling above of the final block before the
5238 epilogue should be enough to verify that there is
5239 no edge to the exit block in the CFG already.
5240 Calling make_edge in such a case would cause us to
5241 mark that edge as fake and remove it later. */
5242 #ifdef ENABLE_CHECKING
5243 if (stmt == last_stmt)
5245 e = find_edge (bb, EXIT_BLOCK_PTR);
5246 gcc_assert (e == NULL);
5248 #endif
5250 /* Note that the following may create a new basic block
5251 and renumber the existing basic blocks. */
5252 if (stmt != last_stmt)
5254 e = split_block (bb, stmt);
5255 if (e)
5256 blocks_split++;
5258 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
5260 bsi_prev (&bsi);
5262 while (!bsi_end_p (bsi));
5266 if (blocks_split)
5267 verify_flow_info ();
5269 return blocks_split;
5272 bool
5273 tree_purge_dead_eh_edges (basic_block bb)
5275 bool changed = false;
5276 edge e;
5277 edge_iterator ei;
5278 tree stmt = last_stmt (bb);
5280 if (stmt && tree_can_throw_internal (stmt))
5281 return false;
5283 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5285 if (e->flags & EDGE_EH)
5287 remove_edge (e);
5288 changed = true;
5290 else
5291 ei_next (&ei);
5294 /* Removal of dead EH edges might change the dominators of more than
5295 just the immediate successors. E.g. when bb1 is changed so that
5296 it can no longer throw and the dead EH edges bb1->bb3 and bb1->bb4
5297 are purged by this function in:
5301 1-->2
5302 / \ |
5303 v v |
5304 3-->4 |
5306 --->5
5309 idom(bb5) must be recomputed. For now just free the dominance
5310 info. */
5311 if (changed)
5312 free_dominance_info (CDI_DOMINATORS);
5314 return changed;
5317 bool
5318 tree_purge_all_dead_eh_edges (bitmap blocks)
5320 bool changed = false;
5321 unsigned i;
5322 bitmap_iterator bi;
5324 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
5326 changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
5329 return changed;
5332 /* This function is called whenever a new edge is created or
5333 redirected. */
5335 static void
5336 tree_execute_on_growing_pred (edge e)
5338 basic_block bb = e->dest;
5340 if (phi_nodes (bb))
5341 reserve_phi_args_for_new_edge (bb);
5344 /* This function is called immediately before edge E is removed from
5345 the edge vector E->dest->preds. */
5347 static void
5348 tree_execute_on_shrinking_pred (edge e)
5350 if (phi_nodes (e->dest))
5351 remove_phi_args (e);
5354 struct cfg_hooks tree_cfg_hooks = {
5355 "tree",
5356 tree_verify_flow_info,
5357 tree_dump_bb, /* dump_bb */
5358 create_bb, /* create_basic_block */
5359 tree_redirect_edge_and_branch,/* redirect_edge_and_branch */
5360 tree_redirect_edge_and_branch_force,/* redirect_edge_and_branch_force */
5361 remove_bb, /* delete_basic_block */
5362 tree_split_block, /* split_block */
5363 tree_move_block_after, /* move_block_after */
5364 tree_can_merge_blocks_p, /* can_merge_blocks_p */
5365 tree_merge_blocks, /* merge_blocks */
5366 tree_predict_edge, /* predict_edge */
5367 tree_predicted_by_p, /* predicted_by_p */
5368 tree_can_duplicate_bb_p, /* can_duplicate_block_p */
5369 tree_duplicate_bb, /* duplicate_block */
5370 tree_split_edge, /* split_edge */
5371 tree_make_forwarder_block, /* make_forwarder_block */
5372 NULL, /* tidy_fallthru_edge */
5373 tree_block_ends_with_call_p, /* block_ends_with_call_p */
5374 tree_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
5375 tree_flow_call_edges_add, /* flow_call_edges_add */
5376 tree_execute_on_growing_pred, /* execute_on_growing_pred */
5377 tree_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
5381 /* Split all critical edges. */
5383 static void
5384 split_critical_edges (void)
5386 basic_block bb;
5387 edge e;
5388 edge_iterator ei;
5390 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
5391 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
5392 mappings around the calls to split_edge. */
5393 start_recording_case_labels ();
5394 FOR_ALL_BB (bb)
5396 FOR_EACH_EDGE (e, ei, bb->succs)
5397 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
5399 split_edge (e);
5402 end_recording_case_labels ();
5405 struct tree_opt_pass pass_split_crit_edges =
5407 "crited", /* name */
5408 NULL, /* gate */
5409 split_critical_edges, /* execute */
5410 NULL, /* sub */
5411 NULL, /* next */
5412 0, /* static_pass_number */
5413 TV_TREE_SPLIT_EDGES, /* tv_id */
5414 PROP_cfg, /* properties_required */
5415 PROP_no_crit_edges, /* properties_provided */
5416 0, /* properties_destroyed */
5417 0, /* todo_flags_start */
5418 TODO_dump_func, /* todo_flags_finish */
5419 0 /* letter */
5423 /* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
5424 a temporary, register the temporary to be renamed if necessary,
5425 and finally return the temporary. Put the statements that compute
5426 EXP before the current statement in BSI. */
5428 tree
5429 gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
5431 tree t, new_stmt, orig_stmt;
5433 if (is_gimple_val (exp))
5434 return exp;
5436 t = make_rename_temp (type, NULL);
5437 new_stmt = build (MODIFY_EXPR, type, t, exp);
5439 orig_stmt = bsi_stmt (*bsi);
5440 SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
5441 TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
5443 bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
5445 return t;
5448 /* Build a ternary operation and gimplify it. Emit code before BSI.
5449 Return the gimple_val holding the result. */
5451 tree
5452 gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
5453 tree type, tree a, tree b, tree c)
5455 tree ret;
5457 ret = fold (build3 (code, type, a, b, c));
5458 STRIP_NOPS (ret);
5460 return gimplify_val (bsi, type, ret);
5463 /* Build a binary operation and gimplify it. Emit code before BSI.
5464 Return the gimple_val holding the result. */
5466 tree
5467 gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
5468 tree type, tree a, tree b)
5470 tree ret;
5472 ret = fold (build2 (code, type, a, b));
5473 STRIP_NOPS (ret);
5475 return gimplify_val (bsi, type, ret);
5478 /* Build a unary operation and gimplify it. Emit code before BSI.
5479 Return the gimple_val holding the result. */
5481 tree
5482 gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
5483 tree a)
5485 tree ret;
5487 ret = fold (build1 (code, type, a));
5488 STRIP_NOPS (ret);
5490 return gimplify_val (bsi, type, ret);
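/* A minimal sketch of how a pass might use these helpers to compute A + B
   just before the statement that BSI points to; A, B and TYPE are the
   caller's trees and BSI its block_stmt_iterator:

       tree sum = gimplify_build2 (&bsi, PLUS_EXPR, type, a, b);

   SUM is a valid GIMPLE rvalue, and any statements needed to compute it
   have already been inserted before the current statement.  */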
5495 /* Emit return warnings. */
5497 static void
5498 execute_warn_function_return (void)
5500 #ifdef USE_MAPPED_LOCATION
5501 source_location location;
5502 #else
5503 location_t *locus;
5504 #endif
5505 tree last;
5506 edge e;
5507 edge_iterator ei;
5509 if (warn_missing_noreturn
5510 && !TREE_THIS_VOLATILE (cfun->decl)
5511 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
5512 && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl))
5513 warning ("%Jfunction might be possible candidate for "
5514 "attribute %<noreturn%>",
5515 cfun->decl);
5517 /* If we have a path to EXIT, then we do return. */
5518 if (TREE_THIS_VOLATILE (cfun->decl)
5519 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
5521 #ifdef USE_MAPPED_LOCATION
5522 location = UNKNOWN_LOCATION;
5523 #else
5524 locus = NULL;
5525 #endif
5526 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5528 last = last_stmt (e->src);
5529 if (TREE_CODE (last) == RETURN_EXPR
5530 #ifdef USE_MAPPED_LOCATION
5531 && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
5532 #else
5533 && (locus = EXPR_LOCUS (last)) != NULL)
5534 #endif
5535 break;
5537 #ifdef USE_MAPPED_LOCATION
5538 if (location == UNKNOWN_LOCATION)
5539 location = cfun->function_end_locus;
5540 warning ("%H%<noreturn%> function does return", &location);
5541 #else
5542 if (!locus)
5543 locus = &cfun->function_end_locus;
5544 warning ("%H%<noreturn%> function does return", locus);
5545 #endif
5548 /* If we see "return;" in some basic block, then we do reach the end
5549 without returning a value. */
5550 else if (warn_return_type
5551 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
5552 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
5554 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5556 tree last = last_stmt (e->src);
5557 if (TREE_CODE (last) == RETURN_EXPR
5558 && TREE_OPERAND (last, 0) == NULL)
5560 #ifdef USE_MAPPED_LOCATION
5561 location = EXPR_LOCATION (last);
5562 if (location == UNKNOWN_LOCATION)
5563 location = cfun->function_end_locus;
5564 warning ("%Hcontrol reaches end of non-void function", &location);
5565 #else
5566 locus = EXPR_LOCUS (last);
5567 if (!locus)
5568 locus = &cfun->function_end_locus;
5569 warning ("%Hcontrol reaches end of non-void function", locus);
5570 #endif
5571 break;
5578 /* Given a basic block B which ends with a conditional and has
5579 precisely two successors, determine which of the edges is taken if
5580 the conditional is true and which is taken if the conditional is
5581 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
5583 void
5584 extract_true_false_edges_from_block (basic_block b,
5585 edge *true_edge,
5586 edge *false_edge)
5588 edge e = EDGE_SUCC (b, 0);
5590 if (e->flags & EDGE_TRUE_VALUE)
5592 *true_edge = e;
5593 *false_edge = EDGE_SUCC (b, 1);
5595 else
5597 *false_edge = e;
5598 *true_edge = EDGE_SUCC (b, 1);
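/* A minimal usage sketch, assuming BB ends in a COND_EXPR and therefore has
   exactly two outgoing edges:

       edge true_edge, false_edge;
       extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

   TRUE_EDGE is then taken when the condition evaluates to true and
   FALSE_EDGE when it evaluates to false.  */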
5602 struct tree_opt_pass pass_warn_function_return =
5604 NULL, /* name */
5605 NULL, /* gate */
5606 execute_warn_function_return, /* execute */
5607 NULL, /* sub */
5608 NULL, /* next */
5609 0, /* static_pass_number */
5610 0, /* tv_id */
5611 PROP_cfg, /* properties_required */
5612 0, /* properties_provided */
5613 0, /* properties_destroyed */
5614 0, /* todo_flags_start */
5615 0, /* todo_flags_finish */
5616 0 /* letter */
5619 #include "gt-tree-cfg.h"