/* Control flow functions for trees.
   Copyright (C) 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "errors.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "ggc.h"
#include "langhooks.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "toplev.h"
#include "except.h"
#include "cfgloop.h"
#include "cfglayout.h"
#include "hashtab.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their TREE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of SWITCH_EXPRs.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */
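
/* As an illustration: if case labels 1: and 3: of a SWITCH_EXPR both jump
   along edge E, the chain recorded for E lets us update both
   CASE_LABEL_EXPRs when E is redirected, without rescanning the whole
   case vector (see record_switch_edge and get_cases_for_edge below).  */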

struct edge_to_cases_elt
{
  /* The edge itself.  Necessary for hashing and equality tests.  */
  edge e;

  /* The case labels associated with this edge.  We link these up via
     their TREE_CHAIN field, then we wipe out the TREE_CHAIN fields
     when we destroy the hash table.  This prevents problems when copying
     SWITCH_EXPRs.  */
  tree case_labels;
};

static htab_t edge_to_cases;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Nonzero if we found a computed goto while building basic blocks.  */
static bool found_computed_goto;

/* Basic blocks and flowgraphs.  */
static basic_block create_bb (void *, void *, basic_block);
static void create_block_annotation (basic_block);
static void free_blocks_annotations (void);
static void clear_blocks_annotations (void);
static void make_blocks (tree);
static void factor_computed_gotos (void);

/* Edges.  */
static void make_edges (void);
static void make_ctrl_stmt_edges (basic_block);
static void make_exit_edges (basic_block);
static void make_cond_expr_edges (basic_block);
static void make_switch_expr_edges (basic_block);
static void make_goto_expr_edges (basic_block);
static edge tree_redirect_edge_and_branch (edge, basic_block);
static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
static void split_critical_edges (void);
static bool remove_fallthru_edge (VEC(edge,gc) *);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (tree, tree);
static int tree_verify_flow_info (void);
static void tree_make_forwarder_block (edge);
static bool tree_forwarder_block_p (basic_block, bool);
static void tree_cfg2vcg (FILE *);

/* Flowgraph optimization and cleanup.  */
static void tree_merge_blocks (basic_block, basic_block);
static bool tree_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static bool cleanup_control_flow (void);
static bool cleanup_control_expr_graph (basic_block, block_stmt_iterator);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (basic_block, tree);
static tree find_case_label_for_value (tree, tree);
static bool phi_alternatives_equal (basic_block, edge, edge);
static bool cleanup_forwarder_blocks (void);


/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  TP points to the list of
   statements to be added to the flowgraph.  */

static void
build_tree_cfg (tree *tp)
{
  /* Register specific tree functions.  */
  tree_register_cfg_hooks ();

  /* Initialize the basic block array.  */
  init_flow ();
  profile_status = PROFILE_ABSENT;
  n_basic_blocks = 0;
  last_basic_block = 0;
  VARRAY_BB_INIT (basic_block_info, initial_cfg_capacity, "basic_block_info");
  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  /* Build a mapping of labels to their associated blocks.  */
  VARRAY_BB_INIT (label_to_block_map, initial_cfg_capacity,
		  "label to block map");

  ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;

  found_computed_goto = 0;
  make_blocks (*tp);

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.  */
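  /* Concretely, each original "goto *EXPR;" becomes "gotovar = EXPR;
     goto <factored label>;", and one new block is added that contains
     "<factored label>: goto *gotovar;" (see factor_computed_gotos).  */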
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks == 0)
    create_empty_bb (ENTRY_BLOCK_PTR);

  create_block_annotation (ENTRY_BLOCK_PTR);
  create_block_annotation (EXIT_BLOCK_PTR);

  /* Adjust the size of the array.  */
  VARRAY_GROW (basic_block_info, n_basic_blocks);

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  make_edges ();

  /* Debugging dumps.  */

  /* Write the flowgraph to a VCG file.  */
  {
    int local_dump_flags;
    FILE *dump_file = dump_begin (TDI_vcg, &local_dump_flags);
    if (dump_file)
      {
        tree_cfg2vcg (dump_file);
        dump_end (TDI_vcg, dump_file);
      }
  }

  /* Dump a textual representation of the flowgraph.  */
  if (dump_file)
    dump_tree_cfg (dump_file, dump_flags);
}

static void
execute_build_cfg (void)
{
  build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
}

struct tree_opt_pass pass_build_cfg =
{
  "cfg",			/* name */
  NULL,				/* gate */
  execute_build_cfg,		/* execute */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_TREE_CFG,			/* tv_id */
  PROP_gimple_leh,		/* properties_required */
  PROP_cfg,			/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  TODO_verify_stmts,		/* todo_flags_finish */
  0				/* letter */
};

/* Search the CFG for any computed gotos.  If found, factor them to a
   common computed goto site.  Also record the location of that site so
   that we can un-factor the gotos after we have converted back to
   normal form.  */

static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  tree var = NULL;
  tree factored_computed_goto_label = NULL;
  tree factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bsi = bsi_last (bb);
      tree last;

      if (bsi_end_p (bsi))
        continue;
      last = bsi_stmt (bsi);

      /* Ignore the computed goto we create when we factor the original
         computed gotos.  */
      if (last == factored_computed_goto)
        continue;

      /* If the last statement is a computed goto, factor it.  */
      if (computed_goto_p (last))
        {
          tree assignment;

          /* The first time we find a computed goto we need to create
             the factored goto block and the variable each original
             computed goto will use for their goto destination.  */
          if (! factored_computed_goto)
            {
              basic_block new_bb = create_empty_bb (bb);
              block_stmt_iterator new_bsi = bsi_start (new_bb);

              /* Create the destination of the factored goto.  Each original
                 computed goto will put its desired destination into this
                 variable and jump to the label we create immediately
                 below.  */
              var = create_tmp_var (ptr_type_node, "gotovar");

              /* Build a label for the new block which will contain the
                 factored computed goto.  */
              factored_label_decl = create_artificial_label ();
              factored_computed_goto_label
                = build1 (LABEL_EXPR, void_type_node, factored_label_decl);
              bsi_insert_after (&new_bsi, factored_computed_goto_label,
                                BSI_NEW_STMT);

              /* Build our new computed goto.  */
              factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
              bsi_insert_after (&new_bsi, factored_computed_goto,
                                BSI_NEW_STMT);
            }

          /* Copy the original computed goto's destination into VAR.  */
          assignment = build (MODIFY_EXPR, ptr_type_node,
                              var, GOTO_DESTINATION (last));
          bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);

          /* And re-vector the computed goto to the new destination.  */
          GOTO_DESTINATION (last) = factored_label_decl;
        }
    }
}

/* Create annotations for a single basic block.  */

static void
create_block_annotation (basic_block bb)
{
  /* Verify that the tree_annotations field is clear.  */
  gcc_assert (!bb->tree_annotations);
  bb->tree_annotations = ggc_alloc_cleared (sizeof (struct bb_ann_d));
}


/* Free the annotations for all the basic blocks.  */

static void free_blocks_annotations (void)
{
  clear_blocks_annotations ();
}


/* Clear the annotations for all the basic blocks.  */

static void
clear_blocks_annotations (void)
{
  basic_block bb;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->tree_annotations = NULL;
}


/* Build a flowgraph for the statement_list STMT_LIST.  */

static void
make_blocks (tree stmt_list)
{
  tree_stmt_iterator i = tsi_start (stmt_list);
  tree stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_list = true;
  basic_block bb = ENTRY_BLOCK_PTR;

  while (!tsi_end_p (i))
    {
      tree prev_stmt;

      prev_stmt = stmt;
      stmt = tsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_list)
            stmt_list = tsi_split_statement_list_before (&i);
          bb = create_basic_block (stmt_list, NULL, bb);
          start_new_block = false;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
      set_bb_for_stmt (stmt, bb);

      if (computed_goto_p (stmt))
        found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration.  */
      if (stmt_ends_bb_p (stmt))
        start_new_block = true;

      tsi_next (&i);
      first_stmt_of_list = false;
    }
}

/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     ggc_alloc_cleared to allocate a basic block, we do not have to
     clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  bb->stmt_list = h ? h : alloc_stmt_list ();

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block == VARRAY_SIZE (basic_block_info))
    {
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      VARRAY_GROW (basic_block_info, new_size);
    }

  /* Add the newly created block to the array.  */
  BASIC_BLOCK (last_basic_block) = bb;

  create_block_annotation (bb);

  n_basic_blocks++;
  last_basic_block++;

  initialize_bb_rbi (bb);
  return bb;
}


/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* Fold COND_EXPR_COND of each COND_EXPR.  */

static void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      tree stmt = last_stmt (bb);

      if (stmt
          && TREE_CODE (stmt) == COND_EXPR)
        {
          tree cond = fold (COND_EXPR_COND (stmt));
          if (integer_zerop (cond))
            COND_EXPR_COND (stmt) = boolean_false_node;
          else if (integer_onep (cond))
            COND_EXPR_COND (stmt) = boolean_true_node;
        }
    }
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB (bb)
    {
      tree first = first_stmt (bb);
      tree last = last_stmt (bb);

      if (first)
        {
          /* Edges for statements that always alter flow control.  */
          if (is_ctrl_stmt (last))
            make_ctrl_stmt_edges (bb);

          /* Edges for statements that sometimes alter flow control.  */
          if (is_ctrl_altering_stmt (last))
            make_exit_edges (bb);
        }

      /* Finally, if no edges were created above, this is a regular
         basic block that only needs a fallthru edge.  */
      if (EDGE_COUNT (bb->succs) == 0)
        make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
    }

  /* We do not care about fake edges, so remove any that the CFG
     builder inserted for completeness.  */
  remove_fake_exit_edges ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();

  /* Clean up the graph and warn for unreachable code.  */
  cleanup_tree_cfg ();
}

/* Create edges for control statement at basic block BB.  */

static void
make_ctrl_stmt_edges (basic_block bb)
{
  tree last = last_stmt (bb);

  gcc_assert (last);
  switch (TREE_CODE (last))
    {
    case GOTO_EXPR:
      make_goto_expr_edges (bb);
      break;

    case RETURN_EXPR:
      make_edge (bb, EXIT_BLOCK_PTR, 0);
      break;

    case COND_EXPR:
      make_cond_expr_edges (bb);
      break;

    case SWITCH_EXPR:
      make_switch_expr_edges (bb);
      break;

    case RESX_EXPR:
      make_eh_edges (last);
      /* Yet another NORETURN hack.  */
      if (EDGE_COUNT (bb->succs) == 0)
        make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
      break;

    default:
      gcc_unreachable ();
    }
}


/* Create exit edges for statements in block BB that alter the flow of
   control.  Statements that alter the control flow are 'goto', 'return'
   and calls to non-returning functions.  */

static void
make_exit_edges (basic_block bb)
{
  tree last = last_stmt (bb), op;

  gcc_assert (last);
  switch (TREE_CODE (last))
    {
    case RESX_EXPR:
      break;
    case CALL_EXPR:
      /* If this function receives a nonlocal goto, then we need to
         make edges from this call site to all the nonlocal goto
         handlers.  */
      if (TREE_SIDE_EFFECTS (last)
          && current_function_has_nonlocal_label)
        make_goto_expr_edges (bb);

      /* If this statement has reachable exception handlers, then
         create abnormal edges to them.  */
      make_eh_edges (last);

      /* Some calls are known not to return.  For such calls we create
         a fake edge.

         We really need to revamp how we build edges so that it's not
         such a bloody pain to avoid creating edges for this case since
         all we do is remove these edges when we're done building the
         CFG.  */
      if (call_expr_flags (last) & ECF_NORETURN)
        {
          make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
          return;
        }

      /* Don't forget the fall-thru edge.  */
      make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      break;

    case MODIFY_EXPR:
      /* A MODIFY_EXPR may have a CALL_EXPR on its RHS and the CALL_EXPR
         may have an abnormal edge.  Search the RHS for this case and
         create any required edges.  */
      op = get_call_expr_in (last);
      if (op && TREE_SIDE_EFFECTS (op)
          && current_function_has_nonlocal_label)
        make_goto_expr_edges (bb);

      make_eh_edges (last);
      make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      break;

    default:
      gcc_unreachable ();
    }
}

/* Create the edges for a COND_EXPR starting at block BB.
   At this point, both clauses must contain only simple gotos.  */
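
/* For example, a lowered conditional of the form
   "if (a) goto <L1>; else goto <L2>;" gets an EDGE_TRUE_VALUE edge to the
   block labeled <L1> and an EDGE_FALSE_VALUE edge to the block labeled
   <L2>.  */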

static void
make_cond_expr_edges (basic_block bb)
{
  tree entry = last_stmt (bb);
  basic_block then_bb, else_bb;
  tree then_label, else_label;

  gcc_assert (entry);
  gcc_assert (TREE_CODE (entry) == COND_EXPR);

  /* Entry basic blocks for each component.  */
  then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
  else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);

  make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  make_edge (bb, else_bb, EDGE_FALSE_VALUE);
}

/* Hashing routine for EDGE_TO_CASES.  */

static hashval_t
edge_to_cases_hash (const void *p)
{
  edge e = ((struct edge_to_cases_elt *)p)->e;

  /* Hash on the edge itself (which is a pointer).  */
  return htab_hash_pointer (e);
}

/* Equality routine for EDGE_TO_CASES, edges are unique, so testing
   for equality is just a pointer comparison.  */

static int
edge_to_cases_eq (const void *p1, const void *p2)
{
  edge e1 = ((struct edge_to_cases_elt *)p1)->e;
  edge e2 = ((struct edge_to_cases_elt *)p2)->e;

  return e1 == e2;
}

/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the TREE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

static void
edge_to_cases_cleanup (void *p)
{
  struct edge_to_cases_elt *elt = p;
  tree t, next;

  for (t = elt->case_labels; t; t = next)
    {
      next = TREE_CHAIN (t);
      TREE_CHAIN (t) = NULL;
    }
  free (p);
}

/* Start recording information mapping edges to case labels.  */

static void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);

  edge_to_cases = htab_create (37,
                               edge_to_cases_hash,
                               edge_to_cases_eq,
                               edge_to_cases_cleanup);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
static void
end_recording_case_labels (void)
{
  htab_delete (edge_to_cases);
  edge_to_cases = NULL;
}

/* Record that CASE_LABEL (a CASE_LABEL_EXPR) references edge E.  */

static void
record_switch_edge (edge e, tree case_label)
{
  struct edge_to_cases_elt *elt;
  void **slot;

  /* Build a hash table element so we can see if E is already
     in the table.  */
  elt = xmalloc (sizeof (struct edge_to_cases_elt));
  elt->e = e;
  elt->case_labels = case_label;

  slot = htab_find_slot (edge_to_cases, elt, INSERT);

  if (*slot == NULL)
    {
      /* E was not in the hash table.  Install E into the hash table.  */
      *slot = (void *)elt;
    }
  else
    {
      /* E was already in the hash table.  Free ELT as we do not need it
         anymore.  */
      free (elt);

      /* Get the entry stored in the hash table.  */
      elt = (struct edge_to_cases_elt *) *slot;

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E.  */
      TREE_CHAIN (case_label) = elt->case_labels;
      elt->case_labels = case_label;
    }
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, tree t)
{
  struct edge_to_cases_elt elt, *elt_p;
  void **slot;
  size_t i, n;
  tree vec;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

restart:
  elt.e = e;
  elt.case_labels = NULL;
  slot = htab_find_slot (edge_to_cases, &elt, NO_INSERT);

  if (slot)
    {
      elt_p = (struct edge_to_cases_elt *)*slot;
      return elt_p->case_labels;
    }

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  vec = SWITCH_LABELS (t);
  n = TREE_VEC_LENGTH (vec);
  for (i = 0; i < n; i++)
    {
      tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
      basic_block label_bb = label_to_block (lab);
      record_switch_edge (find_edge (e->src, label_bb), TREE_VEC_ELT (vec, i));
    }
  goto restart;
}

/* Create the edges for a SWITCH_EXPR starting at block BB.
   At this point, the switch body has been lowered and the
   SWITCH_LABELS filled in, so this is in effect a multi-way branch.  */

static void
make_switch_expr_edges (basic_block bb)
{
  tree entry = last_stmt (bb);
  size_t i, n;
  tree vec;

  vec = SWITCH_LABELS (entry);
  n = TREE_VEC_LENGTH (vec);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if ((errorcount || sorrycount) && uid < 0)
    {
      block_stmt_iterator bsi = bsi_start (BASIC_BLOCK (0));
      tree stmt;

      stmt = build1 (LABEL_EXPR, void_type_node, dest);
      bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  return VARRAY_BB (ifun->cfg->x_label_to_block_map, uid);
}

/* Create edges for a goto statement at block BB.  */

static void
make_goto_expr_edges (basic_block bb)
{
  tree goto_t;
  basic_block target_bb;
  int for_call;
  block_stmt_iterator last = bsi_last (bb);

  goto_t = bsi_stmt (last);

  /* If the last statement is not a GOTO (i.e., it is a RETURN_EXPR,
     CALL_EXPR or MODIFY_EXPR), then the edge is an abnormal edge resulting
     from a nonlocal goto.  */
  if (TREE_CODE (goto_t) != GOTO_EXPR)
    for_call = 1;
  else
    {
      tree dest = GOTO_DESTINATION (goto_t);
      for_call = 0;

      /* A GOTO to a local label creates normal edges.  */
      if (simple_goto_p (goto_t))
        {
          edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
#ifdef USE_MAPPED_LOCATION
          e->goto_locus = EXPR_LOCATION (goto_t);
#else
          e->goto_locus = EXPR_LOCUS (goto_t);
#endif
          bsi_remove (&last);
          return;
        }

      /* Nothing more to do for nonlocal gotos.  */
      if (TREE_CODE (dest) == LABEL_DECL)
        return;

      /* Computed gotos remain.  */
    }

  /* Look for the block starting with the destination label.  In the
     case of a computed goto, make an edge to any label block we find
     in the CFG.  */
  FOR_EACH_BB (target_bb)
    {
      block_stmt_iterator bsi;

      for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
        {
          tree target = bsi_stmt (bsi);

          if (TREE_CODE (target) != LABEL_EXPR)
            break;

          if (
              /* Computed GOTOs.  Make an edge to every label block that has
                 been marked as a potential target for a computed goto.  */
              (FORCED_LABEL (LABEL_EXPR_LABEL (target)) && for_call == 0)
              /* Nonlocal GOTO target.  Make an edge to every label block
                 that has been marked as a potential target for a nonlocal
                 goto.  */
              || (DECL_NONLOCAL (LABEL_EXPR_LABEL (target)) && for_call == 1))
            {
              make_edge (bb, target_bb, EDGE_ABNORMAL);
              break;
            }
        }
    }

  /* Degenerate case of computed goto with no labels.  */
  if (!for_call && EDGE_COUNT (bb->succs) == 0)
    make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
}

/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/

/* Remove unreachable blocks and other miscellaneous clean up work.  */

bool
cleanup_tree_cfg (void)
{
  bool retval = false;

  timevar_push (TV_TREE_CLEANUP_CFG);

  retval = cleanup_control_flow ();
  retval |= delete_unreachable_blocks ();

  /* cleanup_forwarder_blocks can redirect edges out of SWITCH_EXPRs,
     which can get expensive.  So we want to enable recording of edge
     to CASE_LABEL_EXPR mappings around the call to
     cleanup_forwarder_blocks.  */
  start_recording_case_labels ();
  retval |= cleanup_forwarder_blocks ();
  end_recording_case_labels ();

#ifdef ENABLE_CHECKING
  if (retval)
    {
      gcc_assert (!cleanup_control_flow ());
      gcc_assert (!delete_unreachable_blocks ());
      gcc_assert (!cleanup_forwarder_blocks ());
    }
#endif

  /* Merging the blocks creates no new opportunities for the other
     optimizations, so do it here.  */
  retval |= merge_seq_blocks ();

  compact_blocks ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  timevar_pop (TV_TREE_CLEANUP_CFG);
  return retval;
}


/* Cleanup cfg and repair loop structures.  */

void
cleanup_tree_cfg_loop (void)
{
  bitmap changed_bbs = BITMAP_ALLOC (NULL);

  cleanup_tree_cfg ();

  fix_loop_structure (current_loops, changed_bbs);
  calculate_dominance_info (CDI_DOMINATORS);

  /* This usually does nothing.  But sometimes parts of cfg that originally
     were inside a loop get out of it due to edge removal (since they
     become unreachable by back edges from latch).  */
  rewrite_into_loop_closed_ssa (changed_bbs, TODO_update_ssa);

  BITMAP_FREE (changed_bbs);

#ifdef ENABLE_CHECKING
  verify_loop_structure (current_loops);
#endif
}

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We only run this pass once; running it more than once is probably not
   profitable.  */

/* A map from basic block index to the leading label of that block.  */
static tree *label_for_bb;

/* Callback for for_each_eh_region.  Helper for cleanup_dead_labels.  */
static void
update_eh_label (struct eh_region *region)
{
  tree old_label = get_eh_region_tree_label (region);
  if (old_label)
    {
      tree new_label;
      basic_block bb = label_to_block (old_label);

      /* ??? After optimizing, there may be EH regions with labels
         that have already been removed from the function body, so
         there is no basic block for them.  */
      if (! bb)
        return;

      new_label = label_for_bb[bb->index];
      set_eh_region_tree_label (region, new_label);
    }
}

/* Given LABEL return the first label in the same basic block.  */
static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!label_for_bb[bb->index])
    label_for_bb[bb->index] = label;
  return label_for_bb[bb->index];
}

/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = xcalloc (last_basic_block, sizeof (tree));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
        {
          tree label, stmt = bsi_stmt (i);

          if (TREE_CODE (stmt) != LABEL_EXPR)
            break;

          label = LABEL_EXPR_LABEL (stmt);

          /* If we have not yet seen a label for the current block,
             remember this one and see if there are more labels.  */
          if (! label_for_bb[bb->index])
            {
              label_for_bb[bb->index] = label;
              continue;
            }

          /* If we did see a label for the current block already, but it
             is an artificially created label, replace it if the current
             label is a user defined label.  */
          if (! DECL_ARTIFICIAL (label)
              && DECL_ARTIFICIAL (label_for_bb[bb->index]))
            {
              label_for_bb[bb->index] = label;
              break;
            }
        }
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB (bb)
    {
      tree stmt = last_stmt (bb);
      if (!stmt)
        continue;

      switch (TREE_CODE (stmt))
        {
        case COND_EXPR:
          {
            tree true_branch, false_branch;

            true_branch = COND_EXPR_THEN (stmt);
            false_branch = COND_EXPR_ELSE (stmt);

            GOTO_DESTINATION (true_branch)
              = main_block_label (GOTO_DESTINATION (true_branch));
            GOTO_DESTINATION (false_branch)
              = main_block_label (GOTO_DESTINATION (false_branch));

            break;
          }

        case SWITCH_EXPR:
          {
            size_t i;
            tree vec = SWITCH_LABELS (stmt);
            size_t n = TREE_VEC_LENGTH (vec);

            /* Replace all destination labels.  */
            for (i = 0; i < n; ++i)
              {
                tree elt = TREE_VEC_ELT (vec, i);
                tree label = main_block_label (CASE_LABEL (elt));
                CASE_LABEL (elt) = label;
              }
            break;
          }

        /* We have to handle GOTO_EXPRs until they're removed, and we don't
           remove them until after we've created the CFG edges.  */
        case GOTO_EXPR:
          if (! computed_goto_p (stmt))
            {
              GOTO_DESTINATION (stmt)
                = main_block_label (GOTO_DESTINATION (stmt));
              break;
            }

        default:
          break;
        }
    }

  for_each_eh_region (update_eh_label);

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos are preserved.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index];

      if (! label_for_this_bb)
        continue;

      for (i = bsi_start (bb); !bsi_end_p (i); )
        {
          tree label, stmt = bsi_stmt (i);

          if (TREE_CODE (stmt) != LABEL_EXPR)
            break;

          label = LABEL_EXPR_LABEL (stmt);

          if (label == label_for_this_bb
              || ! DECL_ARTIFICIAL (label)
              || DECL_NONLOCAL (label))
            bsi_next (&i);
          else
            bsi_remove (&i);
        }
    }

  free (label_for_bb);
}

/* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.
   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      tree stmt = last_stmt (bb);
      if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
        {
          tree labels = SWITCH_LABELS (stmt);
          int old_size = TREE_VEC_LENGTH (labels);
          int i, j, new_size = old_size;
          tree default_case = TREE_VEC_ELT (labels, old_size - 1);
          tree default_label;

          /* The default label is always the last case in a switch
             statement after gimplification.  */
          default_label = CASE_LABEL (default_case);

          /* Look for possible opportunities to merge cases.
             Ignore the last element of the label vector because it
             must be the default case.  */
          i = 0;
          while (i < old_size - 1)
            {
              tree base_case, base_label, base_high;
              base_case = TREE_VEC_ELT (labels, i);

              gcc_assert (base_case);
              base_label = CASE_LABEL (base_case);

              /* Discard cases that have the same destination as the
                 default case.  */
              if (base_label == default_label)
                {
                  TREE_VEC_ELT (labels, i) = NULL_TREE;
                  i++;
                  new_size--;
                  continue;
                }

              base_high = CASE_HIGH (base_case) ?
                CASE_HIGH (base_case) : CASE_LOW (base_case);
              i++;
              /* Try to merge case labels.  Break out when we reach the end
                 of the label vector or when we cannot merge the next case
                 label with the current one.  */
              while (i < old_size - 1)
                {
                  tree merge_case = TREE_VEC_ELT (labels, i);
                  tree merge_label = CASE_LABEL (merge_case);
                  tree t = int_const_binop (PLUS_EXPR, base_high,
                                            integer_one_node, 1);

                  /* Merge the cases if they jump to the same place,
                     and their ranges are consecutive.  */
                  if (merge_label == base_label
                      && tree_int_cst_equal (CASE_LOW (merge_case), t))
                    {
                      base_high = CASE_HIGH (merge_case) ?
                        CASE_HIGH (merge_case) : CASE_LOW (merge_case);
                      CASE_HIGH (base_case) = base_high;
                      TREE_VEC_ELT (labels, i) = NULL_TREE;
                      new_size--;
                      i++;
                    }
                  else
                    break;
                }
            }

          /* Compress the case labels in the label vector, and adjust the
             length of the vector.  */
          for (i = 0, j = 0; i < new_size; i++)
            {
              while (! TREE_VEC_ELT (labels, j))
                j++;
              TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
            }
          TREE_VEC_LENGTH (labels) = new_size;
        }
    }
}

/* Checks whether we can merge block B into block A.  */

static bool
tree_can_merge_blocks_p (basic_block a, basic_block b)
{
  tree stmt;
  block_stmt_iterator bsi;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_ABNORMAL)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR)
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt && TREE_CODE (stmt) == LABEL_EXPR
      && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
    return false;

  /* There may be no PHI nodes at the start of B.  */
  if (phi_nodes (b))
    return false;

  /* Do not remove user labels.  */
  for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
    {
      stmt = bsi_stmt (bsi);
      if (TREE_CODE (stmt) != LABEL_EXPR)
        break;
      if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
        return false;
    }

  /* Protect the loop latches.  */
  if (current_loops
      && b->loop_father->latch == b)
    return false;

  return true;
}

/* Merge block B into block A.  */

static void
tree_merge_blocks (basic_block a, basic_block b)
{
  block_stmt_iterator bsi;
  tree_stmt_iterator last;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set bb_for_stmt to A for other statements.  */
  for (bsi = bsi_start (b); !bsi_end_p (bsi);)
    {
      if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
        {
          tree label = bsi_stmt (bsi);

          bsi_remove (&bsi);
          /* Now that we can thread computed gotos, we might have
             a situation where we have a forced label in block B.
             However, the label at the start of block B might still be
             used in other ways (think about the runtime checking for
             Fortran assigned gotos).  So we cannot just delete the
             label.  Instead we move the label to the start of block A.  */
          if (FORCED_LABEL (LABEL_EXPR_LABEL (label)))
            {
              block_stmt_iterator dest_bsi = bsi_start (a);
              bsi_insert_before (&dest_bsi, label, BSI_NEW_STMT);
            }
        }
      else
        {
          set_bb_for_stmt (bsi_stmt (bsi), a);
          bsi_next (&bsi);
        }
    }

  /* Merge the chains.  */
  last = tsi_last (a->stmt_list);
  tsi_link_after (&last, b->stmt_list, TSI_NEW_STMT);
  b->stmt_list = NULL;
}

/* Walk the function tree removing unnecessary statements.

     * Empty statement nodes are removed

     * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed

     * Unnecessary COND_EXPRs are removed

     * Some unnecessary BIND_EXPRs are removed

   Clearly more work could be done.  The trick is doing the analysis
   and removal fast enough to be a net improvement in compile times.

   Note that when we remove a control structure such as a COND_EXPR,
   BIND_EXPR, or TRY block, we will need to repeat this optimization pass
   to ensure we eliminate all the useless code.  */
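
/* For example, "if (x) {} else {}" has no side effects in either arm, so
   remove_useless_stmts_cond below replaces the whole COND_EXPR with an
   empty statement; similarly, a TRY_FINALLY_EXPR with an empty body is
   replaced by its FINALLY block alone.  */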

struct rus_data
{
  tree *last_goto;
  bool repeat;
  bool may_throw;
  bool may_branch;
  bool has_label;
};

static void remove_useless_stmts_1 (tree *, struct rus_data *);

static bool
remove_useless_stmts_warn_notreached (tree stmt)
{
  if (EXPR_HAS_LOCATION (stmt))
    {
      location_t loc = EXPR_LOCATION (stmt);
      if (LOCATION_LINE (loc) > 0)
        {
          warning (0, "%Hwill never be executed", &loc);
          return true;
        }
    }

  switch (TREE_CODE (stmt))
    {
    case STATEMENT_LIST:
      {
        tree_stmt_iterator i;
        for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
          if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
            return true;
      }
      break;

    case COND_EXPR:
      if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
        return true;
      if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
        return true;
      if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
        return true;
      break;

    case TRY_FINALLY_EXPR:
    case TRY_CATCH_EXPR:
      if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
        return true;
      if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
        return true;
      break;

    case CATCH_EXPR:
      return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
    case EH_FILTER_EXPR:
      return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
    case BIND_EXPR:
      return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));

    default:
      /* Not a live container.  */
      break;
    }

  return false;
}

static void
remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
{
  tree then_clause, else_clause, cond;
  bool save_has_label, then_has_label, else_has_label;

  save_has_label = data->has_label;
  data->has_label = false;
  data->last_goto = NULL;

  remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);

  then_has_label = data->has_label;
  data->has_label = false;
  data->last_goto = NULL;

  remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);

  else_has_label = data->has_label;
  data->has_label = save_has_label | then_has_label | else_has_label;

  then_clause = COND_EXPR_THEN (*stmt_p);
  else_clause = COND_EXPR_ELSE (*stmt_p);
  cond = fold (COND_EXPR_COND (*stmt_p));

  /* If neither arm does anything at all, we can remove the whole IF.  */
  if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
    {
      *stmt_p = build_empty_stmt ();
      data->repeat = true;
    }

  /* If there are no reachable statements in an arm, then we can
     zap the entire conditional.  */
  else if (integer_nonzerop (cond) && !else_has_label)
    {
      if (warn_notreached)
        remove_useless_stmts_warn_notreached (else_clause);
      *stmt_p = then_clause;
      data->repeat = true;
    }
  else if (integer_zerop (cond) && !then_has_label)
    {
      if (warn_notreached)
        remove_useless_stmts_warn_notreached (then_clause);
      *stmt_p = else_clause;
      data->repeat = true;
    }

  /* Check a couple of simple things on then/else with single stmts.  */
  else
    {
      tree then_stmt = expr_only (then_clause);
      tree else_stmt = expr_only (else_clause);

      /* Notice branches to a common destination.  */
      if (then_stmt && else_stmt
          && TREE_CODE (then_stmt) == GOTO_EXPR
          && TREE_CODE (else_stmt) == GOTO_EXPR
          && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
        {
          *stmt_p = then_stmt;
          data->repeat = true;
        }

      /* If the THEN/ELSE clause merely assigns a value to a variable or
         parameter which is already known to contain that value, then
         remove the useless THEN/ELSE clause.  */
      else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
        {
          if (else_stmt
              && TREE_CODE (else_stmt) == MODIFY_EXPR
              && TREE_OPERAND (else_stmt, 0) == cond
              && integer_zerop (TREE_OPERAND (else_stmt, 1)))
            COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
        }
      else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
               && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
                   || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
               && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
        {
          tree stmt = (TREE_CODE (cond) == EQ_EXPR
                       ? then_stmt : else_stmt);
          tree *location = (TREE_CODE (cond) == EQ_EXPR
                            ? &COND_EXPR_THEN (*stmt_p)
                            : &COND_EXPR_ELSE (*stmt_p));

          if (stmt
              && TREE_CODE (stmt) == MODIFY_EXPR
              && TREE_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
              && TREE_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
            *location = alloc_stmt_list ();
        }
    }

  /* Protect GOTOs in the arm of COND_EXPRs from being removed.  They
     would be re-introduced during lowering.  */
  data->last_goto = NULL;
}

static void
remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
{
  bool save_may_branch, save_may_throw;
  bool this_may_branch, this_may_throw;

  /* Collect may_branch and may_throw information for the body only.  */
  save_may_branch = data->may_branch;
  save_may_throw = data->may_throw;
  data->may_branch = false;
  data->may_throw = false;
  data->last_goto = NULL;

  remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);

  this_may_branch = data->may_branch;
  this_may_throw = data->may_throw;
  data->may_branch |= save_may_branch;
  data->may_throw |= save_may_throw;
  data->last_goto = NULL;

  remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);

  /* If the body is empty, then we can emit the FINALLY block without
     the enclosing TRY_FINALLY_EXPR.  */
  if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
    {
      *stmt_p = TREE_OPERAND (*stmt_p, 1);
      data->repeat = true;
    }

  /* If the handler is empty, then we can emit the TRY block without
     the enclosing TRY_FINALLY_EXPR.  */
  else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
    {
      *stmt_p = TREE_OPERAND (*stmt_p, 0);
      data->repeat = true;
    }

  /* If the body neither throws, nor branches, then we can safely
     string the TRY and FINALLY blocks together.  */
  else if (!this_may_branch && !this_may_throw)
    {
      tree stmt = *stmt_p;
      *stmt_p = TREE_OPERAND (stmt, 0);
      append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
      data->repeat = true;
    }
}

static void
remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
{
  bool save_may_throw, this_may_throw;
  tree_stmt_iterator i;
  tree stmt;

  /* Collect may_throw information for the body only.  */
  save_may_throw = data->may_throw;
  data->may_throw = false;
  data->last_goto = NULL;

  remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);

  this_may_throw = data->may_throw;
  data->may_throw = save_may_throw;

  /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR.  */
  if (!this_may_throw)
    {
      if (warn_notreached)
        remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
      *stmt_p = TREE_OPERAND (*stmt_p, 0);
      data->repeat = true;
      return;
    }

  /* Process the catch clause specially.  We may be able to tell that
     no exceptions propagate past this point.  */

  this_may_throw = true;
  i = tsi_start (TREE_OPERAND (*stmt_p, 1));
  stmt = tsi_stmt (i);
  data->last_goto = NULL;

  switch (TREE_CODE (stmt))
    {
    case CATCH_EXPR:
      for (; !tsi_end_p (i); tsi_next (&i))
        {
          stmt = tsi_stmt (i);
          /* If we catch all exceptions, then the body does not
             propagate exceptions past this point.  */
          if (CATCH_TYPES (stmt) == NULL)
            this_may_throw = false;
          data->last_goto = NULL;
          remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
        }
      break;

    case EH_FILTER_EXPR:
      if (EH_FILTER_MUST_NOT_THROW (stmt))
        this_may_throw = false;
      else if (EH_FILTER_TYPES (stmt) == NULL)
        this_may_throw = false;
      remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
      break;

    default:
      /* Otherwise this is a cleanup.  */
      remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);

      /* If the cleanup is empty, then we can emit the TRY block without
         the enclosing TRY_CATCH_EXPR.  */
      if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
        {
          *stmt_p = TREE_OPERAND (*stmt_p, 0);
          data->repeat = true;
        }
      break;
    }
  data->may_throw |= this_may_throw;
}

static void
remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
{
  tree block;

  /* First remove anything underneath the BIND_EXPR.  */
  remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);

  /* If the BIND_EXPR has no variables, then we can pull everything
     up one level and remove the BIND_EXPR, unless this is the toplevel
     BIND_EXPR for the current function or an inlined function.

     When this situation occurs we will want to apply this
     optimization again.  */
  block = BIND_EXPR_BLOCK (*stmt_p);
  if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
      && *stmt_p != DECL_SAVED_TREE (current_function_decl)
      && (! block
          || ! BLOCK_ABSTRACT_ORIGIN (block)
          || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
              != FUNCTION_DECL)))
    {
      *stmt_p = BIND_EXPR_BODY (*stmt_p);
      data->repeat = true;
    }
}


static void
remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
{
  tree dest = GOTO_DESTINATION (*stmt_p);

  data->may_branch = true;
  data->last_goto = NULL;

  /* Record the last goto expr, so that we can delete it if unnecessary.  */
  if (TREE_CODE (dest) == LABEL_DECL)
    data->last_goto = stmt_p;
}


static void
remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
{
  tree label = LABEL_EXPR_LABEL (*stmt_p);

  data->has_label = true;

  /* We do want to jump across non-local label receiver code.  */
  if (DECL_NONLOCAL (label))
    data->last_goto = NULL;

  else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
    {
      *data->last_goto = build_empty_stmt ();
      data->repeat = true;
    }

  /* ??? Add something here to delete unused labels.  */
}

/* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
   decl.  This allows us to eliminate redundant or useless
   calls to "const" functions.

   Gimplifier already does the same operation, but we may notice functions
   being const and pure once their calls have been gimplified, so we need
   to update the flag.  */

static void
update_call_expr_flags (tree call)
{
  tree decl = get_callee_fndecl (call);
  if (!decl)
    return;
  if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
    TREE_SIDE_EFFECTS (call) = 0;
  if (TREE_NOTHROW (decl))
    TREE_NOTHROW (call) = 1;
}


/* T is CALL_EXPR.  Set current_function_calls_* flags.  */

void
notice_special_calls (tree t)
{
  int flags = call_expr_flags (t);

  if (flags & ECF_MAY_BE_ALLOCA)
    current_function_calls_alloca = true;
  if (flags & ECF_RETURNS_TWICE)
    current_function_calls_setjmp = true;
}


/* Clear flags set by notice_special_calls.  Used by dead code removal
   to update the flags.  */

void
clear_special_calls (void)
{
  current_function_calls_alloca = false;
  current_function_calls_setjmp = false;
}

static void
remove_useless_stmts_1 (tree *tp, struct rus_data *data)
{
  tree t = *tp, op;

  switch (TREE_CODE (t))
    {
    case COND_EXPR:
      remove_useless_stmts_cond (tp, data);
      break;

    case TRY_FINALLY_EXPR:
      remove_useless_stmts_tf (tp, data);
      break;

    case TRY_CATCH_EXPR:
      remove_useless_stmts_tc (tp, data);
      break;

    case BIND_EXPR:
      remove_useless_stmts_bind (tp, data);
      break;

    case GOTO_EXPR:
      remove_useless_stmts_goto (tp, data);
      break;

    case LABEL_EXPR:
      remove_useless_stmts_label (tp, data);
      break;

    case RETURN_EXPR:
      fold_stmt (tp);
      data->last_goto = NULL;
      data->may_branch = true;
      break;

    case CALL_EXPR:
      fold_stmt (tp);
      data->last_goto = NULL;
      notice_special_calls (t);
      update_call_expr_flags (t);
      if (tree_could_throw_p (t))
        data->may_throw = true;
      break;

    case MODIFY_EXPR:
      data->last_goto = NULL;
      fold_stmt (tp);
      op = get_call_expr_in (t);
      if (op)
        {
          update_call_expr_flags (op);
          notice_special_calls (op);
        }
      if (tree_could_throw_p (t))
        data->may_throw = true;
      break;

    case STATEMENT_LIST:
      {
        tree_stmt_iterator i = tsi_start (t);
        while (!tsi_end_p (i))
          {
            t = tsi_stmt (i);
            if (IS_EMPTY_STMT (t))
              {
                tsi_delink (&i);
                continue;
              }

            remove_useless_stmts_1 (tsi_stmt_ptr (i), data);

            t = tsi_stmt (i);
            if (TREE_CODE (t) == STATEMENT_LIST)
              {
                tsi_link_before (&i, t, TSI_SAME_STMT);
                tsi_delink (&i);
              }
            else
              tsi_next (&i);
          }
      }
      break;
    case ASM_EXPR:
      fold_stmt (tp);
      data->last_goto = NULL;
      break;

    default:
      data->last_goto = NULL;
      break;
    }
}

static void
remove_useless_stmts (void)
{
  struct rus_data data;

  clear_special_calls ();

  do
    {
      memset (&data, 0, sizeof (data));
      remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
    }
  while (data.repeat);
}


struct tree_opt_pass pass_remove_useless_stmts =
{
  "useless",			/* name */
  NULL,				/* gate */
  remove_useless_stmts,		/* execute */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  0,				/* tv_id */
  PROP_gimple_any,		/* properties_required */
  0,				/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  TODO_dump_func,		/* todo_flags_finish */
  0				/* letter */
};

/* Remove PHI nodes associated with basic block BB and all edges out of BB.  */

static void
remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
{
  tree phi;

  /* Since this block is no longer reachable, we can just delete all
     of its PHI nodes.  */
  phi = phi_nodes (bb);
  while (phi)
    {
      tree next = PHI_CHAIN (phi);
      remove_phi_node (phi, NULL_TREE);
      phi = next;
    }

  /* Remove edges to BB's successors.  */
  while (EDGE_COUNT (bb->succs) > 0)
    remove_edge (EDGE_SUCC (bb, 0));
}


/* Remove statements of basic block BB.  */

static void
remove_bb (basic_block bb)
{
  block_stmt_iterator i;
#ifdef USE_MAPPED_LOCATION
  source_location loc = UNKNOWN_LOCATION;
#else
  source_locus loc = 0;
#endif

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
        {
          dump_bb (bb, dump_file, 0);
          fprintf (dump_file, "\n");
        }
    }

  /* If we remove the header or the latch of a loop, mark the loop for
     removal by setting its header and latch to NULL.  */
  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      if (loop->latch == bb
          || loop->header == bb)
        {
          loop->latch = NULL;
          loop->header = NULL;
        }
    }

  /* Remove all the instructions in the block.  */
  for (i = bsi_start (bb); !bsi_end_p (i);)
    {
      tree stmt = bsi_stmt (i);
      if (TREE_CODE (stmt) == LABEL_EXPR
          && FORCED_LABEL (LABEL_EXPR_LABEL (stmt)))
        {
          basic_block new_bb = bb->prev_bb;
          block_stmt_iterator new_bsi = bsi_start (new_bb);

          bsi_remove (&i);
          bsi_insert_before (&new_bsi, stmt, BSI_NEW_STMT);
        }
      else
        {
          release_defs (stmt);

          set_bb_for_stmt (stmt, NULL);
          bsi_remove (&i);
        }

      /* Don't warn for removed gotos.  Gotos are often removed due to
         jump threading, thus resulting in bogus warnings.  Not great,
         since this way we lose warnings for gotos in the original
         program that are indeed unreachable.  */
      if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
        {
#ifdef USE_MAPPED_LOCATION
          if (EXPR_HAS_LOCATION (stmt))
            loc = EXPR_LOCATION (stmt);
#else
          source_locus t;
          t = EXPR_LOCUS (stmt);
          if (t && LOCATION_LINE (*t) > 0)
            loc = t;
#endif
        }
    }

  /* If requested, give a warning that the first statement in the
     block is unreachable.  We walk statements backwards in the
     loop above, so the last statement we process is the first statement
     in the block.  */
#ifdef USE_MAPPED_LOCATION
  if (warn_notreached && loc > BUILTINS_LOCATION)
    warning (0, "%Hwill never be executed", &loc);
#else
  if (warn_notreached && loc)
    warning (0, "%Hwill never be executed", loc);
#endif

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
}

/* A list of all the noreturn calls passed to modify_stmt.
   cleanup_control_flow uses it to detect cases where a mid-block
   indirect call has been turned into a noreturn call.  When this
   happens, all the instructions after the call are no longer
   reachable and must be deleted as dead.  */

VEC(tree,gc) *modified_noreturn_calls;

/* Try to remove superfluous control structures.  */

static bool
cleanup_control_flow (void)
{
  basic_block bb;
  block_stmt_iterator bsi;
  bool retval = false;
  tree stmt;

  /* Detect cases where a mid-block call is now known not to return.  */
  while (VEC_length (tree, modified_noreturn_calls))
    {
      stmt = VEC_pop (tree, modified_noreturn_calls);
      bb = bb_for_stmt (stmt);
      if (bb != NULL && last_stmt (bb) != stmt && noreturn_call_p (stmt))
        split_block (bb, stmt);
    }

  FOR_EACH_BB (bb)
    {
      bsi = bsi_last (bb);

      if (bsi_end_p (bsi))
        continue;

      stmt = bsi_stmt (bsi);
      if (TREE_CODE (stmt) == COND_EXPR
          || TREE_CODE (stmt) == SWITCH_EXPR)
        retval |= cleanup_control_expr_graph (bb, bsi);

      /* If we had a computed goto which has a compile-time determinable
         destination, then we can eliminate the goto.  */
      if (TREE_CODE (stmt) == GOTO_EXPR
          && TREE_CODE (GOTO_DESTINATION (stmt)) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (GOTO_DESTINATION (stmt), 0)) == LABEL_DECL)
        {
          edge e;
          tree label;
          edge_iterator ei;
          basic_block target_block;
          bool removed_edge = false;

          /* First look at all the outgoing edges.  Delete any outgoing
             edges which do not go to the right block.  For the one
             edge which goes to the right block, fix up its flags.  */
          label = TREE_OPERAND (GOTO_DESTINATION (stmt), 0);
          target_block = label_to_block (label);
          for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
            {
              if (e->dest != target_block)
                {
                  removed_edge = true;
                  remove_edge (e);
                }
              else
                {
                  /* Turn off the EDGE_ABNORMAL flag.  */
                  e->flags &= ~EDGE_ABNORMAL;

                  /* And set EDGE_FALLTHRU.  */
                  e->flags |= EDGE_FALLTHRU;
                  ei_next (&ei);
                }
            }

          /* If we removed one or more edges, then we will need to fix the
             dominators.  It may be possible to incrementally update them.  */
          if (removed_edge)
            free_dominance_info (CDI_DOMINATORS);

          /* Remove the GOTO_EXPR as it is not needed.  The CFG has all the
             relevant information we need.  */
          bsi_remove (&bsi);
          retval = true;
        }

      /* Check for indirect calls that have been turned into
         noreturn calls.  */
      if (noreturn_call_p (stmt) && remove_fallthru_edge (bb->succs))
        {
          free_dominance_info (CDI_DOMINATORS);
          retval = true;
        }
    }
  return retval;
}

/* Disconnect an unreachable block in the control expression starting
   at block BB.  */

static bool
cleanup_control_expr_graph (basic_block bb, block_stmt_iterator bsi)
{
  edge taken_edge;
  bool retval = false;
  tree expr = bsi_stmt (bsi), val;

  if (!single_succ_p (bb))
    {
      edge e;
      edge_iterator ei;

      switch (TREE_CODE (expr))
        {
        case COND_EXPR:
          val = COND_EXPR_COND (expr);
          break;

        case SWITCH_EXPR:
          val = SWITCH_COND (expr);
          if (TREE_CODE (val) != INTEGER_CST)
            return false;
          break;

        default:
          gcc_unreachable ();
        }

      taken_edge = find_taken_edge (bb, val);
      if (!taken_edge)
        return false;

      /* Remove all the edges except the one that is always executed.  */
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e != taken_edge)
            {
              taken_edge->probability += e->probability;
              taken_edge->count += e->count;
              remove_edge (e);
              retval = true;
            }
          else
            ei_next (&ei);
        }
      if (taken_edge->probability > REG_BR_PROB_BASE)
        taken_edge->probability = REG_BR_PROB_BASE;
    }
  else
    taken_edge = single_succ_edge (bb);

  bsi_remove (&bsi);
  taken_edge->flags = EDGE_FALLTHRU;

  /* We removed some paths from the cfg.  */
  free_dominance_info (CDI_DOMINATORS);

  return retval;
}

2174 /* Remove any fallthru edge from EV. Return true if an edge was removed. */
2176 static bool
2177 remove_fallthru_edge (VEC(edge,gc) *ev)
2179 edge_iterator ei;
2180 edge e;
2182 FOR_EACH_EDGE (e, ei, ev)
2183 if ((e->flags & EDGE_FALLTHRU) != 0)
2185 remove_edge (e);
2186 return true;
2188 return false;
2191 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2192 predicate VAL, return the edge that will be taken out of the block.
2193 If VAL does not match a unique edge, NULL is returned. */
2195 edge
2196 find_taken_edge (basic_block bb, tree val)
2198 tree stmt;
2200 stmt = last_stmt (bb);
2202 gcc_assert (stmt);
2203 gcc_assert (is_ctrl_stmt (stmt));
2204 gcc_assert (val);
2206 if (! is_gimple_min_invariant (val))
2207 return NULL;
2209 if (TREE_CODE (stmt) == COND_EXPR)
2210 return find_taken_edge_cond_expr (bb, val);
2212 if (TREE_CODE (stmt) == SWITCH_EXPR)
2213 return find_taken_edge_switch_expr (bb, val);
2215 if (computed_goto_p (stmt))
2216 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2218 gcc_unreachable ();
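/* Usage sketch (illustrative only; BB and E stand for a caller's locals).
   A pass that has proven the controlling predicate constant can ask which
   successor survives:

     edge e = find_taken_edge (bb, integer_zero_node);
     if (e)
       ... every other outgoing edge of BB is dead ...

   cleanup_control_expr_graph above is the canonical in-tree caller.  */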
2221 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2222 statement, determine which of the outgoing edges will be taken out of the
2223 block. Return NULL if any edge may be taken. */
2225 static edge
2226 find_taken_edge_computed_goto (basic_block bb, tree val)
2228 basic_block dest;
2229 edge e = NULL;
2231 dest = label_to_block (val);
2232 if (dest)
2234 e = find_edge (bb, dest);
2235 gcc_assert (e != NULL);
2238 return e;
2241 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2242 statement, determine which of the two edges will be taken out of the
2243 block. Return NULL if either edge may be taken. */
2245 static edge
2246 find_taken_edge_cond_expr (basic_block bb, tree val)
2248 edge true_edge, false_edge;
2250 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2252 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2253 return (zero_p (val) ? false_edge : true_edge);
2256 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2257 statement, determine which edge will be taken out of the block. Return
2258 NULL if any edge may be taken. */
2260 static edge
2261 find_taken_edge_switch_expr (basic_block bb, tree val)
2263 tree switch_expr, taken_case;
2264 basic_block dest_bb;
2265 edge e;
2267 switch_expr = last_stmt (bb);
2268 taken_case = find_case_label_for_value (switch_expr, val);
2269 dest_bb = label_to_block (CASE_LABEL (taken_case));
2271 e = find_edge (bb, dest_bb);
2272 gcc_assert (e);
2273 return e;
2277 /* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
2278 We can make optimal use here of the fact that the case labels are
2279 sorted: We can do a binary search for a case matching VAL. */
2281 static tree
2282 find_case_label_for_value (tree switch_expr, tree val)
2284 tree vec = SWITCH_LABELS (switch_expr);
2285 size_t low, high, n = TREE_VEC_LENGTH (vec);
2286 tree default_case = TREE_VEC_ELT (vec, n - 1);
2288 for (low = -1, high = n - 1; high - low > 1; )
2290 size_t i = (high + low) / 2;
2291 tree t = TREE_VEC_ELT (vec, i);
2292 int cmp;
2294 /* Cache the result of comparing CASE_LOW and val. */
2295 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2297 if (cmp > 0)
2298 high = i;
2299 else
2300 low = i;
2302 if (CASE_HIGH (t) == NULL)
2304 /* A single-valued case label. */
2305 if (cmp == 0)
2306 return t;
2308 else
2310 /* A case range. We can only handle integer ranges. */
2311 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2312 return t;
2316 return default_case;
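/* Worked example (illustrative, for a hypothetical switch): given the
   sorted case vector

     case 1:   case 5 ... 9:   default:

   a VAL of 7 lands on the range label 5 ... 9 (cmp <= 0 and CASE_HIGH >= VAL),
   while a VAL of 3 matches no label and the default case stored in the last
   vector element is returned.  */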
2320 /* If all the PHI nodes in DEST have alternatives for E1 and E2 and
2321 those alternatives are equal in each of the PHI nodes, then return
2322 true, else return false. */
2324 static bool
2325 phi_alternatives_equal (basic_block dest, edge e1, edge e2)
2327 int n1 = e1->dest_idx;
2328 int n2 = e2->dest_idx;
2329 tree phi;
2331 for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
2333 tree val1 = PHI_ARG_DEF (phi, n1);
2334 tree val2 = PHI_ARG_DEF (phi, n2);
2336 gcc_assert (val1 != NULL_TREE);
2337 gcc_assert (val2 != NULL_TREE);
2339 if (!operand_equal_for_phi_arg_p (val1, val2))
2340 return false;
2343 return true;
2347 /*---------------------------------------------------------------------------
2348 Debugging functions
2349 ---------------------------------------------------------------------------*/
2351 /* Dump tree-specific information of block BB to file OUTF. */
2353 void
2354 tree_dump_bb (basic_block bb, FILE *outf, int indent)
2356 dump_generic_bb (outf, bb, indent, TDF_VOPS);
2360 /* Dump a basic block on stderr. */
2362 void
2363 debug_tree_bb (basic_block bb)
2365 dump_bb (bb, stderr, 0);
2369 /* Dump basic block with index N on stderr. */
2371 basic_block
2372 debug_tree_bb_n (int n)
2374 debug_tree_bb (BASIC_BLOCK (n));
2375 return BASIC_BLOCK (n);
2379 /* Dump the CFG on stderr.
2381 FLAGS are the same used by the tree dumping functions
2382 (see TDF_* in tree.h). */
2384 void
2385 debug_tree_cfg (int flags)
2387 dump_tree_cfg (stderr, flags);
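/* This is mostly useful from a debugger, e.g.

     (gdb) call debug_tree_cfg (TDF_DETAILS | TDF_STATS)

   (illustrative; any combination of the TDF_* flags from tree.h may be
   passed).  */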
2391 /* Dump the program showing basic block boundaries on the given FILE.
2393 FLAGS are the same used by the tree dumping functions (see TDF_* in
2394 tree.h). */
2396 void
2397 dump_tree_cfg (FILE *file, int flags)
2399 if (flags & TDF_DETAILS)
2401 const char *funcname
2402 = lang_hooks.decl_printable_name (current_function_decl, 2);
2404 fputc ('\n', file);
2405 fprintf (file, ";; Function %s\n\n", funcname);
2406 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2407 n_basic_blocks, n_edges, last_basic_block);
2409 brief_dump_cfg (file);
2410 fprintf (file, "\n");
2413 if (flags & TDF_STATS)
2414 dump_cfg_stats (file);
2416 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2420 /* Dump CFG statistics on FILE. */
2422 void
2423 dump_cfg_stats (FILE *file)
2425 static long max_num_merged_labels = 0;
2426 unsigned long size, total = 0;
2427 long num_edges;
2428 basic_block bb;
2429 const char * const fmt_str = "%-30s%-13s%12s\n";
2430 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2431 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2432 const char *funcname
2433 = lang_hooks.decl_printable_name (current_function_decl, 2);
2436 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2438 fprintf (file, "---------------------------------------------------------\n");
2439 fprintf (file, fmt_str, "", " Number of ", "Memory");
2440 fprintf (file, fmt_str, "", " instances ", "used ");
2441 fprintf (file, "---------------------------------------------------------\n");
2443 size = n_basic_blocks * sizeof (struct basic_block_def);
2444 total += size;
2445 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2446 SCALE (size), LABEL (size));
2448 num_edges = 0;
2449 FOR_EACH_BB (bb)
2450 num_edges += EDGE_COUNT (bb->succs);
2451 size = num_edges * sizeof (struct edge_def);
2452 total += size;
2453 fprintf (file, fmt_str_1, "Edges", num_edges, SCALE (size), LABEL (size));
2455 size = n_basic_blocks * sizeof (struct bb_ann_d);
2456 total += size;
2457 fprintf (file, fmt_str_1, "Basic block annotations", n_basic_blocks,
2458 SCALE (size), LABEL (size));
2460 fprintf (file, "---------------------------------------------------------\n");
2461 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2462 LABEL (total));
2463 fprintf (file, "---------------------------------------------------------\n");
2464 fprintf (file, "\n");
2466 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2467 max_num_merged_labels = cfg_stats.num_merged_labels;
2469 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2470 cfg_stats.num_merged_labels, max_num_merged_labels);
2472 fprintf (file, "\n");
2476 /* Dump CFG statistics on stderr. Keep extern so that it's always
2477 linked in the final executable. */
2479 void
2480 debug_cfg_stats (void)
2482 dump_cfg_stats (stderr);
2486 /* Dump the flowgraph to a .vcg FILE. */
2488 static void
2489 tree_cfg2vcg (FILE *file)
2491 edge e;
2492 edge_iterator ei;
2493 basic_block bb;
2494 const char *funcname
2495 = lang_hooks.decl_printable_name (current_function_decl, 2);
2497 /* Write the file header. */
2498 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2499 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2500 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2502 /* Write blocks and edges. */
2503 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2505 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2506 e->dest->index);
2508 if (e->flags & EDGE_FAKE)
2509 fprintf (file, " linestyle: dotted priority: 10");
2510 else
2511 fprintf (file, " linestyle: solid priority: 100");
2513 fprintf (file, " }\n");
2515 fputc ('\n', file);
2517 FOR_EACH_BB (bb)
2519 enum tree_code head_code, end_code;
2520 const char *head_name, *end_name;
2521 int head_line = 0;
2522 int end_line = 0;
2523 tree first = first_stmt (bb);
2524 tree last = last_stmt (bb);
2526 if (first)
2528 head_code = TREE_CODE (first);
2529 head_name = tree_code_name[head_code];
2530 head_line = get_lineno (first);
2532 else
2533 head_name = "no-statement";
2535 if (last)
2537 end_code = TREE_CODE (last);
2538 end_name = tree_code_name[end_code];
2539 end_line = get_lineno (last);
2541 else
2542 end_name = "no-statement";
2544 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2545 bb->index, bb->index, head_name, head_line, end_name,
2546 end_line);
2548 FOR_EACH_EDGE (e, ei, bb->succs)
2550 if (e->dest == EXIT_BLOCK_PTR)
2551 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2552 else
2553 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2555 if (e->flags & EDGE_FAKE)
2556 fprintf (file, " priority: 10 linestyle: dotted");
2557 else
2558 fprintf (file, " priority: 100 linestyle: solid");
2560 fprintf (file, " }\n");
2563 if (bb->next_bb != EXIT_BLOCK_PTR)
2564 fputc ('\n', file);
2567 fputs ("}\n\n", file);
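/* The output is plain VCG text.  An abridged sketch of its shape (node and
   edge lines repeat per basic block; "foo" and the line numbers are made up):

     graph: { title: "foo"
     node: { title: "ENTRY" label: "ENTRY" }
     node: { title: "EXIT" label: "EXIT" }
     edge: { sourcename: "ENTRY" targetname: "0" linestyle: solid priority: 100 }
     node: { title: "0" label: "#0\nLABEL_EXPR (3)\nRETURN_EXPR (5)"}
     ...
     }  */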
2572 /*---------------------------------------------------------------------------
2573 Miscellaneous helpers
2574 ---------------------------------------------------------------------------*/
2576 /* Return true if T represents a stmt that always transfers control. */
2578 bool
2579 is_ctrl_stmt (tree t)
2581 return (TREE_CODE (t) == COND_EXPR
2582 || TREE_CODE (t) == SWITCH_EXPR
2583 || TREE_CODE (t) == GOTO_EXPR
2584 || TREE_CODE (t) == RETURN_EXPR
2585 || TREE_CODE (t) == RESX_EXPR);
2589 /* Return true if T is a statement that may alter the flow of control
2590 (e.g., a call to a non-returning function). */
2592 bool
2593 is_ctrl_altering_stmt (tree t)
2595 tree call;
2597 gcc_assert (t);
2598 call = get_call_expr_in (t);
2599 if (call)
2601 /* A non-pure/const CALL_EXPR alters flow control if the current
2602 function has nonlocal labels. */
2603 if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
2604 return true;
2606 /* A CALL_EXPR also alters control flow if it does not return. */
2607 if (call_expr_flags (call) & ECF_NORETURN)
2608 return true;
2611 /* If a statement can throw, it alters control flow. */
2612 return tree_can_throw_internal (t);
2616 /* Return true if T is a computed goto. */
2618 bool
2619 computed_goto_p (tree t)
2621 return (TREE_CODE (t) == GOTO_EXPR
2622 && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
2626 /* Return true if EXPR is a simple local goto. */
2628 bool
2629 simple_goto_p (tree expr)
2631 return (TREE_CODE (expr) == GOTO_EXPR
2632 && TREE_CODE (GOTO_DESTINATION (expr)) == LABEL_DECL);
2636 /* Return true if T should start a new basic block. PREV_T is the
2637 statement preceding T. It is used when T is a label or a case label.
2638 Labels should only start a new basic block if their previous statement
2639 wasn't a label. Otherwise, a sequence of labels would generate
2640 unnecessary basic blocks that only contain a single label. */
2642 static inline bool
2643 stmt_starts_bb_p (tree t, tree prev_t)
2645 if (t == NULL_TREE)
2646 return false;
2648 /* LABEL_EXPRs start a new basic block only if the preceding
2649 statement wasn't a label of the same type. This prevents the
2650 creation of consecutive blocks that have nothing but a single
2651 label. */
2652 if (TREE_CODE (t) == LABEL_EXPR)
2654 /* Nonlocal and computed GOTO targets always start a new block. */
2655 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
2656 || FORCED_LABEL (LABEL_EXPR_LABEL (t)))
2657 return true;
2659 if (prev_t && TREE_CODE (prev_t) == LABEL_EXPR)
2661 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
2662 return true;
2664 cfg_stats.num_merged_labels++;
2665 return false;
2667 else
2668 return true;
2671 return false;
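/* Illustrative example: given the statement sequence

     L1:;  L2:;  x = 1;

   only L1 starts a new basic block; L2 is coalesced into the same block
   (and counted in cfg_stats.num_merged_labels) unless one of the labels is
   nonlocal or L2 is a forced label.  */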
2675 /* Return true if T should end a basic block. */
2677 bool
2678 stmt_ends_bb_p (tree t)
2680 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2684 /* Add gotos that used to be represented implicitly in the CFG. */
2686 void
2687 disband_implicit_edges (void)
2689 basic_block bb;
2690 block_stmt_iterator last;
2691 edge e;
2692 edge_iterator ei;
2693 tree stmt, label;
2695 FOR_EACH_BB (bb)
2697 last = bsi_last (bb);
2698 stmt = last_stmt (bb);
2700 if (stmt && TREE_CODE (stmt) == COND_EXPR)
2702 /* Remove superfluous gotos from COND_EXPR branches. Moved here
2703 from cfg_remove_useless_stmts since it violates the invariants
2704 for tree-CFG correspondence and thus fits better here, where we
2705 do it anyway. */
2706 e = find_edge (bb, bb->next_bb);
2707 if (e)
2709 if (e->flags & EDGE_TRUE_VALUE)
2710 COND_EXPR_THEN (stmt) = build_empty_stmt ();
2711 else if (e->flags & EDGE_FALSE_VALUE)
2712 COND_EXPR_ELSE (stmt) = build_empty_stmt ();
2713 else
2714 gcc_unreachable ();
2715 e->flags |= EDGE_FALLTHRU;
2718 continue;
2721 if (stmt && TREE_CODE (stmt) == RETURN_EXPR)
2723 /* Remove the RETURN_EXPR if we may fall through to the exit
2724 instead. */
2725 gcc_assert (single_succ_p (bb));
2726 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
2728 if (bb->next_bb == EXIT_BLOCK_PTR
2729 && !TREE_OPERAND (stmt, 0))
2731 bsi_remove (&last);
2732 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
2734 continue;
2737 /* There can be no fallthru edge if the last statement is a control
2738 one. */
2739 if (stmt && is_ctrl_stmt (stmt))
2740 continue;
2742 /* Find a fallthru edge and emit the goto if necessary. */
2743 FOR_EACH_EDGE (e, ei, bb->succs)
2744 if (e->flags & EDGE_FALLTHRU)
2745 break;
2747 if (!e || e->dest == bb->next_bb)
2748 continue;
2750 gcc_assert (e->dest != EXIT_BLOCK_PTR);
2751 label = tree_block_label (e->dest);
2753 stmt = build1 (GOTO_EXPR, void_type_node, label);
2754 #ifdef USE_MAPPED_LOCATION
2755 SET_EXPR_LOCATION (stmt, e->goto_locus);
2756 #else
2757 SET_EXPR_LOCUS (stmt, e->goto_locus);
2758 #endif
2759 bsi_insert_after (&last, stmt, BSI_NEW_STMT);
2760 e->flags &= ~EDGE_FALLTHRU;
2764 /* Remove block annotations and other data structures. */
2766 void
2767 delete_tree_cfg_annotations (void)
2769 basic_block bb;
2770 if (n_basic_blocks > 0)
2771 free_blocks_annotations ();
2773 label_to_block_map = NULL;
2774 FOR_EACH_BB (bb)
2775 bb->rbi = NULL;
2779 /* Return the first statement in basic block BB. */
2781 tree
2782 first_stmt (basic_block bb)
2784 block_stmt_iterator i = bsi_start (bb);
2785 return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
2789 /* Return the last statement in basic block BB. */
2791 tree
2792 last_stmt (basic_block bb)
2794 block_stmt_iterator b = bsi_last (bb);
2795 return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
2799 /* Return a pointer to the last statement in block BB. */
2801 tree *
2802 last_stmt_ptr (basic_block bb)
2804 block_stmt_iterator last = bsi_last (bb);
2805 return !bsi_end_p (last) ? bsi_stmt_ptr (last) : NULL;
2809 /* Return the last statement of an otherwise empty block. Return NULL
2810 if the block is totally empty, or if it contains more than one
2811 statement. */
2813 tree
2814 last_and_only_stmt (basic_block bb)
2816 block_stmt_iterator i = bsi_last (bb);
2817 tree last, prev;
2819 if (bsi_end_p (i))
2820 return NULL_TREE;
2822 last = bsi_stmt (i);
2823 bsi_prev (&i);
2824 if (bsi_end_p (i))
2825 return last;
2827 /* Empty statements should no longer appear in the instruction stream.
2828 Everything that might have appeared before should be deleted by
2829 remove_useless_stmts, and the optimizers should just bsi_remove
2830 instead of smashing with build_empty_stmt.
2832 Thus the only thing that should appear here in a block containing
2833 one executable statement is a label. */
2834 prev = bsi_stmt (i);
2835 if (TREE_CODE (prev) == LABEL_EXPR)
2836 return last;
2837 else
2838 return NULL_TREE;
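/* Illustrative examples: a block containing only "L1:; x = foo ();" yields
   the assignment (the leading label is ignored), while a block containing
   "x = foo (); bar (x);" yields NULL_TREE because it holds two executable
   statements.  */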
2842 /* Mark BB as the basic block holding statement T. */
2844 void
2845 set_bb_for_stmt (tree t, basic_block bb)
2847 if (TREE_CODE (t) == PHI_NODE)
2848 PHI_BB (t) = bb;
2849 else if (TREE_CODE (t) == STATEMENT_LIST)
2851 tree_stmt_iterator i;
2852 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2853 set_bb_for_stmt (tsi_stmt (i), bb);
2855 else
2857 stmt_ann_t ann = get_stmt_ann (t);
2858 ann->bb = bb;
2860 /* If the statement is a label, add the label to the block-to-labels map
2861 so that we can speed up edge creation for GOTO_EXPRs. */
2862 if (TREE_CODE (t) == LABEL_EXPR)
2864 int uid;
2866 t = LABEL_EXPR_LABEL (t);
2867 uid = LABEL_DECL_UID (t);
2868 if (uid == -1)
2870 LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
2871 if (VARRAY_SIZE (label_to_block_map) <= (unsigned) uid)
2872 VARRAY_GROW (label_to_block_map, 3 * uid / 2);
2874 else
2875 /* We're moving an existing label. Make sure that we've
2876 removed it from the old block. */
2877 gcc_assert (!bb || !VARRAY_BB (label_to_block_map, uid));
2878 VARRAY_BB (label_to_block_map, uid) = bb;
2883 /* Find the iterator for STMT. */
2885 extern block_stmt_iterator
2886 bsi_for_stmt (tree stmt)
2888 block_stmt_iterator bsi;
2890 for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
2891 if (bsi_stmt (bsi) == stmt)
2892 return bsi;
2894 gcc_unreachable ();
2897 /* Update statement T, or each statement in statement list T, if it has been marked modified. */
2898 static inline void
2899 update_modified_stmts (tree t)
2901 if (TREE_CODE (t) == STATEMENT_LIST)
2903 tree_stmt_iterator i;
2904 tree stmt;
2905 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2907 stmt = tsi_stmt (i);
2908 update_stmt_if_modified (stmt);
2911 else
2912 update_stmt_if_modified (t);
2915 /* Insert statement (or statement list) T before the statement
2916 pointed-to by iterator I. M specifies how to update iterator I
2917 after insertion (see enum bsi_iterator_update). */
2919 void
2920 bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2922 set_bb_for_stmt (t, i->bb);
2923 update_modified_stmts (t);
2924 tsi_link_before (&i->tsi, t, m);
2928 /* Insert statement (or statement list) T after the statement
2929 pointed-to by iterator I. M specifies how to update iterator I
2930 after insertion (see enum bsi_iterator_update). */
2932 void
2933 bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2935 set_bb_for_stmt (t, i->bb);
2936 update_modified_stmts (t);
2937 tsi_link_after (&i->tsi, t, m);
2941 /* Remove the statement pointed to by iterator I. The iterator is updated
2942 to the next statement. */
2944 void
2945 bsi_remove (block_stmt_iterator *i)
2947 tree t = bsi_stmt (*i);
2948 set_bb_for_stmt (t, NULL);
2949 delink_stmt_imm_use (t);
2950 tsi_delink (&i->tsi);
2951 mark_stmt_modified (t);
2955 /* Move the statement at FROM so it comes right after the statement at TO. */
2957 void
2958 bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
2960 tree stmt = bsi_stmt (*from);
2961 bsi_remove (from);
2962 bsi_insert_after (to, stmt, BSI_SAME_STMT);
2966 /* Move the statement at FROM so it comes right before the statement at TO. */
2968 void
2969 bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
2971 tree stmt = bsi_stmt (*from);
2972 bsi_remove (from);
2973 bsi_insert_before (to, stmt, BSI_SAME_STMT);
2977 /* Move the statement at FROM to the end of basic block BB. */
2979 void
2980 bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
2982 block_stmt_iterator last = bsi_last (bb);
2984 /* Have to check bsi_end_p because it could be an empty block. */
2985 if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
2986 bsi_move_before (from, &last);
2987 else
2988 bsi_move_after (from, &last);
2992 /* Replace the contents of the statement pointed to by iterator BSI
2993 with STMT. If PRESERVE_EH_INFO is true, the exception handling
2994 information of the original statement is preserved. */
2996 void
2997 bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool preserve_eh_info)
2999 int eh_region;
3000 tree orig_stmt = bsi_stmt (*bsi);
3002 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
3003 set_bb_for_stmt (stmt, bsi->bb);
3005 /* Preserve EH region information from the original statement, if
3006 requested by the caller. */
3007 if (preserve_eh_info)
3009 eh_region = lookup_stmt_eh_region (orig_stmt);
3010 if (eh_region >= 0)
3011 add_stmt_to_eh_region (stmt, eh_region);
3014 delink_stmt_imm_use (orig_stmt);
3015 *bsi_stmt_ptr (*bsi) = stmt;
3016 mark_stmt_modified (stmt);
3017 update_modified_stmts (stmt);
3021 /* Insert the statement pointed-to by BSI into edge E. Every attempt
3022 is made to place the statement in an existing basic block, but
3023 sometimes that isn't possible. When it isn't possible, the edge is
3024 split and the statement is added to the new block.
3026 In all cases, the returned *BSI points to the correct location. The
3027 return value is true if insertion should be done after the location,
3028 or false if it should be done before the location. If a new basic
3029 block has to be created, it is stored in *NEW_BB. */
3031 static bool
3032 tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
3033 basic_block *new_bb)
3035 basic_block dest, src;
3036 tree tmp;
3038 dest = e->dest;
3039 restart:
3041 /* If the destination has one predecessor which has no PHI nodes,
3042 insert there. Except for the exit block.
3044 The requirement for no PHI nodes could be relaxed. Basically we
3045 would have to examine the PHIs to prove that none of them used
3046 the value set by the statement we want to insert on E. That
3047 hardly seems worth the effort. */
3048 if (single_pred_p (dest)
3049 && ! phi_nodes (dest)
3050 && dest != EXIT_BLOCK_PTR)
3052 *bsi = bsi_start (dest);
3053 if (bsi_end_p (*bsi))
3054 return true;
3056 /* Make sure we insert after any leading labels. */
3057 tmp = bsi_stmt (*bsi);
3058 while (TREE_CODE (tmp) == LABEL_EXPR)
3060 bsi_next (bsi);
3061 if (bsi_end_p (*bsi))
3062 break;
3063 tmp = bsi_stmt (*bsi);
3066 if (bsi_end_p (*bsi))
3068 *bsi = bsi_last (dest);
3069 return true;
3071 else
3072 return false;
3075 /* If the source has one successor, the edge is not abnormal and
3076 the last statement does not end a basic block, insert there.
3077 Except for the entry block. */
3078 src = e->src;
3079 if ((e->flags & EDGE_ABNORMAL) == 0
3080 && single_succ_p (src)
3081 && src != ENTRY_BLOCK_PTR)
3083 *bsi = bsi_last (src);
3084 if (bsi_end_p (*bsi))
3085 return true;
3087 tmp = bsi_stmt (*bsi);
3088 if (!stmt_ends_bb_p (tmp))
3089 return true;
3091 /* Insert code just before returning the value. We may need to decompose
3092 the return in case it contains a non-trivial operand. */
3093 if (TREE_CODE (tmp) == RETURN_EXPR)
3095 tree op = TREE_OPERAND (tmp, 0);
3096 if (!is_gimple_val (op))
3098 gcc_assert (TREE_CODE (op) == MODIFY_EXPR);
3099 bsi_insert_before (bsi, op, BSI_NEW_STMT);
3100 TREE_OPERAND (tmp, 0) = TREE_OPERAND (op, 0);
3102 bsi_prev (bsi);
3103 return true;
3107 /* Otherwise, create a new basic block, and split this edge. */
3108 dest = split_edge (e);
3109 if (new_bb)
3110 *new_bb = dest;
3111 e = single_pred_edge (dest);
3112 goto restart;
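/* Callers use the boolean result to pick the insertion primitive, as the
   two commit functions below do:

     if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
       bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
     else
       bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);

   (E, STMT and NEW_BB here stand for the caller's own variables.)  */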
3116 /* This routine will commit all pending edge insertions, creating any new
3117 basic blocks which are necessary. */
3119 void
3120 bsi_commit_edge_inserts (void)
3122 basic_block bb;
3123 edge e;
3124 edge_iterator ei;
3126 bsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR), NULL);
3128 FOR_EACH_BB (bb)
3129 FOR_EACH_EDGE (e, ei, bb->succs)
3130 bsi_commit_one_edge_insert (e, NULL);
3134 /* Commit insertions pending at edge E. If a new block is created, set NEW_BB
3135 to this block, otherwise set it to NULL. */
3137 void
3138 bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
3140 if (new_bb)
3141 *new_bb = NULL;
3142 if (PENDING_STMT (e))
3144 block_stmt_iterator bsi;
3145 tree stmt = PENDING_STMT (e);
3147 PENDING_STMT (e) = NULL_TREE;
3149 if (tree_find_edge_insert_loc (e, &bsi, new_bb))
3150 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3151 else
3152 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3157 /* Add STMT to the pending list of edge E. No actual insertion is
3158 made until a call to bsi_commit_edge_inserts () is made. */
3160 void
3161 bsi_insert_on_edge (edge e, tree stmt)
3163 append_to_statement_list (stmt, &PENDING_STMT (e));
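/* Typical deferred-insertion pattern (illustrative; E and STMT stand for a
   caller's variables):

     bsi_insert_on_edge (e, stmt);
     ... possibly queue more statements on other edges ...
     bsi_commit_edge_inserts ();

   bsi_insert_on_edge_immediate below is the non-batched variant and reports
   any newly created block right away.  */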
3166 /* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts. If a new
3167 block has to be created, it is returned. */
3169 basic_block
3170 bsi_insert_on_edge_immediate (edge e, tree stmt)
3172 block_stmt_iterator bsi;
3173 basic_block new_bb = NULL;
3175 gcc_assert (!PENDING_STMT (e));
3177 if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
3178 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3179 else
3180 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3182 return new_bb;
3185 /*---------------------------------------------------------------------------
3186 Tree specific functions for CFG manipulation
3187 ---------------------------------------------------------------------------*/
3189 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
3191 static void
3192 reinstall_phi_args (edge new_edge, edge old_edge)
3194 tree var, phi;
3196 if (!PENDING_STMT (old_edge))
3197 return;
3199 for (var = PENDING_STMT (old_edge), phi = phi_nodes (new_edge->dest);
3200 var && phi;
3201 var = TREE_CHAIN (var), phi = PHI_CHAIN (phi))
3203 tree result = TREE_PURPOSE (var);
3204 tree arg = TREE_VALUE (var);
3206 gcc_assert (result == PHI_RESULT (phi));
3208 add_phi_arg (phi, arg, new_edge);
3211 PENDING_STMT (old_edge) = NULL;
3214 /* Split a (typically critical) edge EDGE_IN. Return the new block.
3215 Abort on abnormal edges. */
3217 static basic_block
3218 tree_split_edge (edge edge_in)
3220 basic_block new_bb, after_bb, dest, src;
3221 edge new_edge, e;
3223 /* Abnormal edges cannot be split. */
3224 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
3226 src = edge_in->src;
3227 dest = edge_in->dest;
3229 /* Place the new block in the block list. Try to keep the new block
3230 near its "logical" location. This is of most help to humans looking
3231 at debugging dumps. */
3232 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
3233 after_bb = edge_in->src;
3234 else
3235 after_bb = dest->prev_bb;
3237 new_bb = create_empty_bb (after_bb);
3238 new_bb->frequency = EDGE_FREQUENCY (edge_in);
3239 new_bb->count = edge_in->count;
3240 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
3241 new_edge->probability = REG_BR_PROB_BASE;
3242 new_edge->count = edge_in->count;
3244 e = redirect_edge_and_branch (edge_in, new_bb);
3245 gcc_assert (e);
3246 reinstall_phi_args (new_edge, e);
3248 return new_bb;
3252 /* Return true when BB has label LABEL in it. */
3254 static bool
3255 has_label_p (basic_block bb, tree label)
3257 block_stmt_iterator bsi;
3259 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3261 tree stmt = bsi_stmt (bsi);
3263 if (TREE_CODE (stmt) != LABEL_EXPR)
3264 return false;
3265 if (LABEL_EXPR_LABEL (stmt) == label)
3266 return true;
3268 return false;
3272 /* Callback for walk_tree, check that all elements with address taken are
3273 properly noticed as such. DATA is non-null if *TP was seen
3274 inside a PHI node. */
3276 static tree
3277 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3279 tree t = *tp, x;
3280 bool in_phi = (data != NULL);
3282 if (TYPE_P (t))
3283 *walk_subtrees = 0;
3285 /* Check operand N for being valid GIMPLE and give error MSG if not.
3286 We check for constants explicitly since they are not considered
3287 gimple invariants if they overflowed. */
3288 #define CHECK_OP(N, MSG) \
3289 do { if (!CONSTANT_CLASS_P (TREE_OPERAND (t, N)) \
3290 && !is_gimple_val (TREE_OPERAND (t, N))) \
3291 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
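/* For reference, CHECK_OP (0, "msg") expands to roughly

     do { if (!CONSTANT_CLASS_P (TREE_OPERAND (t, 0))
              && !is_gimple_val (TREE_OPERAND (t, 0)))
            { error ("msg"); return TREE_OPERAND (t, 0); }} while (0)

   so a failing check stops the walk and hands the offending operand back to
   the caller for dumping.  */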
3293 switch (TREE_CODE (t))
3295 case SSA_NAME:
3296 if (SSA_NAME_IN_FREE_LIST (t))
3298 error ("SSA name in freelist but still referenced");
3299 return *tp;
3301 break;
3303 case ASSERT_EXPR:
3304 x = fold (ASSERT_EXPR_COND (t));
3305 if (x == boolean_false_node)
3307 error ("ASSERT_EXPR with an always-false condition");
3308 return *tp;
3310 break;
3312 case MODIFY_EXPR:
3313 x = TREE_OPERAND (t, 0);
3314 if (TREE_CODE (x) == BIT_FIELD_REF
3315 && is_gimple_reg (TREE_OPERAND (x, 0)))
3317 error ("GIMPLE register modified with BIT_FIELD_REF");
3318 return t;
3320 break;
3322 case ADDR_EXPR:
3323 /* ??? tree-ssa-alias.c may have overlooked dead PHI nodes, missing
3324 dead PHIs that take the address of something. But if the PHI
3325 result is dead, the fact that it takes the address of anything
3326 is irrelevant. Because we cannot tell from here if a PHI result
3327 is dead, we just skip this check for PHIs altogether. This means
3328 we may be missing "valid" checks, but what can you do?
3329 This was PR19217. */
3330 if (in_phi)
3331 break;
3333 /* Skip any references (they will be checked when we recurse down the
3334 tree) and ensure that any variable used as a prefix is marked
3335 addressable. */
3336 for (x = TREE_OPERAND (t, 0);
3337 handled_component_p (x);
3338 x = TREE_OPERAND (x, 0))
3341 if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
3342 return NULL;
3343 if (!TREE_ADDRESSABLE (x))
3345 error ("address taken, but ADDRESSABLE bit not set");
3346 return x;
3348 break;
3350 case COND_EXPR:
3351 x = COND_EXPR_COND (t);
3352 if (TREE_CODE (TREE_TYPE (x)) != BOOLEAN_TYPE)
3354 error ("non-boolean used in condition");
3355 return x;
3357 break;
3359 case NOP_EXPR:
3360 case CONVERT_EXPR:
3361 case FIX_TRUNC_EXPR:
3362 case FIX_CEIL_EXPR:
3363 case FIX_FLOOR_EXPR:
3364 case FIX_ROUND_EXPR:
3365 case FLOAT_EXPR:
3366 case NEGATE_EXPR:
3367 case ABS_EXPR:
3368 case BIT_NOT_EXPR:
3369 case NON_LVALUE_EXPR:
3370 case TRUTH_NOT_EXPR:
3371 CHECK_OP (0, "Invalid operand to unary operator");
3372 break;
3374 case REALPART_EXPR:
3375 case IMAGPART_EXPR:
3376 case COMPONENT_REF:
3377 case ARRAY_REF:
3378 case ARRAY_RANGE_REF:
3379 case BIT_FIELD_REF:
3380 case VIEW_CONVERT_EXPR:
3381 /* We have a nest of references. Verify that each of the operands
3382 that determine where to reference is either a constant or a variable,
3383 verify that the base is valid, and then show we've already checked
3384 the subtrees. */
3385 while (handled_component_p (t))
3387 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3388 CHECK_OP (2, "Invalid COMPONENT_REF offset operator");
3389 else if (TREE_CODE (t) == ARRAY_REF
3390 || TREE_CODE (t) == ARRAY_RANGE_REF)
3392 CHECK_OP (1, "Invalid array index.");
3393 if (TREE_OPERAND (t, 2))
3394 CHECK_OP (2, "Invalid array lower bound.");
3395 if (TREE_OPERAND (t, 3))
3396 CHECK_OP (3, "Invalid array stride.");
3398 else if (TREE_CODE (t) == BIT_FIELD_REF)
3400 CHECK_OP (1, "Invalid operand to BIT_FIELD_REF");
3401 CHECK_OP (2, "Invalid operand to BIT_FIELD_REF");
3404 t = TREE_OPERAND (t, 0);
3407 if (!CONSTANT_CLASS_P (t) && !is_gimple_lvalue (t))
3409 error ("Invalid reference prefix.");
3410 return t;
3412 *walk_subtrees = 0;
3413 break;
3415 case LT_EXPR:
3416 case LE_EXPR:
3417 case GT_EXPR:
3418 case GE_EXPR:
3419 case EQ_EXPR:
3420 case NE_EXPR:
3421 case UNORDERED_EXPR:
3422 case ORDERED_EXPR:
3423 case UNLT_EXPR:
3424 case UNLE_EXPR:
3425 case UNGT_EXPR:
3426 case UNGE_EXPR:
3427 case UNEQ_EXPR:
3428 case LTGT_EXPR:
3429 case PLUS_EXPR:
3430 case MINUS_EXPR:
3431 case MULT_EXPR:
3432 case TRUNC_DIV_EXPR:
3433 case CEIL_DIV_EXPR:
3434 case FLOOR_DIV_EXPR:
3435 case ROUND_DIV_EXPR:
3436 case TRUNC_MOD_EXPR:
3437 case CEIL_MOD_EXPR:
3438 case FLOOR_MOD_EXPR:
3439 case ROUND_MOD_EXPR:
3440 case RDIV_EXPR:
3441 case EXACT_DIV_EXPR:
3442 case MIN_EXPR:
3443 case MAX_EXPR:
3444 case LSHIFT_EXPR:
3445 case RSHIFT_EXPR:
3446 case LROTATE_EXPR:
3447 case RROTATE_EXPR:
3448 case BIT_IOR_EXPR:
3449 case BIT_XOR_EXPR:
3450 case BIT_AND_EXPR:
3451 CHECK_OP (0, "Invalid operand to binary operator");
3452 CHECK_OP (1, "Invalid operand to binary operator");
3453 break;
3455 default:
3456 break;
3458 return NULL;
3460 #undef CHECK_OP
3464 /* Verify STMT, return true if STMT is not in GIMPLE form.
3465 TODO: Implement type checking. */
3467 static bool
3468 verify_stmt (tree stmt, bool last_in_block)
3470 tree addr;
3472 if (!is_gimple_stmt (stmt))
3474 error ("Is not a valid GIMPLE statement.");
3475 goto fail;
3478 addr = walk_tree (&stmt, verify_expr, NULL, NULL);
3479 if (addr)
3481 debug_generic_stmt (addr);
3482 return true;
3485 /* If the statement is marked as part of an EH region, then it is
3486 expected that the statement could throw. Verify that when we
3487 have optimizations that simplify statements such that we prove
3488 that they cannot throw, that we update other data structures
3489 to match. */
3490 if (lookup_stmt_eh_region (stmt) >= 0)
3492 if (!tree_could_throw_p (stmt))
3494 error ("Statement marked for throw, but doesn%'t.");
3495 goto fail;
3497 if (!last_in_block && tree_can_throw_internal (stmt))
3499 error ("Statement marked for throw in middle of block.");
3500 goto fail;
3504 return false;
3506 fail:
3507 debug_generic_stmt (stmt);
3508 return true;
3512 /* Return true when T can be shared. */
3514 static bool
3515 tree_node_can_be_shared (tree t)
3517 if (IS_TYPE_OR_DECL_P (t)
3518 /* We check for constants explicitly since they are not considered
3519 gimple invariants if they overflowed. */
3520 || CONSTANT_CLASS_P (t)
3521 || is_gimple_min_invariant (t)
3522 || TREE_CODE (t) == SSA_NAME
3523 || t == error_mark_node)
3524 return true;
3526 if (TREE_CODE (t) == CASE_LABEL_EXPR)
3527 return true;
3529 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3530 /* We check for constants explicitly since they are not considered
3531 gimple invariants if they overflowed. */
3532 && (CONSTANT_CLASS_P (TREE_OPERAND (t, 1))
3533 || is_gimple_min_invariant (TREE_OPERAND (t, 1))))
3534 || (TREE_CODE (t) == COMPONENT_REF
3535 || TREE_CODE (t) == REALPART_EXPR
3536 || TREE_CODE (t) == IMAGPART_EXPR))
3537 t = TREE_OPERAND (t, 0);
3539 if (DECL_P (t))
3540 return true;
3542 return false;
3546 /* Called via walk_tree. Verify tree sharing. */
3548 static tree
3549 verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
3551 htab_t htab = (htab_t) data;
3552 void **slot;
3554 if (tree_node_can_be_shared (*tp))
3556 *walk_subtrees = false;
3557 return NULL;
3560 slot = htab_find_slot (htab, *tp, INSERT);
3561 if (*slot)
3562 return *slot;
3563 *slot = *tp;
3565 return NULL;
3569 /* Verify the GIMPLE statement chain. */
3571 void
3572 verify_stmts (void)
3574 basic_block bb;
3575 block_stmt_iterator bsi;
3576 bool err = false;
3577 htab_t htab;
3578 tree addr;
3580 timevar_push (TV_TREE_STMT_VERIFY);
3581 htab = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
3583 FOR_EACH_BB (bb)
3585 tree phi;
3586 int i;
3588 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
3590 int phi_num_args = PHI_NUM_ARGS (phi);
3592 if (bb_for_stmt (phi) != bb)
3594 error ("bb_for_stmt (phi) is set to a wrong basic block\n");
3595 err |= true;
3598 for (i = 0; i < phi_num_args; i++)
3600 tree t = PHI_ARG_DEF (phi, i);
3601 tree addr;
3603 /* Addressable variables do have SSA_NAMEs but they
3604 are not considered gimple values. */
3605 if (TREE_CODE (t) != SSA_NAME
3606 && TREE_CODE (t) != FUNCTION_DECL
3607 && !is_gimple_val (t))
3609 error ("PHI def is not a GIMPLE value");
3610 debug_generic_stmt (phi);
3611 debug_generic_stmt (t);
3612 err |= true;
3615 addr = walk_tree (&t, verify_expr, (void *) 1, NULL);
3616 if (addr)
3618 debug_generic_stmt (addr);
3619 err |= true;
3622 addr = walk_tree (&t, verify_node_sharing, htab, NULL);
3623 if (addr)
3625 error ("Incorrect sharing of tree nodes");
3626 debug_generic_stmt (phi);
3627 debug_generic_stmt (addr);
3628 err |= true;
3633 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
3635 tree stmt = bsi_stmt (bsi);
3637 if (bb_for_stmt (stmt) != bb)
3639 error ("bb_for_stmt (stmt) is set to a wrong basic block\n");
3640 err |= true;
3643 bsi_next (&bsi);
3644 err |= verify_stmt (stmt, bsi_end_p (bsi));
3645 addr = walk_tree (&stmt, verify_node_sharing, htab, NULL);
3646 if (addr)
3648 error ("Incorrect sharing of tree nodes");
3649 debug_generic_stmt (stmt);
3650 debug_generic_stmt (addr);
3651 err |= true;
3656 if (err)
3657 internal_error ("verify_stmts failed.");
3659 htab_delete (htab);
3660 timevar_pop (TV_TREE_STMT_VERIFY);
3664 /* Verifies that the flow information is OK. */
3666 static int
3667 tree_verify_flow_info (void)
3669 int err = 0;
3670 basic_block bb;
3671 block_stmt_iterator bsi;
3672 tree stmt;
3673 edge e;
3674 edge_iterator ei;
3676 if (ENTRY_BLOCK_PTR->stmt_list)
3678 error ("ENTRY_BLOCK has a statement list associated with it\n");
3679 err = 1;
3682 if (EXIT_BLOCK_PTR->stmt_list)
3684 error ("EXIT_BLOCK has a statement list associated with it\n");
3685 err = 1;
3688 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
3689 if (e->flags & EDGE_FALLTHRU)
3691 error ("Fallthru to exit from bb %d\n", e->src->index);
3692 err = 1;
3695 FOR_EACH_BB (bb)
3697 bool found_ctrl_stmt = false;
3699 stmt = NULL_TREE;
3701 /* Skip the labels at the start of the basic block. */
3702 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3704 tree prev_stmt = stmt;
3706 stmt = bsi_stmt (bsi);
3708 if (TREE_CODE (stmt) != LABEL_EXPR)
3709 break;
3711 if (prev_stmt && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
3713 error ("Nonlocal label %s is not first "
3714 "in a sequence of labels in bb %d",
3715 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3716 bb->index);
3717 err = 1;
3720 if (label_to_block (LABEL_EXPR_LABEL (stmt)) != bb)
3722 error ("Label %s to block does not match in bb %d\n",
3723 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3724 bb->index);
3725 err = 1;
3728 if (decl_function_context (LABEL_EXPR_LABEL (stmt))
3729 != current_function_decl)
3731 error ("Label %s has incorrect context in bb %d\n",
3732 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3733 bb->index);
3734 err = 1;
3738 /* Verify that body of basic block BB is free of control flow. */
3739 for (; !bsi_end_p (bsi); bsi_next (&bsi))
3741 tree stmt = bsi_stmt (bsi);
3743 if (found_ctrl_stmt)
3745 error ("Control flow in the middle of basic block %d\n",
3746 bb->index);
3747 err = 1;
3750 if (stmt_ends_bb_p (stmt))
3751 found_ctrl_stmt = true;
3753 if (TREE_CODE (stmt) == LABEL_EXPR)
3755 error ("Label %s in the middle of basic block %d\n",
3756 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3757 bb->index);
3758 err = 1;
3761 bsi = bsi_last (bb);
3762 if (bsi_end_p (bsi))
3763 continue;
3765 stmt = bsi_stmt (bsi);
3767 err |= verify_eh_edges (stmt);
3769 if (is_ctrl_stmt (stmt))
3771 FOR_EACH_EDGE (e, ei, bb->succs)
3772 if (e->flags & EDGE_FALLTHRU)
3774 error ("Fallthru edge after a control statement in bb %d \n",
3775 bb->index);
3776 err = 1;
3780 switch (TREE_CODE (stmt))
3782 case COND_EXPR:
3784 edge true_edge;
3785 edge false_edge;
3786 if (TREE_CODE (COND_EXPR_THEN (stmt)) != GOTO_EXPR
3787 || TREE_CODE (COND_EXPR_ELSE (stmt)) != GOTO_EXPR)
3789 error ("Structured COND_EXPR at the end of bb %d\n", bb->index);
3790 err = 1;
3793 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
3795 if (!true_edge || !false_edge
3796 || !(true_edge->flags & EDGE_TRUE_VALUE)
3797 || !(false_edge->flags & EDGE_FALSE_VALUE)
3798 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3799 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3800 || EDGE_COUNT (bb->succs) >= 3)
3802 error ("Wrong outgoing edge flags at end of bb %d\n",
3803 bb->index);
3804 err = 1;
3807 if (!has_label_p (true_edge->dest,
3808 GOTO_DESTINATION (COND_EXPR_THEN (stmt))))
3810 error ("%<then%> label does not match edge at end of bb %d\n",
3811 bb->index);
3812 err = 1;
3815 if (!has_label_p (false_edge->dest,
3816 GOTO_DESTINATION (COND_EXPR_ELSE (stmt))))
3818 error ("%<else%> label does not match edge at end of bb %d\n",
3819 bb->index);
3820 err = 1;
3823 break;
3825 case GOTO_EXPR:
3826 if (simple_goto_p (stmt))
3828 error ("Explicit goto at end of bb %d\n", bb->index);
3829 err = 1;
3831 else
3833 /* FIXME. We should double check that the labels in the
3834 destination blocks have their address taken. */
3835 FOR_EACH_EDGE (e, ei, bb->succs)
3836 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
3837 | EDGE_FALSE_VALUE))
3838 || !(e->flags & EDGE_ABNORMAL))
3840 error ("Wrong outgoing edge flags at end of bb %d\n",
3841 bb->index);
3842 err = 1;
3845 break;
3847 case RETURN_EXPR:
3848 if (!single_succ_p (bb)
3849 || (single_succ_edge (bb)->flags
3850 & (EDGE_FALLTHRU | EDGE_ABNORMAL
3851 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3853 error ("Wrong outgoing edge flags at end of bb %d\n", bb->index);
3854 err = 1;
3856 if (single_succ (bb) != EXIT_BLOCK_PTR)
3858 error ("Return edge does not point to exit in bb %d\n",
3859 bb->index);
3860 err = 1;
3862 break;
3864 case SWITCH_EXPR:
3866 tree prev;
3867 edge e;
3868 size_t i, n;
3869 tree vec;
3871 vec = SWITCH_LABELS (stmt);
3872 n = TREE_VEC_LENGTH (vec);
3874 /* Mark all the destination basic blocks. */
3875 for (i = 0; i < n; ++i)
3877 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3878 basic_block label_bb = label_to_block (lab);
3880 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
3881 label_bb->aux = (void *)1;
3884 /* Verify that the case labels are sorted. */
3885 prev = TREE_VEC_ELT (vec, 0);
3886 for (i = 1; i < n - 1; ++i)
3888 tree c = TREE_VEC_ELT (vec, i);
3889 if (! CASE_LOW (c))
3891 error ("Found default case not at end of case vector");
3892 err = 1;
3893 continue;
3895 if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
3897 error ("Case labels not sorted:\n ");
3898 print_generic_expr (stderr, prev, 0);
3899 fprintf (stderr," is greater than ");
3900 print_generic_expr (stderr, c, 0);
3901 fprintf (stderr," but comes before it.\n");
3902 err = 1;
3904 prev = c;
3906 if (CASE_LOW (TREE_VEC_ELT (vec, n - 1)))
3908 error ("No default case found at end of case vector");
3909 err = 1;
3912 FOR_EACH_EDGE (e, ei, bb->succs)
3914 if (!e->dest->aux)
3916 error ("Extra outgoing edge %d->%d\n",
3917 bb->index, e->dest->index);
3918 err = 1;
3920 e->dest->aux = (void *)2;
3921 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
3922 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3924 error ("Wrong outgoing edge flags at end of bb %d\n",
3925 bb->index);
3926 err = 1;
3930 /* Check that we have all of them. */
3931 for (i = 0; i < n; ++i)
3933 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3934 basic_block label_bb = label_to_block (lab);
3936 if (label_bb->aux != (void *)2)
3938 error ("Missing edge %i->%i",
3939 bb->index, label_bb->index);
3940 err = 1;
3944 FOR_EACH_EDGE (e, ei, bb->succs)
3945 e->dest->aux = (void *)0;
3948 default: ;
3952 if (dom_computed[CDI_DOMINATORS] >= DOM_NO_FAST_QUERY)
3953 verify_dominators (CDI_DOMINATORS);
3955 return err;
3959 /* Updates phi nodes after creating a forwarder block joined
3960 by edge FALLTHRU. */
3962 static void
3963 tree_make_forwarder_block (edge fallthru)
3965 edge e;
3966 edge_iterator ei;
3967 basic_block dummy, bb;
3968 tree phi, new_phi, var;
3970 dummy = fallthru->src;
3971 bb = fallthru->dest;
3973 if (single_pred_p (bb))
3974 return;
3976 /* If we redirected a branch we must create new phi nodes at the
3977 start of BB. */
3978 for (phi = phi_nodes (dummy); phi; phi = PHI_CHAIN (phi))
3980 var = PHI_RESULT (phi);
3981 new_phi = create_phi_node (var, bb);
3982 SSA_NAME_DEF_STMT (var) = new_phi;
3983 SET_PHI_RESULT (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
3984 add_phi_arg (new_phi, PHI_RESULT (phi), fallthru);
3987 /* Ensure that the PHI node chain is in the same order. */
3988 set_phi_nodes (bb, phi_reverse (phi_nodes (bb)));
3990 /* Add the arguments we have stored on edges. */
3991 FOR_EACH_EDGE (e, ei, bb->preds)
3993 if (e == fallthru)
3994 continue;
3996 flush_pending_stmts (e);
4001 /* Return true if basic block BB does nothing except pass control
4002 flow to another block, and if we can safely insert a label at
4003 the start of the successor block.
4005 As a precondition, we require that BB not be equal to
4006 ENTRY_BLOCK_PTR. */
4008 static bool
4009 tree_forwarder_block_p (basic_block bb, bool phi_wanted)
4011 block_stmt_iterator bsi;
4013 /* BB must have a single outgoing edge. */
4014 if (!single_succ_p (bb)
4015 /* If PHI_WANTED is false, BB must not have any PHI nodes.
4016 Otherwise, BB must have PHI nodes. */
4017 || (phi_nodes (bb) != NULL_TREE) != phi_wanted
4018 /* BB may not be a predecessor of EXIT_BLOCK_PTR. */
4019 || single_succ (bb) == EXIT_BLOCK_PTR
4020 /* Nor should this be an infinite loop. */
4021 || single_succ (bb) == bb
4022 /* BB may not have an abnormal outgoing edge. */
4023 || (single_succ_edge (bb)->flags & EDGE_ABNORMAL))
4024 return false;
4026 #if ENABLE_CHECKING
4027 gcc_assert (bb != ENTRY_BLOCK_PTR);
4028 #endif
4030 /* Now walk through the statements backward. We can ignore labels,
4031 anything else means this is not a forwarder block. */
4032 for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4034 tree stmt = bsi_stmt (bsi);
4036 switch (TREE_CODE (stmt))
4038 case LABEL_EXPR:
4039 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
4040 return false;
4041 break;
4043 default:
4044 return false;
4048 if (find_edge (ENTRY_BLOCK_PTR, bb))
4049 return false;
4051 if (current_loops)
4053 basic_block dest;
4054 /* Protect loop latches, headers and preheaders. */
4055 if (bb->loop_father->header == bb)
4056 return false;
4057 dest = EDGE_SUCC (bb, 0)->dest;
4059 if (dest->loop_father->header == dest)
4060 return false;
4063 return true;
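/* Illustrative example: a block whose only statements are ordinary labels,
   say

     L5:;

   with a single, non-abnormal outgoing edge to some other block is a
   forwarder in the PHI_WANTED == false sense; if it additionally starts
   with PHI nodes it is only of interest to the mergephi pass below.  */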
4066 /* Return true if BB has at least one abnormal incoming edge. */
4068 static inline bool
4069 has_abnormal_incoming_edge_p (basic_block bb)
4071 edge e;
4072 edge_iterator ei;
4074 FOR_EACH_EDGE (e, ei, bb->preds)
4075 if (e->flags & EDGE_ABNORMAL)
4076 return true;
4078 return false;
4081 /* Removes forwarder block BB. Returns false if this failed. If a new
4082 forwarder block is created due to redirection of edges, it is
4083 stored to worklist. */
4085 static bool
4086 remove_forwarder_block (basic_block bb, basic_block **worklist)
4088 edge succ = single_succ_edge (bb), e, s;
4089 basic_block dest = succ->dest;
4090 tree label;
4091 tree phi;
4092 edge_iterator ei;
4093 block_stmt_iterator bsi, bsi_to;
4094 bool seen_abnormal_edge = false;
4096 /* We check for infinite loops already in tree_forwarder_block_p.
4097 However it may happen that the infinite loop is created
4098 afterwards due to removal of forwarders. */
4099 if (dest == bb)
4100 return false;
4102 /* If the destination block consists of a nonlocal label, do not merge
4103 it. */
4104 label = first_stmt (dest);
4105 if (label
4106 && TREE_CODE (label) == LABEL_EXPR
4107 && DECL_NONLOCAL (LABEL_EXPR_LABEL (label)))
4108 return false;
4110 /* If there is an abnormal edge to basic block BB, but not into
4111 dest, problems might occur during the removal of the phi node at
4112 out-of-SSA time due to overlapping live ranges of registers.
4114 If there is an abnormal edge in DEST, the problems would occur
4115 anyway since cleanup_dead_labels would then merge the labels for
4116 two different eh regions, and the rest of the exception handling
4117 code does not like it.
4119 So if there is an abnormal edge to BB, proceed only if there is
4120 no abnormal edge to DEST and there are no phi nodes in DEST. */
4121 if (has_abnormal_incoming_edge_p (bb))
4123 seen_abnormal_edge = true;
4125 if (has_abnormal_incoming_edge_p (dest)
4126 || phi_nodes (dest) != NULL_TREE)
4127 return false;
4130 /* If there are phi nodes in DEST, and some of the blocks that are
4131 predecessors of BB are also predecessors of DEST, check that the
4132 phi node arguments match. */
4133 if (phi_nodes (dest))
4135 FOR_EACH_EDGE (e, ei, bb->preds)
4137 s = find_edge (e->src, dest);
4138 if (!s)
4139 continue;
4141 if (!phi_alternatives_equal (dest, succ, s))
4142 return false;
4146 /* Redirect the edges. */
4147 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
4149 if (e->flags & EDGE_ABNORMAL)
4151 /* If there is an abnormal edge, redirect it anyway, and
4152 move the labels to the new block to make it legal. */
4153 s = redirect_edge_succ_nodup (e, dest);
4155 else
4156 s = redirect_edge_and_branch (e, dest);
4158 if (s == e)
4160 /* Create arguments for the phi nodes, since the edge was not
4161 here before. */
4162 for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
4163 add_phi_arg (phi, PHI_ARG_DEF (phi, succ->dest_idx), s);
4165 else
4167 /* The source basic block might become a forwarder. We know
4168 that it was not a forwarder before, since it used to have
4169 at least two outgoing edges, so we may just add it to
4170 worklist. */
4171 if (tree_forwarder_block_p (s->src, false))
4172 *(*worklist)++ = s->src;
4176 if (seen_abnormal_edge)
4178 /* Move the labels to the new block, so that the redirection of
4179 the abnormal edges works. */
4181 bsi_to = bsi_start (dest);
4182 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
4184 label = bsi_stmt (bsi);
4185 gcc_assert (TREE_CODE (label) == LABEL_EXPR);
4186 bsi_remove (&bsi);
4187 bsi_insert_before (&bsi_to, label, BSI_CONTINUE_LINKING);
4191 /* Update the dominators. */
4192 if (dom_info_available_p (CDI_DOMINATORS))
4194 basic_block dom, dombb, domdest;
4196 dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
4197 domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
4198 if (domdest == bb)
4200 /* Shortcut to avoid calling (relatively expensive)
4201 nearest_common_dominator unless necessary. */
4202 dom = dombb;
4204 else
4205 dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);
4207 set_immediate_dominator (CDI_DOMINATORS, dest, dom);
4210 /* And kill the forwarder block. */
4211 delete_basic_block (bb);
4213 return true;
4216 /* Removes forwarder blocks. */
4218 static bool
4219 cleanup_forwarder_blocks (void)
4221 basic_block bb;
4222 bool changed = false;
4223 basic_block *worklist = xmalloc (sizeof (basic_block) * n_basic_blocks);
4224 basic_block *current = worklist;
4226 FOR_EACH_BB (bb)
4228 if (tree_forwarder_block_p (bb, false))
4229 *current++ = bb;
4232 while (current != worklist)
4234 bb = *--current;
4235 changed |= remove_forwarder_block (bb, &current);
4238 free (worklist);
4239 return changed;
4242 /* Merge the PHI nodes at BB into those at BB's sole successor. */
4244 static void
4245 remove_forwarder_block_with_phi (basic_block bb)
4247 edge succ = single_succ_edge (bb);
4248 basic_block dest = succ->dest;
4249 tree label;
4250 basic_block dombb, domdest, dom;
4252 /* We check for infinite loops already in tree_forwarder_block_p.
4253 However it may happen that the infinite loop is created
4254 afterwards due to removal of forwarders. */
4255 if (dest == bb)
4256 return;
4258 /* If the destination block consists of a nonlocal label, do not
4259 merge it. */
4260 label = first_stmt (dest);
4261 if (label
4262 && TREE_CODE (label) == LABEL_EXPR
4263 && DECL_NONLOCAL (LABEL_EXPR_LABEL (label)))
4264 return;
4266 /* Redirect each incoming edge to BB to DEST. */
4267 while (EDGE_COUNT (bb->preds) > 0)
4269 edge e = EDGE_PRED (bb, 0), s;
4270 tree phi;
4272 s = find_edge (e->src, dest);
4273 if (s)
4275 /* We already have an edge S from E->src to DEST. If S and
4276 E->dest's sole successor edge have the same PHI arguments
4277 at DEST, redirect E to DEST. */
4278 if (phi_alternatives_equal (dest, s, succ))
4280 e = redirect_edge_and_branch (e, dest);
4281 PENDING_STMT (e) = NULL_TREE;
4282 continue;
4285 /* PHI arguments are different. Create a forwarder block by
4286 splitting E so that we can merge PHI arguments on E to
4287 DEST. */
4288 e = single_succ_edge (split_edge (e));
4291 s = redirect_edge_and_branch (e, dest);
4293 /* redirect_edge_and_branch must not create a new edge. */
4294 gcc_assert (s == e);
4296 /* Add to the PHI nodes at DEST each PHI argument removed at the
4297 destination of E. */
4298 for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
4300 tree def = PHI_ARG_DEF (phi, succ->dest_idx);
4302 if (TREE_CODE (def) == SSA_NAME)
4304 tree var;
4306 /* If DEF is one of the results of PHI nodes removed during
4307 redirection, replace it with the PHI argument that used
4308 to be on E. */
4309 for (var = PENDING_STMT (e); var; var = TREE_CHAIN (var))
4311 tree old_arg = TREE_PURPOSE (var);
4312 tree new_arg = TREE_VALUE (var);
4314 if (def == old_arg)
4316 def = new_arg;
4317 break;
4322 add_phi_arg (phi, def, s);
4325 PENDING_STMT (e) = NULL;
4328 /* Update the dominators. */
4329 dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
4330 domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
4331 if (domdest == bb)
4333 /* Shortcut to avoid calling (relatively expensive)
4334 nearest_common_dominator unless necessary. */
4335 dom = dombb;
4337 else
4338 dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);
4340 set_immediate_dominator (CDI_DOMINATORS, dest, dom);
4342 /* Remove BB since all of BB's incoming edges have been redirected
4343 to DEST. */
4344 delete_basic_block (bb);
4347 /* This pass merges PHI nodes if one feeds into another. For example,
4348 suppose we have the following:
4350 goto <bb 9> (<L9>);
4352 <L8>:;
4353 tem_17 = foo ();
4355 # tem_6 = PHI <tem_17(8), tem_23(7)>;
4356 <L9>:;
4358 # tem_3 = PHI <tem_6(9), tem_2(5)>;
4359 <L10>:;
4361 Then we merge the first PHI node into the second one like so:
4363 goto <bb 9> (<L10>);
4365 <L8>:;
4366 tem_17 = foo ();
4368 # tem_3 = PHI <tem_23(7), tem_2(5), tem_17(8)>;
4369 <L10>:;
4372 static void
4373 merge_phi_nodes (void)
4375 basic_block *worklist = xmalloc (sizeof (basic_block) * n_basic_blocks);
4376 basic_block *current = worklist;
4377 basic_block bb;
4379 calculate_dominance_info (CDI_DOMINATORS);
4381 /* Find all PHI nodes that we may be able to merge. */
4382 FOR_EACH_BB (bb)
4384 basic_block dest;
4386 /* Look for a forwarder block with PHI nodes. */
4387 if (!tree_forwarder_block_p (bb, true))
4388 continue;
4390 dest = single_succ (bb);
4392 /* BB has to feed into another basic block with PHI
4393 nodes. */
4394 if (!phi_nodes (dest)
4395 /* We don't want to deal with a basic block with
4396 abnormal edges. */
4397 || has_abnormal_incoming_edge_p (bb))
4398 continue;
4400 if (!dominated_by_p (CDI_DOMINATORS, dest, bb))
4402 /* If BB does not dominate DEST, then the PHI nodes at
4403 DEST must be the only users of the results of the PHI
4404 nodes at BB. */
4405 *current++ = bb;
4409 /* Now let's drain WORKLIST. */
4410 while (current != worklist)
4412 bb = *--current;
4413 remove_forwarder_block_with_phi (bb);
4416 free (worklist);
4419 static bool
4420 gate_merge_phi (void)
4422 return 1;
4425 struct tree_opt_pass pass_merge_phi = {
4426 "mergephi", /* name */
4427 gate_merge_phi, /* gate */
4428 merge_phi_nodes, /* execute */
4429 NULL, /* sub */
4430 NULL, /* next */
4431 0, /* static_pass_number */
4432 TV_TREE_MERGE_PHI, /* tv_id */
4433 PROP_cfg | PROP_ssa, /* properties_required */
4434 0, /* properties_provided */
4435 0, /* properties_destroyed */
4436 0, /* todo_flags_start */
4437 TODO_dump_func | TODO_ggc_collect /* todo_flags_finish */
4438 | TODO_verify_ssa,
4439 0 /* letter */
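/* [Editorial note -- not part of tree-cfg.c.]  Like the other passes
   defined in this file, pass_merge_phi only runs once it is linked into
   the pass list walked by the pass manager, roughly:

       NEXT_PASS (pass_merge_phi);

   in init_optimization_passes.  Since gate_merge_phi always returns 1,
   the pass executes whenever it is scheduled there.  */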
4442 /* Return a non-special label at the head of basic block BB.
4443 Create one if it doesn't exist. */
4445 tree
4446 tree_block_label (basic_block bb)
4448 block_stmt_iterator i, s = bsi_start (bb);
4449 bool first = true;
4450 tree label, stmt;
4452 for (i = s; !bsi_end_p (i); first = false, bsi_next (&i))
4454 stmt = bsi_stmt (i);
4455 if (TREE_CODE (stmt) != LABEL_EXPR)
4456 break;
4457 label = LABEL_EXPR_LABEL (stmt);
4458 if (!DECL_NONLOCAL (label))
4460 if (!first)
4461 bsi_move_before (&i, &s);
4462 return label;
4466 label = create_artificial_label ();
4467 stmt = build1 (LABEL_EXPR, void_type_node, label);
4468 bsi_insert_before (&s, stmt, BSI_NEW_STMT);
4469 return label;
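/* [Editorial sketch -- not part of tree-cfg.c.]  A typical use of
   tree_block_label is to materialize an explicit jump to a block, much
   as tree_lv_add_condition_to_bb does further below.  The helper name
   here is hypothetical.  */

static tree
build_goto_to_block (basic_block dest)
{
  /* Reuse a non-special label at the head of DEST, or create one.  */
  tree label = tree_block_label (dest);

  return build1 (GOTO_EXPR, void_type_node, label);
}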
4473 /* Attempt to perform edge redirection by replacing a possibly complex
4474 jump instruction by a goto or by removing the jump completely.
4475 This can apply only if all edges now point to the same block. The
4476 parameters and return values are equivalent to
4477 redirect_edge_and_branch. */
4479 static edge
4480 tree_try_redirect_by_replacing_jump (edge e, basic_block target)
4482 basic_block src = e->src;
4483 block_stmt_iterator b;
4484 tree stmt;
4486 /* We can replace or remove a complex jump only when we have exactly
4487 two edges. */
4488 if (EDGE_COUNT (src->succs) != 2
4489 /* Verify that all targets will be TARGET. Specifically, the
4490 edge that is not E must also go to TARGET. */
4491 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4492 return NULL;
4494 b = bsi_last (src);
4495 if (bsi_end_p (b))
4496 return NULL;
4497 stmt = bsi_stmt (b);
4499 if (TREE_CODE (stmt) == COND_EXPR
4500 || TREE_CODE (stmt) == SWITCH_EXPR)
4502 bsi_remove (&b);
4503 e = ssa_redirect_edge (e, target);
4504 e->flags = EDGE_FALLTHRU;
4505 return e;
4508 return NULL;
4512 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4513 edge representing the redirected branch. */
4515 static edge
4516 tree_redirect_edge_and_branch (edge e, basic_block dest)
4518 basic_block bb = e->src;
4519 block_stmt_iterator bsi;
4520 edge ret;
4521 tree label, stmt;
4523 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4524 return NULL;
4526 if (e->src != ENTRY_BLOCK_PTR
4527 && (ret = tree_try_redirect_by_replacing_jump (e, dest)))
4528 return ret;
4530 if (e->dest == dest)
4531 return NULL;
4533 label = tree_block_label (dest);
4535 bsi = bsi_last (bb);
4536 stmt = bsi_end_p (bsi) ? NULL : bsi_stmt (bsi);
4538 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
4540 case COND_EXPR:
4541 stmt = (e->flags & EDGE_TRUE_VALUE
4542 ? COND_EXPR_THEN (stmt)
4543 : COND_EXPR_ELSE (stmt));
4544 GOTO_DESTINATION (stmt) = label;
4545 break;
4547 case GOTO_EXPR:
4548 /* No non-abnormal edges should lead from a non-simple goto, and
4549 simple ones should be represented implicitly. */
4550 gcc_unreachable ();
4552 case SWITCH_EXPR:
4554 tree cases = get_cases_for_edge (e, stmt);
4556 /* If we have a list of cases associated with E, then use it
4557 as it's a lot faster than walking the entire case vector. */
4558 if (cases)
4560 edge e2 = find_edge (e->src, dest);
4561 tree last, first;
4563 first = cases;
4564 while (cases)
4566 last = cases;
4567 CASE_LABEL (cases) = label;
4568 cases = TREE_CHAIN (cases);
4571 /* If there was already an edge in the CFG, then we need
4572 to move all the cases associated with E to E2. */
4573 if (e2)
4575 tree cases2 = get_cases_for_edge (e2, stmt);
4577 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4578 TREE_CHAIN (cases2) = first;
4581 else
4583 tree vec = SWITCH_LABELS (stmt);
4584 size_t i, n = TREE_VEC_LENGTH (vec);
4586 for (i = 0; i < n; i++)
4588 tree elt = TREE_VEC_ELT (vec, i);
4590 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4591 CASE_LABEL (elt) = label;
4595 break;
4598 case RETURN_EXPR:
4599 bsi_remove (&bsi);
4600 e->flags |= EDGE_FALLTHRU;
4601 break;
4603 default:
4604 /* Otherwise it must be a fallthru edge, and we don't need to
4605 do anything besides redirecting it. */
4606 gcc_assert (e->flags & EDGE_FALLTHRU);
4607 break;
4610 /* Update/insert PHI nodes as necessary. */
4612 /* Now update the edges in the CFG. */
4613 e = ssa_redirect_edge (e, dest);
4615 return e;
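/* [Editorial sketch -- not part of tree-cfg.c.]  Callers normally reach
   tree_redirect_edge_and_branch through the generic cfg hook and then
   commit the PHI arguments queued on the edge, as in this hypothetical
   helper; the same pattern appears in tree_duplicate_sese_region below.  */

static void
redirect_edge_and_flush (edge e, basic_block dest)
{
  edge e2 = redirect_edge_and_branch (e, dest);

  gcc_assert (e2 != NULL);

  /* Move the PHI arguments recorded in PENDING_STMT (E2) over to the
     PHI nodes of the new destination.  */
  flush_pending_stmts (e2);
}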
4619 /* Simple wrapper, as we can always redirect fallthru edges. */
4621 static basic_block
4622 tree_redirect_edge_and_branch_force (edge e, basic_block dest)
4624 e = tree_redirect_edge_and_branch (e, dest);
4625 gcc_assert (e);
4627 return NULL;
4631 /* Splits basic block BB after statement STMT (but at least after the
4632 labels). If STMT is NULL, BB is split just after the labels. */
4634 static basic_block
4635 tree_split_block (basic_block bb, void *stmt)
4637 block_stmt_iterator bsi, bsi_tgt;
4638 tree act;
4639 basic_block new_bb;
4640 edge e;
4641 edge_iterator ei;
4643 new_bb = create_empty_bb (bb);
4645 /* Redirect the outgoing edges. */
4646 new_bb->succs = bb->succs;
4647 bb->succs = NULL;
4648 FOR_EACH_EDGE (e, ei, new_bb->succs)
4649 e->src = new_bb;
4651 if (stmt && TREE_CODE ((tree) stmt) == LABEL_EXPR)
4652 stmt = NULL;
4654 /* Move everything from BSI to the new basic block. */
4655 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4657 act = bsi_stmt (bsi);
4658 if (TREE_CODE (act) == LABEL_EXPR)
4659 continue;
4661 if (!stmt)
4662 break;
4664 if (stmt == act)
4666 bsi_next (&bsi);
4667 break;
4671 bsi_tgt = bsi_start (new_bb);
4672 while (!bsi_end_p (bsi))
4674 act = bsi_stmt (bsi);
4675 bsi_remove (&bsi);
4676 bsi_insert_after (&bsi_tgt, act, BSI_NEW_STMT);
4679 return new_bb;
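/* [Editorial sketch -- not part of tree-cfg.c.]  tree_split_block is
   installed as the split_block hook in tree_cfg_hooks below; callers use
   the generic wrapper, which returns the fallthru edge from BB to the
   newly created block.  The helper name is hypothetical.  */

static basic_block
split_bb_after_stmt (basic_block bb, tree stmt)
{
  edge e = split_block (bb, stmt);

  return e ? e->dest : NULL;
}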
4683 /* Moves basic block BB after block AFTER. */
4685 static bool
4686 tree_move_block_after (basic_block bb, basic_block after)
4688 if (bb->prev_bb == after)
4689 return true;
4691 unlink_block (bb);
4692 link_block (bb, after);
4694 return true;
4698 /* Return true if basic_block can be duplicated. */
4700 static bool
4701 tree_can_duplicate_bb_p (basic_block bb ATTRIBUTE_UNUSED)
4703 return true;
4707 /* Create a duplicate of the basic block BB. NOTE: This does not
4708 preserve SSA form. */
4710 static basic_block
4711 tree_duplicate_bb (basic_block bb)
4713 basic_block new_bb;
4714 block_stmt_iterator bsi, bsi_tgt;
4715 tree phi;
4717 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
4719 /* Copy the PHI nodes. We ignore PHI node arguments here because
4720 the incoming edges have not been set up yet. */
4721 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4723 tree copy = create_phi_node (PHI_RESULT (phi), new_bb);
4724 create_new_def_for (PHI_RESULT (copy), copy, PHI_RESULT_PTR (copy));
4727 /* Keep the chain of PHI nodes in the same order so that they can be
4728 updated by ssa_redirect_edge. */
4729 set_phi_nodes (new_bb, phi_reverse (phi_nodes (new_bb)));
4731 bsi_tgt = bsi_start (new_bb);
4732 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4734 def_operand_p def_p;
4735 ssa_op_iter op_iter;
4736 tree stmt, copy;
4737 int region;
4739 stmt = bsi_stmt (bsi);
4740 if (TREE_CODE (stmt) == LABEL_EXPR)
4741 continue;
4743 /* Create a new copy of STMT and duplicate STMT's virtual
4744 operands. */
4745 copy = unshare_expr (stmt);
4746 bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
4747 copy_virtual_operands (copy, stmt);
4748 region = lookup_stmt_eh_region (stmt);
4749 if (region >= 0)
4750 add_stmt_to_eh_region (copy, region);
4752 /* Create new names for all the definitions created by COPY and
4753 add replacement mappings for each new name. */
4754 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
4755 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
4758 return new_bb;
4762 /* Basic block BB_COPY was created by code duplication. Add phi node
4763 arguments for edges going out of BB_COPY. The blocks that were
4764 duplicated have rbi->duplicated set to one. */
4766 void
4767 add_phi_args_after_copy_bb (basic_block bb_copy)
4769 basic_block bb, dest;
4770 edge e, e_copy;
4771 edge_iterator ei;
4772 tree phi, phi_copy, phi_next, def;
4774 bb = bb_copy->rbi->original;
4776 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
4778 if (!phi_nodes (e_copy->dest))
4779 continue;
4781 if (e_copy->dest->rbi->duplicated)
4782 dest = e_copy->dest->rbi->original;
4783 else
4784 dest = e_copy->dest;
4786 e = find_edge (bb, dest);
4787 if (!e)
4789 /* During loop unrolling the target of the latch edge is copied.
4790 In this case we are not looking for the edge to DEST, but for the
4791 edge to the duplicated block whose original was DEST. */
4792 FOR_EACH_EDGE (e, ei, bb->succs)
4793 if (e->dest->rbi->duplicated
4794 && e->dest->rbi->original == dest)
4795 break;
4797 gcc_assert (e != NULL);
4800 for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
4801 phi;
4802 phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
4804 phi_next = PHI_CHAIN (phi);
4805 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4806 add_phi_arg (phi_copy, def, e_copy);
4811 /* Blocks in REGION_COPY array of length N_REGION were created by
4812 duplication of basic blocks. Add phi node arguments for edges
4813 going from these blocks. */
4815 void
4816 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region)
4818 unsigned i;
4820 for (i = 0; i < n_region; i++)
4821 region_copy[i]->rbi->duplicated = 1;
4823 for (i = 0; i < n_region; i++)
4824 add_phi_args_after_copy_bb (region_copy[i]);
4826 for (i = 0; i < n_region; i++)
4827 region_copy[i]->rbi->duplicated = 0;
4830 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
4831 important exit edge EXIT. By important we mean that no SSA name defined
4832 inside region is live over the other exit edges of the region. All entry
4833 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
4834 to the duplicate of the region. SSA form, dominance and loop information
4835 are updated. The new basic blocks are stored in REGION_COPY in the same
4836 order as they had in REGION, provided that REGION_COPY is not NULL.
4837 The function returns false if it is unable to copy the region,
4838 true otherwise. */
4840 bool
4841 tree_duplicate_sese_region (edge entry, edge exit,
4842 basic_block *region, unsigned n_region,
4843 basic_block *region_copy)
4845 unsigned i, n_doms;
4846 bool free_region_copy = false, copying_header = false;
4847 struct loop *loop = entry->dest->loop_father;
4848 edge exit_copy;
4849 basic_block *doms;
4850 edge redirected;
4852 if (!can_copy_bbs_p (region, n_region))
4853 return false;
4855 /* Some sanity checking. Note that we do not check for all possible
4856 misuses of the functions. E.g., if you ask to copy something weird,
4857 it will work, but the state of structures probably will not be
4858 correct. */
4859 for (i = 0; i < n_region; i++)
4861 /* We do not handle subloops, i.e. all the blocks must belong to the
4862 same loop. */
4863 if (region[i]->loop_father != loop)
4864 return false;
4866 if (region[i] != entry->dest
4867 && region[i] == loop->header)
4868 return false;
4871 loop->copy = loop;
4873 /* In case the function is used for loop header copying (which is the primary
4874 use), ensure that EXIT and its copy will be new latch and entry edges. */
4875 if (loop->header == entry->dest)
4877 copying_header = true;
4878 loop->copy = loop->outer;
4880 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
4881 return false;
4883 for (i = 0; i < n_region; i++)
4884 if (region[i] != exit->src
4885 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
4886 return false;
4889 if (!region_copy)
4891 region_copy = xmalloc (sizeof (basic_block) * n_region);
4892 free_region_copy = true;
4895 gcc_assert (!need_ssa_update_p ());
4897 /* Record blocks outside the region that are dominated by something
4898 inside. */
4899 doms = xmalloc (sizeof (basic_block) * n_basic_blocks);
4900 n_doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region, doms);
4902 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop);
4904 if (copying_header)
4906 loop->header = exit->dest;
4907 loop->latch = exit->src;
4910 /* Redirect the entry and add the phi node arguments. */
4911 redirected = redirect_edge_and_branch (entry, entry->dest->rbi->copy);
4912 gcc_assert (redirected != NULL);
4913 flush_pending_stmts (entry);
4915 /* Concerning updating of dominators: We must recount dominators
4916 for the entry block and its copy. Anything that is outside of the
4917 region, but was dominated by something inside needs recounting as
4918 well. */
4919 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
4920 doms[n_doms++] = entry->dest->rbi->original;
4921 iterate_fix_dominators (CDI_DOMINATORS, doms, n_doms);
4922 free (doms);
4924 /* Add the other PHI node arguments. */
4925 add_phi_args_after_copy (region_copy, n_region);
4927 /* Update the SSA web. */
4928 update_ssa (TODO_update_ssa);
4930 if (free_region_copy)
4931 free (region_copy);
4933 return true;
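/* [Editorial sketch -- not part of tree-cfg.c.]  The primary client of
   tree_duplicate_sese_region is loop header copying, which treats the
   header as a one-block region.  ENTRY is assumed to be the edge entering
   the header from outside the loop and EXIT an exit edge of the region
   that stays inside the loop; the helper name is hypothetical.  */

static bool
copy_one_block_header (edge entry, edge exit)
{
  basic_block region[1];

  region[0] = entry->dest;	/* The loop header itself.  */

  /* Pass NULL for REGION_COPY since the copies are not needed later.  */
  return tree_duplicate_sese_region (entry, exit, region, 1, NULL);
}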
4937 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h). */
4939 void
4940 dump_function_to_file (tree fn, FILE *file, int flags)
4942 tree arg, vars, var;
4943 bool ignore_topmost_bind = false, any_var = false;
4944 basic_block bb;
4945 tree chain;
4947 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
4949 arg = DECL_ARGUMENTS (fn);
4950 while (arg)
4952 print_generic_expr (file, arg, dump_flags);
4953 if (TREE_CHAIN (arg))
4954 fprintf (file, ", ");
4955 arg = TREE_CHAIN (arg);
4957 fprintf (file, ")\n");
4959 if (flags & TDF_DETAILS)
4960 dump_eh_tree (file, DECL_STRUCT_FUNCTION (fn));
4961 if (flags & TDF_RAW)
4963 dump_node (fn, TDF_SLIM | flags, file);
4964 return;
4967 /* When GIMPLE is lowered, the variables are no longer available in
4968 BIND_EXPRs, so display them separately. */
4969 if (cfun && cfun->decl == fn && cfun->unexpanded_var_list)
4971 ignore_topmost_bind = true;
4973 fprintf (file, "{\n");
4974 for (vars = cfun->unexpanded_var_list; vars; vars = TREE_CHAIN (vars))
4976 var = TREE_VALUE (vars);
4978 print_generic_decl (file, var, flags);
4979 fprintf (file, "\n");
4981 any_var = true;
4985 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
4987 /* Make a CFG based dump. */
4988 check_bb_profile (ENTRY_BLOCK_PTR, file);
4989 if (!ignore_topmost_bind)
4990 fprintf (file, "{\n");
4992 if (any_var && n_basic_blocks)
4993 fprintf (file, "\n");
4995 FOR_EACH_BB (bb)
4996 dump_generic_bb (file, bb, 2, flags);
4998 fprintf (file, "}\n");
4999 check_bb_profile (EXIT_BLOCK_PTR, file);
5001 else
5003 int indent;
5005 /* Make a tree based dump. */
5006 chain = DECL_SAVED_TREE (fn);
5008 if (TREE_CODE (chain) == BIND_EXPR)
5010 if (ignore_topmost_bind)
5012 chain = BIND_EXPR_BODY (chain);
5013 indent = 2;
5015 else
5016 indent = 0;
5018 else
5020 if (!ignore_topmost_bind)
5021 fprintf (file, "{\n");
5022 indent = 2;
5025 if (any_var)
5026 fprintf (file, "\n");
5028 print_generic_stmt_indented (file, chain, flags, indent);
5029 if (ignore_topmost_bind)
5030 fprintf (file, "}\n");
5033 fprintf (file, "\n\n");
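/* [Editorial sketch -- not part of tree-cfg.c.]  A convenient way to call
   dump_function_to_file from a debugger is a small wrapper like this
   hypothetical one, which dumps the function currently being compiled
   using the active dump flags.  */

void
debug_current_function_body (void)
{
  dump_function_to_file (cfun->decl, stderr, dump_flags);
}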
5037 /* Pretty printing of the loop intermediate representation. */
5038 static void print_loop (FILE *, struct loop *, int);
5039 static void print_pred_bbs (FILE *, basic_block bb);
5040 static void print_succ_bbs (FILE *, basic_block bb);
5043 /* Print on FILE the indexes of the predecessors of basic block BB. */
5045 static void
5046 print_pred_bbs (FILE *file, basic_block bb)
5048 edge e;
5049 edge_iterator ei;
5051 FOR_EACH_EDGE (e, ei, bb->preds)
5052 fprintf (file, "bb_%d", e->src->index);
5056 /* Print on FILE the indexes of the successors of basic block BB. */
5058 static void
5059 print_succ_bbs (FILE *file, basic_block bb)
5061 edge e;
5062 edge_iterator ei;
5064 FOR_EACH_EDGE (e, ei, bb->succs)
5065 fprintf (file, "bb_%d", e->dest->index);
5069 /* Pretty print LOOP on FILE, indented INDENT spaces. */
5071 static void
5072 print_loop (FILE *file, struct loop *loop, int indent)
5074 char *s_indent;
5075 basic_block bb;
5077 if (loop == NULL)
5078 return;
5080 s_indent = (char *) alloca ((size_t) indent + 1);
5081 memset ((void *) s_indent, ' ', (size_t) indent);
5082 s_indent[indent] = '\0';
5084 /* Print the loop's header. */
5085 fprintf (file, "%sloop_%d\n", s_indent, loop->num);
5087 /* Print the loop's body. */
5088 fprintf (file, "%s{\n", s_indent);
5089 FOR_EACH_BB (bb)
5090 if (bb->loop_father == loop)
5092 /* Print the basic_block's header. */
5093 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
5094 print_pred_bbs (file, bb);
5095 fprintf (file, "}, succs = {");
5096 print_succ_bbs (file, bb);
5097 fprintf (file, "})\n");
5099 /* Print the basic_block's body. */
5100 fprintf (file, "%s {\n", s_indent);
5101 tree_dump_bb (bb, file, indent + 4);
5102 fprintf (file, "%s }\n", s_indent);
5105 print_loop (file, loop->inner, indent + 2);
5106 fprintf (file, "%s}\n", s_indent);
5107 print_loop (file, loop->next, indent);
5111 /* Follow the CFG from the entry point of the program and, on entering
5112 a loop, pretty print the loop structure on FILE. */
5114 void
5115 print_loop_ir (FILE *file)
5117 basic_block bb;
5119 bb = BASIC_BLOCK (0);
5120 if (bb && bb->loop_father)
5121 print_loop (file, bb->loop_father, 0);
5125 /* Debugging loops structure at tree level. */
5127 void
5128 debug_loop_ir (void)
5130 print_loop_ir (stderr);
5134 /* Return true if BB ends with a call, possibly followed by some
5135 instructions that must stay with the call. Return false,
5136 otherwise. */
5138 static bool
5139 tree_block_ends_with_call_p (basic_block bb)
5141 block_stmt_iterator bsi = bsi_last (bb);
5142 return get_call_expr_in (bsi_stmt (bsi)) != NULL;
5146 /* Return true if BB ends with a conditional branch. Return false,
5147 otherwise. */
5149 static bool
5150 tree_block_ends_with_condjump_p (basic_block bb)
5152 tree stmt = last_stmt (bb);
5153 return (stmt && TREE_CODE (stmt) == COND_EXPR);
5157 /* Return true if we need to add fake edge to exit at statement T.
5158 Helper function for tree_flow_call_edges_add. */
5160 static bool
5161 need_fake_edge_p (tree t)
5163 tree call;
5165 /* NORETURN and LONGJMP calls already have an edge to exit.
5166 CONST and PURE calls do not need one.
5167 We don't currently check for CONST and PURE here, although
5168 it would be a good idea, because those attributes are
5169 figured out from the RTL in mark_constant_function, and
5170 the counter incrementation code from -fprofile-arcs
5171 leads to different results from -fbranch-probabilities. */
5172 call = get_call_expr_in (t);
5173 if (call
5174 && !(call_expr_flags (call) & ECF_NORETURN))
5175 return true;
5177 if (TREE_CODE (t) == ASM_EXPR
5178 && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
5179 return true;
5181 return false;
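/* [Editorial sketch -- not part of tree-cfg.c.]  need_fake_edge_p is
   applied statement by statement; a hypothetical helper asking whether
   the statement that ends BB needs a fake edge to EXIT would look like
   this.  */

static bool
block_ends_with_fake_edge_candidate_p (basic_block bb)
{
  tree last = last_stmt (bb);

  return last != NULL_TREE && need_fake_edge_p (last);
}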
5185 /* Add fake edges to the function exit for any non-constant and
5186 non-noreturn calls, and for any volatile inline assembly, in the
5187 blocks specified by BLOCKS, or in the whole CFG if BLOCKS is zero.
5188 Return the number of blocks that were split.
5190 The goal is to expose cases in which entering a basic block does
5191 not imply that all subsequent instructions must be executed. */
5193 static int
5194 tree_flow_call_edges_add (sbitmap blocks)
5196 int i;
5197 int blocks_split = 0;
5198 int last_bb = last_basic_block;
5199 bool check_last_block = false;
5201 if (n_basic_blocks == 0)
5202 return 0;
5204 if (! blocks)
5205 check_last_block = true;
5206 else
5207 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
5209 /* In the last basic block, before epilogue generation, there will be
5210 a fallthru edge to EXIT. Special care is required if the last insn
5211 of the last basic block is a call because make_edge folds duplicate
5212 edges, which would result in the fallthru edge also being marked
5213 fake, which would result in the fallthru edge being removed by
5214 remove_fake_edges, which would result in an invalid CFG.
5216 Moreover, we can't elide the outgoing fake edge, since the block
5217 profiler needs to take this into account in order to solve the minimal
5218 spanning tree in the case that the call doesn't return.
5220 Handle this by adding a dummy instruction in a new last basic block. */
5221 if (check_last_block)
5223 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
5224 block_stmt_iterator bsi = bsi_last (bb);
5225 tree t = NULL_TREE;
5226 if (!bsi_end_p (bsi))
5227 t = bsi_stmt (bsi);
5229 if (need_fake_edge_p (t))
5231 edge e;
5233 e = find_edge (bb, EXIT_BLOCK_PTR);
5234 if (e)
5236 bsi_insert_on_edge (e, build_empty_stmt ());
5237 bsi_commit_edge_inserts ();
5242 /* Now add fake edges to the function exit for any non-constant
5243 calls since there is no way that we can determine if they will
5244 return or not... */
5245 for (i = 0; i < last_bb; i++)
5247 basic_block bb = BASIC_BLOCK (i);
5248 block_stmt_iterator bsi;
5249 tree stmt, last_stmt;
5251 if (!bb)
5252 continue;
5254 if (blocks && !TEST_BIT (blocks, i))
5255 continue;
5257 bsi = bsi_last (bb);
5258 if (!bsi_end_p (bsi))
5260 last_stmt = bsi_stmt (bsi);
5263 stmt = bsi_stmt (bsi);
5264 if (need_fake_edge_p (stmt))
5266 edge e;
5267 /* The handling above of the final block before the
5268 epilogue should be enough to verify that there is
5269 no edge to the exit block in CFG already.
5270 Calling make_edge in such a case would cause us to
5271 mark that edge as fake and remove it later. */
5272 #ifdef ENABLE_CHECKING
5273 if (stmt == last_stmt)
5275 e = find_edge (bb, EXIT_BLOCK_PTR);
5276 gcc_assert (e == NULL);
5278 #endif
5280 /* Note that the following may create a new basic block
5281 and renumber the existing basic blocks. */
5282 if (stmt != last_stmt)
5284 e = split_block (bb, stmt);
5285 if (e)
5286 blocks_split++;
5288 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
5290 bsi_prev (&bsi);
5292 while (!bsi_end_p (bsi));
5296 if (blocks_split)
5297 verify_flow_info ();
5299 return blocks_split;
5302 bool
5303 tree_purge_dead_eh_edges (basic_block bb)
5305 bool changed = false;
5306 edge e;
5307 edge_iterator ei;
5308 tree stmt = last_stmt (bb);
5310 if (stmt && tree_can_throw_internal (stmt))
5311 return false;
5313 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5315 if (e->flags & EDGE_EH)
5317 remove_edge (e);
5318 changed = true;
5320 else
5321 ei_next (&ei);
5324 /* Removal of dead EH edges might change dominators of not
5325 just immediate successors. E.g. when bb1 is changed so that
5326 it no longer can throw and bb1->bb3 and bb1->bb4 are dead
5327 eh edges purged by this function in:
5331    1-->2
5332   / \  |
5333  v   v |
5334  3-->4 |
5336   \--->5
5339 idom(bb5) must be recomputed. For now just free the dominance
5340 info. */
5341 if (changed)
5342 free_dominance_info (CDI_DOMINATORS);
5344 return changed;
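/* [Editorial sketch -- not part of tree-cfg.c.]  A pass that has just
   transformed the last statement of BB so that it can no longer throw
   would typically purge the now-dead EH edges and, because the function
   frees dominance information on any change, rebuild it if dominators
   are still needed.  The helper name is hypothetical.  */

static void
purge_eh_edges_and_refresh_doms (basic_block bb)
{
  if (tree_purge_dead_eh_edges (bb))
    /* tree_purge_dead_eh_edges freed the dominance info; rebuild it.  */
    calculate_dominance_info (CDI_DOMINATORS);
}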
5347 bool
5348 tree_purge_all_dead_eh_edges (bitmap blocks)
5350 bool changed = false;
5351 unsigned i;
5352 bitmap_iterator bi;
5354 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
5356 changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
5359 return changed;
5362 /* This function is called whenever a new edge is created or
5363 redirected. */
5365 static void
5366 tree_execute_on_growing_pred (edge e)
5368 basic_block bb = e->dest;
5370 if (phi_nodes (bb))
5371 reserve_phi_args_for_new_edge (bb);
5374 /* This function is called immediately before edge E is removed from
5375 the edge vector E->dest->preds. */
5377 static void
5378 tree_execute_on_shrinking_pred (edge e)
5380 if (phi_nodes (e->dest))
5381 remove_phi_args (e);
5384 /*---------------------------------------------------------------------------
5385 Helper functions for Loop versioning
5386 ---------------------------------------------------------------------------*/
5388 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
5389 of 'first'. Both of them are dominated by 'new_head' basic block. When
5390 'new_head' was created by splitting 'second's incoming edge, the phi arguments
5391 were moved onto that edge by split_edge(). Later, an additional edge 'e'
5392 was created to connect 'new_head' and 'first'. This routine now adds on
5393 edge 'e' the phi arguments that the edge from 'new_head' to 'second'
5394 received as part of the edge splitting. */
5397 static void
5398 tree_lv_adjust_loop_header_phi (basic_block first, basic_block second,
5399 basic_block new_head, edge e)
5401 tree phi1, phi2;
5402 edge e2 = find_edge (new_head, second);
5404 /* Because NEW_HEAD has been created by splitting SECOND's incoming
5405 edge, we should always have an edge from NEW_HEAD to SECOND. */
5406 gcc_assert (e2 != NULL);
5408 /* Browse all 'second' basic block phi nodes and add phi args to
5409 edge 'e' for 'first' head. PHI args are always in correct order. */
5411 for (phi2 = phi_nodes (second), phi1 = phi_nodes (first);
5412 phi2 && phi1;
5413 phi2 = PHI_CHAIN (phi2), phi1 = PHI_CHAIN (phi1))
5415 tree def = PHI_ARG_DEF (phi2, e2->dest_idx);
5416 add_phi_arg (phi1, def, e);
5420 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
5421 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
5422 the destination of the ELSE part. */
5423 static void
5424 tree_lv_add_condition_to_bb (basic_block first_head, basic_block second_head,
5425 basic_block cond_bb, void *cond_e)
5427 block_stmt_iterator bsi;
5428 tree goto1 = NULL_TREE;
5429 tree goto2 = NULL_TREE;
5430 tree new_cond_expr = NULL_TREE;
5431 tree cond_expr = (tree) cond_e;
5432 edge e0;
5434 /* Build new conditional expr */
5435 goto1 = build1 (GOTO_EXPR, void_type_node, tree_block_label (first_head));
5436 goto2 = build1 (GOTO_EXPR, void_type_node, tree_block_label (second_head));
5437 new_cond_expr = build3 (COND_EXPR, void_type_node, cond_expr, goto1, goto2);
5439 /* Add new cond in cond_bb. */
5440 bsi = bsi_start (cond_bb);
5441 bsi_insert_after (&bsi, new_cond_expr, BSI_NEW_STMT);
5442 /* Adjust edges appropriately to connect new head with first head
5443 as well as second head. */
5444 e0 = single_succ_edge (cond_bb);
5445 e0->flags &= ~EDGE_FALLTHRU;
5446 e0->flags |= EDGE_FALSE_VALUE;
5449 struct cfg_hooks tree_cfg_hooks = {
5450 "tree",
5451 tree_verify_flow_info,
5452 tree_dump_bb, /* dump_bb */
5453 create_bb, /* create_basic_block */
5454 tree_redirect_edge_and_branch,/* redirect_edge_and_branch */
5455 tree_redirect_edge_and_branch_force,/* redirect_edge_and_branch_force */
5456 remove_bb, /* delete_basic_block */
5457 tree_split_block, /* split_block */
5458 tree_move_block_after, /* move_block_after */
5459 tree_can_merge_blocks_p, /* can_merge_blocks_p */
5460 tree_merge_blocks, /* merge_blocks */
5461 tree_predict_edge, /* predict_edge */
5462 tree_predicted_by_p, /* predicted_by_p */
5463 tree_can_duplicate_bb_p, /* can_duplicate_block_p */
5464 tree_duplicate_bb, /* duplicate_block */
5465 tree_split_edge, /* split_edge */
5466 tree_make_forwarder_block, /* make_forward_block */
5467 NULL, /* tidy_fallthru_edge */
5468 tree_block_ends_with_call_p, /* block_ends_with_call_p */
5469 tree_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
5470 tree_flow_call_edges_add, /* flow_call_edges_add */
5471 tree_execute_on_growing_pred, /* execute_on_growing_pred */
5472 tree_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
5473 tree_duplicate_loop_to_header_edge, /* duplicate loop for trees */
5474 tree_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
5475 tree_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
5476 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
5477 flush_pending_stmts /* flush_pending_stmts */
5481 /* Split all critical edges. */
5483 static void
5484 split_critical_edges (void)
5486 basic_block bb;
5487 edge e;
5488 edge_iterator ei;
5490 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
5491 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
5492 mappings around the calls to split_edge. */
5493 start_recording_case_labels ();
5494 FOR_ALL_BB (bb)
5496 FOR_EACH_EDGE (e, ei, bb->succs)
5497 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
5499 split_edge (e);
5502 end_recording_case_labels ();
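/* [Editorial note -- not part of tree-cfg.c.]  EDGE_CRITICAL_P, used
   above, tests the standard notion of a critical edge: the source has
   more than one successor and the destination has more than one
   predecessor.  The hypothetical helper below spells the test out.  */

static bool
edge_is_critical (edge e)
{
  return EDGE_COUNT (e->src->succs) >= 2
	 && EDGE_COUNT (e->dest->preds) >= 2;
}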
5505 struct tree_opt_pass pass_split_crit_edges =
5507 "crited", /* name */
5508 NULL, /* gate */
5509 split_critical_edges, /* execute */
5510 NULL, /* sub */
5511 NULL, /* next */
5512 0, /* static_pass_number */
5513 TV_TREE_SPLIT_EDGES, /* tv_id */
5514 PROP_cfg, /* properties required */
5515 PROP_no_crit_edges, /* properties_provided */
5516 0, /* properties_destroyed */
5517 0, /* todo_flags_start */
5518 TODO_dump_func, /* todo_flags_finish */
5519 0 /* letter */
5523 /* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
5524 a temporary, register the temporary to be renamed if necessary,
5525 and finally return the temporary. Put the statements to compute
5526 EXP before the current statement in BSI. */
5528 tree
5529 gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
5531 tree t, new_stmt, orig_stmt;
5533 if (is_gimple_val (exp))
5534 return exp;
5536 t = make_rename_temp (type, NULL);
5537 new_stmt = build (MODIFY_EXPR, type, t, exp);
5539 orig_stmt = bsi_stmt (*bsi);
5540 SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
5541 TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
5543 bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
5545 return t;
5548 /* Build a ternary operation and gimplify it. Emit code before BSI.
5549 Return the gimple_val holding the result. */
5551 tree
5552 gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
5553 tree type, tree a, tree b, tree c)
5555 tree ret;
5557 ret = fold (build3 (code, type, a, b, c));
5558 STRIP_NOPS (ret);
5560 return gimplify_val (bsi, type, ret);
5563 /* Build a binary operation and gimplify it. Emit code before BSI.
5564 Return the gimple_val holding the result. */
5566 tree
5567 gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
5568 tree type, tree a, tree b)
5570 tree ret;
5572 ret = fold (build2 (code, type, a, b));
5573 STRIP_NOPS (ret);
5575 return gimplify_val (bsi, type, ret);
5578 /* Build a unary operation and gimplify it. Emit code before BSI.
5579 Return the gimple_val holding the result. */
5581 tree
5582 gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
5583 tree a)
5585 tree ret;
5587 ret = fold (build1 (code, type, a));
5588 STRIP_NOPS (ret);
5590 return gimplify_val (bsi, type, ret);
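/* [Editorial sketch -- not part of tree-cfg.c.]  A typical client chains
   the gimplify_build* helpers to emit a small expression as GIMPLE
   statements in front of *BSI.  This hypothetical helper computes
   A + B * C in TYPE and returns a GIMPLE value holding the result.  */

static tree
gimplify_a_plus_b_times_c (block_stmt_iterator *bsi, tree type,
			   tree a, tree b, tree c)
{
  tree prod = gimplify_build2 (bsi, MULT_EXPR, type, b, c);

  return gimplify_build2 (bsi, PLUS_EXPR, type, a, prod);
}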
5595 /* Emit return warnings. */
5597 static void
5598 execute_warn_function_return (void)
5600 #ifdef USE_MAPPED_LOCATION
5601 source_location location;
5602 #else
5603 location_t *locus;
5604 #endif
5605 tree last;
5606 edge e;
5607 edge_iterator ei;
5609 if (warn_missing_noreturn
5610 && !TREE_THIS_VOLATILE (cfun->decl)
5611 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
5612 && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl))
5613 warning (0, "%Jfunction might be possible candidate for "
5614 "attribute %<noreturn%>",
5615 cfun->decl);
5617 /* If we have a path to EXIT, then we do return. */
5618 if (TREE_THIS_VOLATILE (cfun->decl)
5619 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
5621 #ifdef USE_MAPPED_LOCATION
5622 location = UNKNOWN_LOCATION;
5623 #else
5624 locus = NULL;
5625 #endif
5626 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5628 last = last_stmt (e->src);
5629 if (TREE_CODE (last) == RETURN_EXPR
5630 #ifdef USE_MAPPED_LOCATION
5631 && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
5632 #else
5633 && (locus = EXPR_LOCUS (last)) != NULL)
5634 #endif
5635 break;
5637 #ifdef USE_MAPPED_LOCATION
5638 if (location == UNKNOWN_LOCATION)
5639 location = cfun->function_end_locus;
5640 warning (0, "%H%<noreturn%> function does return", &location);
5641 #else
5642 if (!locus)
5643 locus = &cfun->function_end_locus;
5644 warning (0, "%H%<noreturn%> function does return", locus);
5645 #endif
5648 /* If we see "return;" in some basic block, then we do reach the end
5649 without returning a value. */
5650 else if (warn_return_type
5651 && !TREE_NO_WARNING (cfun->decl)
5652 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
5653 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
5655 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5657 tree last = last_stmt (e->src);
5658 if (TREE_CODE (last) == RETURN_EXPR
5659 && TREE_OPERAND (last, 0) == NULL)
5661 #ifdef USE_MAPPED_LOCATION
5662 location = EXPR_LOCATION (last);
5663 if (location == UNKNOWN_LOCATION)
5664 location = cfun->function_end_locus;
5665 warning (0, "%Hcontrol reaches end of non-void function", &location);
5666 #else
5667 locus = EXPR_LOCUS (last);
5668 if (!locus)
5669 locus = &cfun->function_end_locus;
5670 warning (0, "%Hcontrol reaches end of non-void function", locus);
5671 #endif
5672 TREE_NO_WARNING (cfun->decl) = 1;
5673 break;
5680 /* Given a basic block B which ends with a conditional and has
5681 precisely two successors, determine which of the edges is taken if
5682 the conditional is true and which is taken if the conditional is
5683 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
5685 void
5686 extract_true_false_edges_from_block (basic_block b,
5687 edge *true_edge,
5688 edge *false_edge)
5690 edge e = EDGE_SUCC (b, 0);
5692 if (e->flags & EDGE_TRUE_VALUE)
5694 *true_edge = e;
5695 *false_edge = EDGE_SUCC (b, 1);
5697 else
5699 *false_edge = e;
5700 *true_edge = EDGE_SUCC (b, 1);
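/* [Editorial sketch -- not part of tree-cfg.c.]  A typical caller first
   checks that the block really ends in a COND_EXPR and then picks out
   the edge it is interested in; the helper name is hypothetical.  */

static edge
true_edge_of_cond_block (basic_block bb)
{
  edge true_edge, false_edge;

  gcc_assert (tree_block_ends_with_condjump_p (bb));
  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  return true_edge;
}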
5704 struct tree_opt_pass pass_warn_function_return =
5706 NULL, /* name */
5707 NULL, /* gate */
5708 execute_warn_function_return, /* execute */
5709 NULL, /* sub */
5710 NULL, /* next */
5711 0, /* static_pass_number */
5712 0, /* tv_id */
5713 PROP_cfg, /* properties_required */
5714 0, /* properties_provided */
5715 0, /* properties_destroyed */
5716 0, /* todo_flags_start */
5717 0, /* todo_flags_finish */
5718 0 /* letter */