contrib/gcc-4.1/gcc/tree-cfg.c
1 /* Control flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "output.h"
32 #include "flags.h"
33 #include "function.h"
34 #include "expr.h"
35 #include "ggc.h"
36 #include "langhooks.h"
37 #include "diagnostic.h"
38 #include "tree-flow.h"
39 #include "timevar.h"
40 #include "tree-dump.h"
41 #include "tree-pass.h"
42 #include "toplev.h"
43 #include "except.h"
44 #include "cfgloop.h"
45 #include "cfglayout.h"
46 #include "hashtab.h"
47 #include "tree-ssa-propagate.h"
49 /* This file contains functions for building the Control Flow Graph (CFG)
50 for a function tree. */
52 /* Local declarations. */
54 /* Initial capacity for the basic block array. */
55 static const int initial_cfg_capacity = 20;
57 /* This hash table allows us to efficiently look up all CASE_LABEL_EXPRs
58 which use a particular edge. The CASE_LABEL_EXPRs are chained together
59 via their TREE_CHAIN field, which we clear after we're done with the
60 hash table to prevent problems with duplication of SWITCH_EXPRs.
62 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
63 update the case vector in response to edge redirections.
65 Right now this table is set up and torn down at key points in the
66 compilation process. It would be nice if we could make the table
67 more persistent. The key is getting notification of changes to
68 the CFG (particularly edge removal, creation and redirection). */
70 struct edge_to_cases_elt
72 /* The edge itself. Necessary for hashing and equality tests. */
73 edge e;
75 /* The case labels associated with this edge. We link these up via
76 their TREE_CHAIN field, then we wipe out the TREE_CHAIN fields
77 when we destroy the hash table. This prevents problems when copying
78 SWITCH_EXPRs. */
79 tree case_labels;
82 static htab_t edge_to_cases;
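/* As a rough sketch (simplified from get_cases_for_edge below; no error
   handling, and purely illustrative), a caller wanting every
   CASE_LABEL_EXPR that references edge E could do:

     struct edge_to_cases_elt tmp, *elt;
     tree t;

     tmp.e = e;
     elt = (struct edge_to_cases_elt *) htab_find (edge_to_cases, &tmp);
     if (elt)
       for (t = elt->case_labels; t; t = TREE_CHAIN (t))
         ... inspect or redirect CASE_LABEL (t) ...

   The table hashes on the edge pointer itself, so a stack-allocated
   element with only E filled in is enough for a lookup.  */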
84 /* CFG statistics. */
85 struct cfg_stats_d
87 long num_merged_labels;
90 static struct cfg_stats_d cfg_stats;
92 /* Nonzero if we found a computed goto while building basic blocks. */
93 static bool found_computed_goto;
95 /* Basic blocks and flowgraphs. */
96 static basic_block create_bb (void *, void *, basic_block);
97 static void make_blocks (tree);
98 static void factor_computed_gotos (void);
100 /* Edges. */
101 static void make_edges (void);
102 static void make_ctrl_stmt_edges (basic_block);
103 static void make_exit_edges (basic_block);
104 static void make_cond_expr_edges (basic_block);
105 static void make_switch_expr_edges (basic_block);
106 static void make_goto_expr_edges (basic_block);
107 static edge tree_redirect_edge_and_branch (edge, basic_block);
108 static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
109 static void split_critical_edges (void);
111 /* Various helpers. */
112 static inline bool stmt_starts_bb_p (tree, tree);
113 static int tree_verify_flow_info (void);
114 static void tree_make_forwarder_block (edge);
115 static void tree_cfg2vcg (FILE *);
117 /* Flowgraph optimization and cleanup. */
118 static void tree_merge_blocks (basic_block, basic_block);
119 static bool tree_can_merge_blocks_p (basic_block, basic_block);
120 static void remove_bb (basic_block);
121 static edge find_taken_edge_computed_goto (basic_block, tree);
122 static edge find_taken_edge_cond_expr (basic_block, tree);
123 static edge find_taken_edge_switch_expr (basic_block, tree);
124 static tree find_case_label_for_value (tree, tree);
126 void
127 init_empty_tree_cfg (void)
129 /* Initialize the basic block array. */
130 init_flow ();
131 profile_status = PROFILE_ABSENT;
132 n_basic_blocks = 0;
133 last_basic_block = 0;
134 VARRAY_BB_INIT (basic_block_info, initial_cfg_capacity, "basic_block_info");
136 /* Build a mapping of labels to their associated blocks. */
137 VARRAY_BB_INIT (label_to_block_map, initial_cfg_capacity,
138 "label to block map");
140 ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
141 EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
144 /*---------------------------------------------------------------------------
145 Create basic blocks
146 ---------------------------------------------------------------------------*/
148 /* Entry point to the CFG builder for trees. TP points to the list of
149 statements to be added to the flowgraph. */
151 static void
152 build_tree_cfg (tree *tp)
154 /* Register specific tree functions. */
155 tree_register_cfg_hooks ();
157 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
159 init_empty_tree_cfg ();
161 found_computed_goto = 0;
162 make_blocks (*tp);
164 /* Computed gotos are hell to deal with, especially if there are
165 lots of them with a large number of destinations. So we factor
166 them to a common computed goto location before we build the
167 edge list. After we convert back to normal form, we will un-factor
168 the computed gotos since factoring introduces an unwanted jump. */
169 if (found_computed_goto)
170 factor_computed_gotos ();
172 /* Make sure there is always at least one block, even if it's empty. */
173 if (n_basic_blocks == 0)
174 create_empty_bb (ENTRY_BLOCK_PTR);
176 /* Adjust the size of the array. */
177 VARRAY_GROW (basic_block_info, n_basic_blocks);
179 /* To speed up statement iterator walks, we first purge dead labels. */
180 cleanup_dead_labels ();
182 /* Group case nodes to reduce the number of edges.
183 We do this after cleaning up dead labels because otherwise we miss
184 a lot of obvious case merging opportunities. */
185 group_case_labels ();
187 /* Create the edges of the flowgraph. */
188 make_edges ();
190 /* Debugging dumps. */
192 /* Write the flowgraph to a VCG file. */
194 int local_dump_flags;
195 FILE *dump_file = dump_begin (TDI_vcg, &local_dump_flags);
196 if (dump_file)
198 tree_cfg2vcg (dump_file);
199 dump_end (TDI_vcg, dump_file);
203 #ifdef ENABLE_CHECKING
204 verify_stmts ();
205 #endif
207 /* Dump a textual representation of the flowgraph. */
208 if (dump_file)
209 dump_tree_cfg (dump_file, dump_flags);
212 static void
213 execute_build_cfg (void)
215 build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
218 struct tree_opt_pass pass_build_cfg =
220 "cfg", /* name */
221 NULL, /* gate */
222 execute_build_cfg, /* execute */
223 NULL, /* sub */
224 NULL, /* next */
225 0, /* static_pass_number */
226 TV_TREE_CFG, /* tv_id */
227 PROP_gimple_leh, /* properties_required */
228 PROP_cfg, /* properties_provided */
229 0, /* properties_destroyed */
230 0, /* todo_flags_start */
231 TODO_verify_stmts, /* todo_flags_finish */
232 0 /* letter */
235 /* Search the CFG for any computed gotos. If found, factor them to a
236 common computed goto site. Also record the location of that site so
237 that we can un-factor the gotos after we have converted back to
238 normal form. */
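/* Illustrative sketch (hypothetical input, not taken from a real dump):
   a body containing two computed gotos,

     goto *p;
     ...
     goto *q;

   is factored so that each of them stores its destination into a shared
   temporary and jumps to a single factored block:

     gotovar = p;  goto <factored>;
     ...
     gotovar = q;  goto <factored>;
     ...
     <factored>:  goto *gotovar;

   Only the factored block then needs abnormal edges to every potential
   target label, instead of every original computed goto.  */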
240 static void
241 factor_computed_gotos (void)
243 basic_block bb;
244 tree factored_label_decl = NULL;
245 tree var = NULL;
246 tree factored_computed_goto_label = NULL;
247 tree factored_computed_goto = NULL;
249 /* We know there are one or more computed gotos in this function.
250 Examine the last statement in each basic block to see if the block
251 ends with a computed goto. */
253 FOR_EACH_BB (bb)
255 block_stmt_iterator bsi = bsi_last (bb);
256 tree last;
258 if (bsi_end_p (bsi))
259 continue;
260 last = bsi_stmt (bsi);
262 /* Ignore the computed goto we create when we factor the original
263 computed gotos. */
264 if (last == factored_computed_goto)
265 continue;
267 /* If the last statement is a computed goto, factor it. */
268 if (computed_goto_p (last))
270 tree assignment;
272 /* The first time we find a computed goto we need to create
273 the factored goto block and the variable each original
274 computed goto will use for its goto destination. */
275 if (! factored_computed_goto)
277 basic_block new_bb = create_empty_bb (bb);
278 block_stmt_iterator new_bsi = bsi_start (new_bb);
280 /* Create the destination of the factored goto. Each original
281 computed goto will put its desired destination into this
282 variable and jump to the label we create immediately
283 below. */
284 var = create_tmp_var (ptr_type_node, "gotovar");
286 /* Build a label for the new block which will contain the
287 factored computed goto. */
288 factored_label_decl = create_artificial_label ();
289 factored_computed_goto_label
290 = build1 (LABEL_EXPR, void_type_node, factored_label_decl);
291 bsi_insert_after (&new_bsi, factored_computed_goto_label,
292 BSI_NEW_STMT);
294 /* Build our new computed goto. */
295 factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
296 bsi_insert_after (&new_bsi, factored_computed_goto,
297 BSI_NEW_STMT);
300 /* Copy the original computed goto's destination into VAR. */
301 assignment = build (MODIFY_EXPR, ptr_type_node,
302 var, GOTO_DESTINATION (last));
303 bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);
305 /* And re-vector the computed goto to the new destination. */
306 GOTO_DESTINATION (last) = factored_label_decl;
312 /* Build a flowgraph for the statement_list STMT_LIST. */
314 static void
315 make_blocks (tree stmt_list)
317 tree_stmt_iterator i = tsi_start (stmt_list);
318 tree stmt = NULL;
319 bool start_new_block = true;
320 bool first_stmt_of_list = true;
321 basic_block bb = ENTRY_BLOCK_PTR;
323 while (!tsi_end_p (i))
325 tree prev_stmt;
327 prev_stmt = stmt;
328 stmt = tsi_stmt (i);
330 /* If the statement starts a new basic block or if we have determined
331 in a previous pass that we need to create a new block for STMT, do
332 so now. */
333 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
335 if (!first_stmt_of_list)
336 stmt_list = tsi_split_statement_list_before (&i);
337 bb = create_basic_block (stmt_list, NULL, bb);
338 start_new_block = false;
341 /* Now add STMT to BB and create the subgraphs for special statement
342 codes. */
343 set_bb_for_stmt (stmt, bb);
345 if (computed_goto_p (stmt))
346 found_computed_goto = true;
348 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
349 next iteration. */
350 if (stmt_ends_bb_p (stmt))
351 start_new_block = true;
353 tsi_next (&i);
354 first_stmt_of_list = false;
359 /* Create and return a new empty basic block after bb AFTER. */
361 static basic_block
362 create_bb (void *h, void *e, basic_block after)
364 basic_block bb;
366 gcc_assert (!e);
368 /* Create and initialize a new basic block. Since alloc_block uses
369 ggc_alloc_cleared to allocate a basic block, we do not have to
370 clear the newly allocated basic block here. */
371 bb = alloc_block ();
373 bb->index = last_basic_block;
374 bb->flags = BB_NEW;
375 bb->stmt_list = h ? h : alloc_stmt_list ();
377 /* Add the new block to the linked list of blocks. */
378 link_block (bb, after);
380 /* Grow the basic block array if needed. */
381 if ((size_t) last_basic_block == VARRAY_SIZE (basic_block_info))
383 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
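/* This grows the array by roughly 25%, rounded up: e.g. 20 -> 25,
   25 -> 32, 100 -> 125.  That keeps repeated reallocations cheap
   without grossly over-allocating for small functions.  */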
384 VARRAY_GROW (basic_block_info, new_size);
387 /* Add the newly created block to the array. */
388 BASIC_BLOCK (last_basic_block) = bb;
390 n_basic_blocks++;
391 last_basic_block++;
393 return bb;
397 /*---------------------------------------------------------------------------
398 Edge creation
399 ---------------------------------------------------------------------------*/
401 /* Fold COND_EXPR_COND of each COND_EXPR. */
403 void
404 fold_cond_expr_cond (void)
406 basic_block bb;
408 FOR_EACH_BB (bb)
410 tree stmt = last_stmt (bb);
412 if (stmt
413 && TREE_CODE (stmt) == COND_EXPR)
415 tree cond = fold (COND_EXPR_COND (stmt));
416 if (integer_zerop (cond))
417 COND_EXPR_COND (stmt) = boolean_false_node;
418 else if (integer_onep (cond))
419 COND_EXPR_COND (stmt) = boolean_true_node;
424 /* Join all the blocks in the flowgraph. */
426 static void
427 make_edges (void)
429 basic_block bb;
431 /* Create an edge from entry to the first block with executable
432 statements in it. */
433 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
435 /* Traverse the basic block array placing edges. */
436 FOR_EACH_BB (bb)
438 tree first = first_stmt (bb);
439 tree last = last_stmt (bb);
441 if (first)
443 /* Edges for statements that always alter flow control. */
444 if (is_ctrl_stmt (last))
445 make_ctrl_stmt_edges (bb);
447 /* Edges for statements that sometimes alter flow control. */
448 if (is_ctrl_altering_stmt (last))
449 make_exit_edges (bb);
452 /* Finally, if no edges were created above, this is a regular
453 basic block that only needs a fallthru edge. */
454 if (EDGE_COUNT (bb->succs) == 0)
455 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
458 /* We do not care about fake edges, so remove any that the CFG
459 builder inserted for completeness. */
460 remove_fake_exit_edges ();
462 /* Fold COND_EXPR_COND of each COND_EXPR. */
463 fold_cond_expr_cond ();
465 /* Clean up the graph and warn for unreachable code. */
466 cleanup_tree_cfg ();
470 /* Create edges for control statement at basic block BB. */
472 static void
473 make_ctrl_stmt_edges (basic_block bb)
475 tree last = last_stmt (bb);
477 gcc_assert (last);
478 switch (TREE_CODE (last))
480 case GOTO_EXPR:
481 make_goto_expr_edges (bb);
482 break;
484 case RETURN_EXPR:
485 make_edge (bb, EXIT_BLOCK_PTR, 0);
486 break;
488 case COND_EXPR:
489 make_cond_expr_edges (bb);
490 break;
492 case SWITCH_EXPR:
493 make_switch_expr_edges (bb);
494 break;
496 case RESX_EXPR:
497 make_eh_edges (last);
498 /* Yet another NORETURN hack. */
499 if (EDGE_COUNT (bb->succs) == 0)
500 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
501 break;
503 default:
504 gcc_unreachable ();
509 /* Create exit edges for statements in block BB that alter the flow of
510 control. Statements that alter the control flow are 'goto', 'return'
511 and calls to non-returning functions. */
513 static void
514 make_exit_edges (basic_block bb)
516 tree last = last_stmt (bb), op;
518 gcc_assert (last);
519 switch (TREE_CODE (last))
521 case RESX_EXPR:
522 break;
523 case CALL_EXPR:
524 /* If this function receives a nonlocal goto, then we need to
525 make edges from this call site to all the nonlocal goto
526 handlers. */
527 if (TREE_SIDE_EFFECTS (last)
528 && current_function_has_nonlocal_label)
529 make_goto_expr_edges (bb);
531 /* If this statement has reachable exception handlers, then
532 create abnormal edges to them. */
533 make_eh_edges (last);
535 /* Some calls are known not to return. For such calls we create
536 a fake edge.
538 We really need to revamp how we build edges so that it's not
539 such a bloody pain to avoid creating edges for this case since
540 all we do is remove these edges when we're done building the
541 CFG. */
542 if (call_expr_flags (last) & ECF_NORETURN)
544 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
545 return;
548 /* Don't forget the fall-thru edge. */
549 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
550 break;
552 case MODIFY_EXPR:
553 /* A MODIFY_EXPR may have a CALL_EXPR on its RHS and the CALL_EXPR
554 may have an abnormal edge. Search the RHS for this case and
555 create any required edges. */
556 op = get_call_expr_in (last);
557 if (op && TREE_SIDE_EFFECTS (op)
558 && current_function_has_nonlocal_label)
559 make_goto_expr_edges (bb);
561 make_eh_edges (last);
562 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
563 break;
565 default:
566 gcc_unreachable ();
571 /* Create the edges for a COND_EXPR starting at block BB.
572 At this point, both clauses must contain only simple gotos. */
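/* A sketch of the expected shape (illustrative only): after lowering,
   the block ends in something like

     if (a > b) goto <L1>; else goto <L2>;

   and this function adds an EDGE_TRUE_VALUE edge to the block holding
   <L1> and an EDGE_FALSE_VALUE edge to the block holding <L2>, copying
   the goto locations onto the edges.  */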
574 static void
575 make_cond_expr_edges (basic_block bb)
577 tree entry = last_stmt (bb);
578 basic_block then_bb, else_bb;
579 tree then_label, else_label;
580 edge e;
582 gcc_assert (entry);
583 gcc_assert (TREE_CODE (entry) == COND_EXPR);
585 /* Entry basic blocks for each component. */
586 then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
587 else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
588 then_bb = label_to_block (then_label);
589 else_bb = label_to_block (else_label);
591 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
592 #ifdef USE_MAPPED_LOCATION
593 e->goto_locus = EXPR_LOCATION (COND_EXPR_THEN (entry));
594 #else
595 e->goto_locus = EXPR_LOCUS (COND_EXPR_THEN (entry));
596 #endif
597 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
598 if (e)
600 #ifdef USE_MAPPED_LOCATION
601 e->goto_locus = EXPR_LOCATION (COND_EXPR_ELSE (entry));
602 #else
603 e->goto_locus = EXPR_LOCUS (COND_EXPR_ELSE (entry));
604 #endif
608 /* Hashing routine for EDGE_TO_CASES. */
610 static hashval_t
611 edge_to_cases_hash (const void *p)
613 edge e = ((struct edge_to_cases_elt *)p)->e;
615 /* Hash on the edge itself (which is a pointer). */
616 return htab_hash_pointer (e);
619 /* Equality routine for EDGE_TO_CASES; edges are unique, so testing
620 for equality is just a pointer comparison. */
622 static int
623 edge_to_cases_eq (const void *p1, const void *p2)
625 edge e1 = ((struct edge_to_cases_elt *)p1)->e;
626 edge e2 = ((struct edge_to_cases_elt *)p2)->e;
628 return e1 == e2;
631 /* Called for each element in the hash table (P) as we delete the
632 edge to cases hash table.
634 Clear all the TREE_CHAINs to prevent problems with copying of
635 SWITCH_EXPRs and structure sharing rules, then free the hash table
636 element. */
638 static void
639 edge_to_cases_cleanup (void *p)
641 struct edge_to_cases_elt *elt = p;
642 tree t, next;
644 for (t = elt->case_labels; t; t = next)
646 next = TREE_CHAIN (t);
647 TREE_CHAIN (t) = NULL;
649 free (p);
652 /* Start recording information mapping edges to case labels. */
654 void
655 start_recording_case_labels (void)
657 gcc_assert (edge_to_cases == NULL);
659 edge_to_cases = htab_create (37,
660 edge_to_cases_hash,
661 edge_to_cases_eq,
662 edge_to_cases_cleanup);
665 /* Return nonzero if we are recording information for case labels. */
667 static bool
668 recording_case_labels_p (void)
670 return (edge_to_cases != NULL);
673 /* Stop recording information mapping edges to case labels and
674 remove any information we have recorded. */
675 void
676 end_recording_case_labels (void)
678 htab_delete (edge_to_cases);
679 edge_to_cases = NULL;
682 /* Record that CASE_LABEL (a CASE_LABEL_EXPR) references edge E. */
684 static void
685 record_switch_edge (edge e, tree case_label)
687 struct edge_to_cases_elt *elt;
688 void **slot;
690 /* Build a hash table element so we can see if E is already
691 in the table. */
692 elt = xmalloc (sizeof (struct edge_to_cases_elt));
693 elt->e = e;
694 elt->case_labels = case_label;
696 slot = htab_find_slot (edge_to_cases, elt, INSERT);
698 if (*slot == NULL)
700 /* E was not in the hash table. Install E into the hash table. */
701 *slot = (void *)elt;
703 else
705 /* E was already in the hash table. Free ELT as we do not need it
706 anymore. */
707 free (elt);
709 /* Get the entry stored in the hash table. */
710 elt = (struct edge_to_cases_elt *) *slot;
712 /* Add it to the chain of CASE_LABEL_EXPRs referencing E. */
713 TREE_CHAIN (case_label) = elt->case_labels;
714 elt->case_labels = case_label;
718 /* If we are inside a {start,end}_recording_cases block, then return
719 a chain of CASE_LABEL_EXPRs from T which reference E.
721 Otherwise return NULL. */
723 static tree
724 get_cases_for_edge (edge e, tree t)
726 struct edge_to_cases_elt elt, *elt_p;
727 void **slot;
728 size_t i, n;
729 tree vec;
731 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
732 chains available. Return NULL so the caller can detect this case. */
733 if (!recording_case_labels_p ())
734 return NULL;
736 restart:
737 elt.e = e;
738 elt.case_labels = NULL;
739 slot = htab_find_slot (edge_to_cases, &elt, NO_INSERT);
741 if (slot)
743 elt_p = (struct edge_to_cases_elt *)*slot;
744 return elt_p->case_labels;
747 /* If we did not find E in the hash table, then this must be the first
748 time we have been queried for information about E & T. Add all the
749 elements from T to the hash table then perform the query again. */
751 vec = SWITCH_LABELS (t);
752 n = TREE_VEC_LENGTH (vec);
753 for (i = 0; i < n; i++)
755 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
756 basic_block label_bb = label_to_block (lab);
757 record_switch_edge (find_edge (e->src, label_bb), TREE_VEC_ELT (vec, i));
759 goto restart;
762 /* Create the edges for a SWITCH_EXPR starting at block BB.
763 At this point, the switch body has been lowered and the
764 SWITCH_LABELS filled in, so this is in effect a multi-way branch. */
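/* For illustration (hypothetical source): a lowered

     switch (i) { case 1: goto <La>; case 5 ... 7: goto <Lb>;
                  default: goto <Ld>; }

   carries its CASE_LABEL_EXPRs in the SWITCH_LABELS TREE_VEC, with the
   default entry last; the loop below simply makes one edge from BB to
   the block holding each case label.  */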
766 static void
767 make_switch_expr_edges (basic_block bb)
769 tree entry = last_stmt (bb);
770 size_t i, n;
771 tree vec;
773 vec = SWITCH_LABELS (entry);
774 n = TREE_VEC_LENGTH (vec);
776 for (i = 0; i < n; ++i)
778 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
779 basic_block label_bb = label_to_block (lab);
780 make_edge (bb, label_bb, 0);
785 /* Return the basic block holding label DEST. */
787 basic_block
788 label_to_block_fn (struct function *ifun, tree dest)
790 int uid = LABEL_DECL_UID (dest);
792 /* We would die hard when faced with an undefined label. Emit a label to
793 the very first basic block. This will hopefully make even the dataflow
794 and undefined-variable warnings come out right.
795 if ((errorcount || sorrycount) && uid < 0)
797 block_stmt_iterator bsi = bsi_start (BASIC_BLOCK (0));
798 tree stmt;
800 stmt = build1 (LABEL_EXPR, void_type_node, dest);
801 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
802 uid = LABEL_DECL_UID (dest);
804 if (VARRAY_SIZE (ifun->cfg->x_label_to_block_map) <= (unsigned int)uid)
805 return NULL;
806 return VARRAY_BB (ifun->cfg->x_label_to_block_map, uid);
809 /* Create edges for a goto statement at block BB. */
811 static void
812 make_goto_expr_edges (basic_block bb)
814 tree goto_t;
815 basic_block target_bb;
816 int for_call;
817 block_stmt_iterator last = bsi_last (bb);
819 goto_t = bsi_stmt (last);
821 /* If the last statement is not a GOTO (i.e., it is a RETURN_EXPR,
822 CALL_EXPR or MODIFY_EXPR), then the edge is an abnormal edge resulting
823 from a nonlocal goto. */
824 if (TREE_CODE (goto_t) != GOTO_EXPR)
825 for_call = 1;
826 else
828 tree dest = GOTO_DESTINATION (goto_t);
829 for_call = 0;
831 /* A GOTO to a local label creates normal edges. */
832 if (simple_goto_p (goto_t))
834 edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
835 #ifdef USE_MAPPED_LOCATION
836 e->goto_locus = EXPR_LOCATION (goto_t);
837 #else
838 e->goto_locus = EXPR_LOCUS (goto_t);
839 #endif
840 bsi_remove (&last);
841 return;
844 /* Nothing more to do for nonlocal gotos. */
845 if (TREE_CODE (dest) == LABEL_DECL)
846 return;
848 /* Computed gotos remain. */
851 /* Look for the block starting with the destination label. In the
852 case of a computed goto, make an edge to any label block we find
853 in the CFG. */
854 FOR_EACH_BB (target_bb)
856 block_stmt_iterator bsi;
858 for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
860 tree target = bsi_stmt (bsi);
862 if (TREE_CODE (target) != LABEL_EXPR)
863 break;
865 if (
866 /* Computed GOTOs. Make an edge to every label block that has
867 been marked as a potential target for a computed goto. */
868 (FORCED_LABEL (LABEL_EXPR_LABEL (target)) && for_call == 0)
869 /* Nonlocal GOTO target. Make an edge to every label block
870 that has been marked as a potential target for a nonlocal
871 goto. */
872 || (DECL_NONLOCAL (LABEL_EXPR_LABEL (target)) && for_call == 1))
874 make_edge (bb, target_bb, EDGE_ABNORMAL);
875 break;
880 /* Degenerate case of computed goto with no labels. */
881 if (!for_call && EDGE_COUNT (bb->succs) == 0)
882 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
886 /*---------------------------------------------------------------------------
887 Flowgraph analysis
888 ---------------------------------------------------------------------------*/
890 /* Cleanup useless labels in basic blocks. This is something we wish
891 to do early because it allows us to group case labels before creating
892 the edges for the CFG, and it speeds up block statement iterators in
893 all passes later on.
894 We only run this pass once; running it more than once is probably not
895 profitable. */
897 /* A map from basic block index to the leading label of that block. */
898 static tree *label_for_bb;
900 /* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
901 static void
902 update_eh_label (struct eh_region *region)
904 tree old_label = get_eh_region_tree_label (region);
905 if (old_label)
907 tree new_label;
908 basic_block bb = label_to_block (old_label);
910 /* ??? After optimizing, there may be EH regions with labels
911 that have already been removed from the function body, so
912 there is no basic block for them. */
913 if (! bb)
914 return;
916 new_label = label_for_bb[bb->index];
917 set_eh_region_tree_label (region, new_label);
921 /* Given LABEL return the first label in the same basic block. */
922 static tree
923 main_block_label (tree label)
925 basic_block bb = label_to_block (label);
927 /* label_to_block possibly inserted an undefined label into the chain. */
928 if (!label_for_bb[bb->index])
929 label_for_bb[bb->index] = label;
930 return label_for_bb[bb->index];
933 /* Cleanup redundant labels. This is a three-step process:
934 1) Find the leading label for each block.
935 2) Redirect all references to labels to the leading labels.
936 3) Cleanup all useless labels. */
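/* A small worked example (hypothetical): if a block begins with

     <D1234>:;              artificial
     myloop:;               user-defined
     <D5678>:;              artificial

   then "myloop" becomes the leading label, every goto, switch case and
   EH region label that names <D1234> or <D5678> is redirected to it,
   and the two artificial labels are deleted unless they are forced or
   the target of a non-local goto.  */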
938 void
939 cleanup_dead_labels (void)
941 basic_block bb;
942 label_for_bb = xcalloc (last_basic_block, sizeof (tree));
944 /* Find a suitable label for each block. We use the first user-defined
945 label if there is one, or otherwise just the first label we see. */
946 FOR_EACH_BB (bb)
948 block_stmt_iterator i;
950 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
952 tree label, stmt = bsi_stmt (i);
954 if (TREE_CODE (stmt) != LABEL_EXPR)
955 break;
957 label = LABEL_EXPR_LABEL (stmt);
959 /* If we have not yet seen a label for the current block,
960 remember this one and see if there are more labels. */
961 if (! label_for_bb[bb->index])
963 label_for_bb[bb->index] = label;
964 continue;
967 /* If we did see a label for the current block already, but it
968 is an artificially created label, replace it if the current
969 label is a user defined label. */
970 if (! DECL_ARTIFICIAL (label)
971 && DECL_ARTIFICIAL (label_for_bb[bb->index]))
973 label_for_bb[bb->index] = label;
974 break;
979 /* Now redirect all jumps/branches to the selected label.
980 First do so for each block ending in a control statement. */
981 FOR_EACH_BB (bb)
983 tree stmt = last_stmt (bb);
984 if (!stmt)
985 continue;
987 switch (TREE_CODE (stmt))
989 case COND_EXPR:
991 tree true_branch, false_branch;
993 true_branch = COND_EXPR_THEN (stmt);
994 false_branch = COND_EXPR_ELSE (stmt);
996 GOTO_DESTINATION (true_branch)
997 = main_block_label (GOTO_DESTINATION (true_branch));
998 GOTO_DESTINATION (false_branch)
999 = main_block_label (GOTO_DESTINATION (false_branch));
1001 break;
1004 case SWITCH_EXPR:
1006 size_t i;
1007 tree vec = SWITCH_LABELS (stmt);
1008 size_t n = TREE_VEC_LENGTH (vec);
1010 /* Replace all destination labels. */
1011 for (i = 0; i < n; ++i)
1013 tree elt = TREE_VEC_ELT (vec, i);
1014 tree label = main_block_label (CASE_LABEL (elt));
1015 CASE_LABEL (elt) = label;
1017 break;
1020 /* We have to handle GOTO_EXPRs until they're removed, and we don't
1021 remove them until after we've created the CFG edges. */
1022 case GOTO_EXPR:
1023 if (! computed_goto_p (stmt))
1025 GOTO_DESTINATION (stmt)
1026 = main_block_label (GOTO_DESTINATION (stmt));
1027 break;
1030 default:
1031 break;
1035 for_each_eh_region (update_eh_label);
1037 /* Finally, purge dead labels. All user-defined labels and labels that
1038 can be the target of non-local gotos and labels which have their
1039 address taken are preserved. */
1040 FOR_EACH_BB (bb)
1042 block_stmt_iterator i;
1043 tree label_for_this_bb = label_for_bb[bb->index];
1045 if (! label_for_this_bb)
1046 continue;
1048 for (i = bsi_start (bb); !bsi_end_p (i); )
1050 tree label, stmt = bsi_stmt (i);
1052 if (TREE_CODE (stmt) != LABEL_EXPR)
1053 break;
1055 label = LABEL_EXPR_LABEL (stmt);
1057 if (label == label_for_this_bb
1058 || ! DECL_ARTIFICIAL (label)
1059 || DECL_NONLOCAL (label)
1060 || FORCED_LABEL (label))
1061 bsi_next (&i);
1062 else
1063 bsi_remove (&i);
1067 free (label_for_bb);
1070 /* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1071 and scan the sorted vector of cases. Combine the ones jumping to the
1072 same label.
1073 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
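/* In C terms (hypothetical source), something like

     switch (x) { case 1: case 2: case 3: return f (); ... }

   yields three CASE_LABEL_EXPRs with the same destination and
   consecutive values, which the loop below collapses into a single
   "case 1 ... 3:" entry and shrinks the label vector accordingly.  */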
1075 void
1076 group_case_labels (void)
1078 basic_block bb;
1080 FOR_EACH_BB (bb)
1082 tree stmt = last_stmt (bb);
1083 if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
1085 tree labels = SWITCH_LABELS (stmt);
1086 int old_size = TREE_VEC_LENGTH (labels);
1087 int i, j, new_size = old_size;
1088 tree default_case = TREE_VEC_ELT (labels, old_size - 1);
1089 tree default_label;
1091 /* The default label is always the last case in a switch
1092 statement after gimplification. */
1093 default_label = CASE_LABEL (default_case);
1095 /* Look for possible opportunities to merge cases.
1096 Ignore the last element of the label vector because it
1097 must be the default case. */
1098 i = 0;
1099 while (i < old_size - 1)
1101 tree base_case, base_label, base_high;
1102 base_case = TREE_VEC_ELT (labels, i);
1104 gcc_assert (base_case);
1105 base_label = CASE_LABEL (base_case);
1107 /* Discard cases that have the same destination as the
1108 default case. */
1109 if (base_label == default_label)
1111 TREE_VEC_ELT (labels, i) = NULL_TREE;
1112 i++;
1113 new_size--;
1114 continue;
1117 base_high = CASE_HIGH (base_case) ?
1118 CASE_HIGH (base_case) : CASE_LOW (base_case);
1119 i++;
1120 /* Try to merge case labels. Break out when we reach the end
1121 of the label vector or when we cannot merge the next case
1122 label with the current one. */
1123 while (i < old_size - 1)
1125 tree merge_case = TREE_VEC_ELT (labels, i);
1126 tree merge_label = CASE_LABEL (merge_case);
1127 tree t = int_const_binop (PLUS_EXPR, base_high,
1128 integer_one_node, 1);
1130 /* Merge the cases if they jump to the same place,
1131 and their ranges are consecutive. */
1132 if (merge_label == base_label
1133 && tree_int_cst_equal (CASE_LOW (merge_case), t))
1135 base_high = CASE_HIGH (merge_case) ?
1136 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1137 CASE_HIGH (base_case) = base_high;
1138 TREE_VEC_ELT (labels, i) = NULL_TREE;
1139 new_size--;
1140 i++;
1142 else
1143 break;
1147 /* Compress the case labels in the label vector, and adjust the
1148 length of the vector. */
1149 for (i = 0, j = 0; i < new_size; i++)
1151 while (! TREE_VEC_ELT (labels, j))
1152 j++;
1153 TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
1155 TREE_VEC_LENGTH (labels) = new_size;
1160 /* Checks whether we can merge block B into block A. */
1162 static bool
1163 tree_can_merge_blocks_p (basic_block a, basic_block b)
1165 tree stmt;
1166 block_stmt_iterator bsi;
1167 tree phi;
1169 if (!single_succ_p (a))
1170 return false;
1172 if (single_succ_edge (a)->flags & EDGE_ABNORMAL)
1173 return false;
1175 if (single_succ (a) != b)
1176 return false;
1178 if (!single_pred_p (b))
1179 return false;
1181 if (b == EXIT_BLOCK_PTR)
1182 return false;
1184 /* If A ends by a statement causing exceptions or something similar, we
1185 cannot merge the blocks. */
1186 stmt = last_stmt (a);
1187 if (stmt && stmt_ends_bb_p (stmt))
1188 return false;
1190 /* Do not allow a block with only a non-local label to be merged. */
1191 if (stmt && TREE_CODE (stmt) == LABEL_EXPR
1192 && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
1193 return false;
1195 /* It must be possible to eliminate all phi nodes in B. If ssa form
1196 is not up-to-date, we cannot eliminate any phis. */
1197 phi = phi_nodes (b);
1198 if (phi)
1200 if (need_ssa_update_p ())
1201 return false;
1203 for (; phi; phi = PHI_CHAIN (phi))
1204 if (!is_gimple_reg (PHI_RESULT (phi))
1205 && !may_propagate_copy (PHI_RESULT (phi), PHI_ARG_DEF (phi, 0)))
1206 return false;
1209 /* Do not remove user labels. */
1210 for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
1212 stmt = bsi_stmt (bsi);
1213 if (TREE_CODE (stmt) != LABEL_EXPR)
1214 break;
1215 if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
1216 return false;
1219 /* Protect the loop latches. */
1220 if (current_loops
1221 && b->loop_father->latch == b)
1222 return false;
1224 return true;
1227 /* Replaces all uses of NAME by VAL. */
1229 void
1230 replace_uses_by (tree name, tree val)
1232 imm_use_iterator imm_iter;
1233 use_operand_p use;
1234 tree stmt;
1235 edge e;
1236 unsigned i;
1237 VEC(tree,heap) *stmts = VEC_alloc (tree, heap, 20);
1239 FOR_EACH_IMM_USE_SAFE (use, imm_iter, name)
1241 stmt = USE_STMT (use);
1242 replace_exp (use, val);
1244 if (TREE_CODE (stmt) == PHI_NODE)
1246 e = PHI_ARG_EDGE (stmt, PHI_ARG_INDEX_FROM_USE (use));
1247 if (e->flags & EDGE_ABNORMAL)
1249 /* This can only occur for virtual operands, since
1250 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1251 would prevent replacement. */
1252 gcc_assert (!is_gimple_reg (name));
1253 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1256 else
1257 VEC_safe_push (tree, heap, stmts, stmt);
1260 /* We do not update the statements in the loop above. Consider
1261 x = w * w;
1263 If we performed the update in the first loop, the statement
1264 would be rescanned after the first occurrence of w is replaced,
1265 the new uses would be placed at the beginning of the list,
1266 and we would never process them. */
1267 for (i = 0; VEC_iterate (tree, stmts, i, stmt); i++)
1269 tree rhs;
1271 fold_stmt_inplace (stmt);
1273 rhs = get_rhs (stmt);
1274 if (TREE_CODE (rhs) == ADDR_EXPR)
1275 recompute_tree_invarant_for_addr_expr (rhs);
1277 maybe_clean_or_replace_eh_stmt (stmt, stmt);
1278 mark_new_vars_to_rename (stmt);
1281 VEC_free (tree, heap, stmts);
1283 /* Also update the trees stored in loop structures. */
1284 if (current_loops)
1286 struct loop *loop;
1288 for (i = 0; i < current_loops->num; i++)
1290 loop = current_loops->parray[i];
1291 if (loop)
1292 substitute_in_loop_info (loop, name, val);
1297 /* Merge block B into block A. */
1299 static void
1300 tree_merge_blocks (basic_block a, basic_block b)
1302 block_stmt_iterator bsi;
1303 tree_stmt_iterator last;
1304 tree phi;
1306 if (dump_file)
1307 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1309 /* Remove all single-valued PHI nodes from block B of the form
1310 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
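/* E.g. (illustrative): a PHI such as  x_5 = PHI <x_3(A)>  in B either
   has x_3 propagated into every use of x_5, or, when propagation is
   not safe, is turned into an explicit copy  x_5 = x_3  appended at
   the end of A.  */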
1311 bsi = bsi_last (a);
1312 for (phi = phi_nodes (b); phi; phi = phi_nodes (b))
1314 tree def = PHI_RESULT (phi), use = PHI_ARG_DEF (phi, 0);
1315 tree copy;
1316 bool may_replace_uses = may_propagate_copy (def, use);
1318 /* In case we have loops to care about, do not propagate arguments of
1319 loop closed ssa phi nodes. */
1320 if (current_loops
1321 && is_gimple_reg (def)
1322 && TREE_CODE (use) == SSA_NAME
1323 && a->loop_father != b->loop_father)
1324 may_replace_uses = false;
1326 if (!may_replace_uses)
1328 gcc_assert (is_gimple_reg (def));
1330 /* Note that just emitting the copies is fine -- there is no problem
1331 with ordering of phi nodes. This is because A is the single
1332 predecessor of B, therefore results of the phi nodes cannot
1333 appear as arguments of the phi nodes. */
1334 copy = build2 (MODIFY_EXPR, void_type_node, def, use);
1335 bsi_insert_after (&bsi, copy, BSI_NEW_STMT);
1336 SET_PHI_RESULT (phi, NULL_TREE);
1337 SSA_NAME_DEF_STMT (def) = copy;
1339 else
1340 replace_uses_by (def, use);
1342 remove_phi_node (phi, NULL);
1345 /* Ensure that B follows A. */
1346 move_block_after (b, a);
1348 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1349 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1351 /* Remove labels from B and set bb_for_stmt to A for other statements. */
1352 for (bsi = bsi_start (b); !bsi_end_p (bsi);)
1354 if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
1356 tree label = bsi_stmt (bsi);
1358 bsi_remove (&bsi);
1359 /* Now that we can thread computed gotos, we might have
1360 a situation where we have a forced label in block B.
1361 However, the label at the start of block B might still be
1362 used in other ways (think about the runtime checking for
1363 Fortran assigned gotos). So we cannot just delete the
1364 label. Instead we move the label to the start of block A. */
1365 if (FORCED_LABEL (LABEL_EXPR_LABEL (label)))
1367 block_stmt_iterator dest_bsi = bsi_start (a);
1368 bsi_insert_before (&dest_bsi, label, BSI_NEW_STMT);
1371 else
1373 set_bb_for_stmt (bsi_stmt (bsi), a);
1374 bsi_next (&bsi);
1378 /* Merge the chains. */
1379 last = tsi_last (a->stmt_list);
1380 tsi_link_after (&last, b->stmt_list, TSI_NEW_STMT);
1381 b->stmt_list = NULL;
1385 /* Return the one of the two successors of BB that is not reached by a
1386 complex edge, if there is one. Else, return BB. We use
1387 this in optimizations that use post-dominators for their heuristics,
1388 to catch the cases in C++ where function calls are involved. */
1390 basic_block
1391 single_noncomplex_succ (basic_block bb)
1393 edge e0, e1;
1394 if (EDGE_COUNT (bb->succs) != 2)
1395 return bb;
1397 e0 = EDGE_SUCC (bb, 0);
1398 e1 = EDGE_SUCC (bb, 1);
1399 if (e0->flags & EDGE_COMPLEX)
1400 return e1->dest;
1401 if (e1->flags & EDGE_COMPLEX)
1402 return e0->dest;
1404 return bb;
1409 /* Walk the function tree removing unnecessary statements.
1411 * Empty statement nodes are removed
1413 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1415 * Unnecessary COND_EXPRs are removed
1417 * Some unnecessary BIND_EXPRs are removed
1419 Clearly more work could be done. The trick is doing the analysis
1420 and removal fast enough to be a net improvement in compile times.
1422 Note that when we remove a control structure such as a COND_EXPR,
1423 BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1424 to ensure we eliminate all the useless code. */
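/* An illustrative example (not from a real dump): after gimplification
   a fragment such as

     if (0) { foo (); } else {}
     try {} finally { bar (); }

   leaves a COND_EXPR whose condition is constant and a TRY_FINALLY_EXPR
   with an empty body; the helpers below replace the COND_EXPR by its
   live (here empty) arm and emit the FINALLY body without the wrapper.
   Because removing one wrapper can expose another opportunity,
   rus_data.repeat asks the driver to run the walk again.  */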
1426 struct rus_data
1428 tree *last_goto;
1429 bool repeat;
1430 bool may_throw;
1431 bool may_branch;
1432 bool has_label;
1435 static void remove_useless_stmts_1 (tree *, struct rus_data *);
1437 static bool
1438 remove_useless_stmts_warn_notreached (tree stmt)
1440 if (EXPR_HAS_LOCATION (stmt))
1442 location_t loc = EXPR_LOCATION (stmt);
1443 if (LOCATION_LINE (loc) > 0)
1445 warning (0, "%Hwill never be executed", &loc);
1446 return true;
1450 switch (TREE_CODE (stmt))
1452 case STATEMENT_LIST:
1454 tree_stmt_iterator i;
1455 for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
1456 if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
1457 return true;
1459 break;
1461 case COND_EXPR:
1462 if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
1463 return true;
1464 if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
1465 return true;
1466 if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
1467 return true;
1468 break;
1470 case TRY_FINALLY_EXPR:
1471 case TRY_CATCH_EXPR:
1472 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
1473 return true;
1474 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
1475 return true;
1476 break;
1478 case CATCH_EXPR:
1479 return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
1480 case EH_FILTER_EXPR:
1481 return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
1482 case BIND_EXPR:
1483 return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));
1485 default:
1486 /* Not a live container. */
1487 break;
1490 return false;
1493 static void
1494 remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
1496 tree then_clause, else_clause, cond;
1497 bool save_has_label, then_has_label, else_has_label;
1499 save_has_label = data->has_label;
1500 data->has_label = false;
1501 data->last_goto = NULL;
1503 remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);
1505 then_has_label = data->has_label;
1506 data->has_label = false;
1507 data->last_goto = NULL;
1509 remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);
1511 else_has_label = data->has_label;
1512 data->has_label = save_has_label | then_has_label | else_has_label;
1514 then_clause = COND_EXPR_THEN (*stmt_p);
1515 else_clause = COND_EXPR_ELSE (*stmt_p);
1516 cond = fold (COND_EXPR_COND (*stmt_p));
1518 /* If neither arm does anything at all, we can remove the whole IF. */
1519 if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
1521 *stmt_p = build_empty_stmt ();
1522 data->repeat = true;
1525 /* If there are no reachable statements in an arm, then we can
1526 zap the entire conditional. */
1527 else if (integer_nonzerop (cond) && !else_has_label)
1529 if (warn_notreached)
1530 remove_useless_stmts_warn_notreached (else_clause);
1531 *stmt_p = then_clause;
1532 data->repeat = true;
1534 else if (integer_zerop (cond) && !then_has_label)
1536 if (warn_notreached)
1537 remove_useless_stmts_warn_notreached (then_clause);
1538 *stmt_p = else_clause;
1539 data->repeat = true;
1542 /* Check a couple of simple things on then/else with single stmts. */
1543 else
1545 tree then_stmt = expr_only (then_clause);
1546 tree else_stmt = expr_only (else_clause);
1548 /* Notice branches to a common destination. */
1549 if (then_stmt && else_stmt
1550 && TREE_CODE (then_stmt) == GOTO_EXPR
1551 && TREE_CODE (else_stmt) == GOTO_EXPR
1552 && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
1554 *stmt_p = then_stmt;
1555 data->repeat = true;
1558 /* If the THEN/ELSE clause merely assigns a value to a variable or
1559 parameter which is already known to contain that value, then
1560 remove the useless THEN/ELSE clause. */
1561 else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1563 if (else_stmt
1564 && TREE_CODE (else_stmt) == MODIFY_EXPR
1565 && TREE_OPERAND (else_stmt, 0) == cond
1566 && integer_zerop (TREE_OPERAND (else_stmt, 1)))
1567 COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
1569 else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
1570 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1571 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1572 && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
1574 tree stmt = (TREE_CODE (cond) == EQ_EXPR
1575 ? then_stmt : else_stmt);
1576 tree *location = (TREE_CODE (cond) == EQ_EXPR
1577 ? &COND_EXPR_THEN (*stmt_p)
1578 : &COND_EXPR_ELSE (*stmt_p));
1580 if (stmt
1581 && TREE_CODE (stmt) == MODIFY_EXPR
1582 && TREE_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
1583 && TREE_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
1584 *location = alloc_stmt_list ();
1588 /* Protect GOTOs in the arm of COND_EXPRs from being removed. They
1589 would be re-introduced during lowering. */
1590 data->last_goto = NULL;
1594 static void
1595 remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
1597 bool save_may_branch, save_may_throw;
1598 bool this_may_branch, this_may_throw;
1600 /* Collect may_branch and may_throw information for the body only. */
1601 save_may_branch = data->may_branch;
1602 save_may_throw = data->may_throw;
1603 data->may_branch = false;
1604 data->may_throw = false;
1605 data->last_goto = NULL;
1607 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1609 this_may_branch = data->may_branch;
1610 this_may_throw = data->may_throw;
1611 data->may_branch |= save_may_branch;
1612 data->may_throw |= save_may_throw;
1613 data->last_goto = NULL;
1615 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1617 /* If the body is empty, then we can emit the FINALLY block without
1618 the enclosing TRY_FINALLY_EXPR. */
1619 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
1621 *stmt_p = TREE_OPERAND (*stmt_p, 1);
1622 data->repeat = true;
1625 /* If the handler is empty, then we can emit the TRY block without
1626 the enclosing TRY_FINALLY_EXPR. */
1627 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1629 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1630 data->repeat = true;
1633 /* If the body neither throws, nor branches, then we can safely
1634 string the TRY and FINALLY blocks together. */
1635 else if (!this_may_branch && !this_may_throw)
1637 tree stmt = *stmt_p;
1638 *stmt_p = TREE_OPERAND (stmt, 0);
1639 append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
1640 data->repeat = true;
1645 static void
1646 remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
1648 bool save_may_throw, this_may_throw;
1649 tree_stmt_iterator i;
1650 tree stmt;
1652 /* Collect may_throw information for the body only. */
1653 save_may_throw = data->may_throw;
1654 data->may_throw = false;
1655 data->last_goto = NULL;
1657 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1659 this_may_throw = data->may_throw;
1660 data->may_throw = save_may_throw;
1662 /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
1663 if (!this_may_throw)
1665 if (warn_notreached)
1666 remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
1667 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1668 data->repeat = true;
1669 return;
1672 /* Process the catch clause specially. We may be able to tell that
1673 no exceptions propagate past this point. */
1675 this_may_throw = true;
1676 i = tsi_start (TREE_OPERAND (*stmt_p, 1));
1677 stmt = tsi_stmt (i);
1678 data->last_goto = NULL;
1680 switch (TREE_CODE (stmt))
1682 case CATCH_EXPR:
1683 for (; !tsi_end_p (i); tsi_next (&i))
1685 stmt = tsi_stmt (i);
1686 /* If we catch all exceptions, then the body does not
1687 propagate exceptions past this point. */
1688 if (CATCH_TYPES (stmt) == NULL)
1689 this_may_throw = false;
1690 data->last_goto = NULL;
1691 remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
1693 break;
1695 case EH_FILTER_EXPR:
1696 if (EH_FILTER_MUST_NOT_THROW (stmt))
1697 this_may_throw = false;
1698 else if (EH_FILTER_TYPES (stmt) == NULL)
1699 this_may_throw = false;
1700 remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
1701 break;
1703 default:
1704 /* Otherwise this is a cleanup. */
1705 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1707 /* If the cleanup is empty, then we can emit the TRY block without
1708 the enclosing TRY_CATCH_EXPR. */
1709 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1711 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1712 data->repeat = true;
1714 break;
1716 data->may_throw |= this_may_throw;
1720 static void
1721 remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
1723 tree block;
1725 /* First remove anything underneath the BIND_EXPR. */
1726 remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);
1728 /* If the BIND_EXPR has no variables, then we can pull everything
1729 up one level and remove the BIND_EXPR, unless this is the toplevel
1730 BIND_EXPR for the current function or an inlined function.
1732 When this situation occurs we will want to apply this
1733 optimization again. */
1734 block = BIND_EXPR_BLOCK (*stmt_p);
1735 if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
1736 && *stmt_p != DECL_SAVED_TREE (current_function_decl)
1737 && (! block
1738 || ! BLOCK_ABSTRACT_ORIGIN (block)
1739 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1740 != FUNCTION_DECL)))
1742 *stmt_p = BIND_EXPR_BODY (*stmt_p);
1743 data->repeat = true;
1748 static void
1749 remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
1751 tree dest = GOTO_DESTINATION (*stmt_p);
1753 data->may_branch = true;
1754 data->last_goto = NULL;
1756 /* Record the last goto expr, so that we can delete it if unnecessary. */
1757 if (TREE_CODE (dest) == LABEL_DECL)
1758 data->last_goto = stmt_p;
1762 static void
1763 remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
1765 tree label = LABEL_EXPR_LABEL (*stmt_p);
1767 data->has_label = true;
1769 /* We do want to jump across non-local label receiver code. */
1770 if (DECL_NONLOCAL (label))
1771 data->last_goto = NULL;
1773 else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
1775 *data->last_goto = build_empty_stmt ();
1776 data->repeat = true;
1779 /* ??? Add something here to delete unused labels. */
1783 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
1784 decl. This allows us to eliminate redundant or useless
1785 calls to "const" functions.
1787 The gimplifier already does the same operation, but we may notice functions
1788 being const or pure only after their calls have been gimplified, so we need
1789 to update the flags. */
1791 static void
1792 update_call_expr_flags (tree call)
1794 tree decl = get_callee_fndecl (call);
1795 if (!decl)
1796 return;
1797 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1798 TREE_SIDE_EFFECTS (call) = 0;
1799 if (TREE_NOTHROW (decl))
1800 TREE_NOTHROW (call) = 1;
1804 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1806 void
1807 notice_special_calls (tree t)
1809 int flags = call_expr_flags (t);
1811 if (flags & ECF_MAY_BE_ALLOCA)
1812 current_function_calls_alloca = true;
1813 if (flags & ECF_RETURNS_TWICE)
1814 current_function_calls_setjmp = true;
1818 /* Clear flags set by notice_special_calls. Used by dead code removal
1819 to update the flags. */
1821 void
1822 clear_special_calls (void)
1824 current_function_calls_alloca = false;
1825 current_function_calls_setjmp = false;
1829 static void
1830 remove_useless_stmts_1 (tree *tp, struct rus_data *data)
1832 tree t = *tp, op;
1834 switch (TREE_CODE (t))
1836 case COND_EXPR:
1837 remove_useless_stmts_cond (tp, data);
1838 break;
1840 case TRY_FINALLY_EXPR:
1841 remove_useless_stmts_tf (tp, data);
1842 break;
1844 case TRY_CATCH_EXPR:
1845 remove_useless_stmts_tc (tp, data);
1846 break;
1848 case BIND_EXPR:
1849 remove_useless_stmts_bind (tp, data);
1850 break;
1852 case GOTO_EXPR:
1853 remove_useless_stmts_goto (tp, data);
1854 break;
1856 case LABEL_EXPR:
1857 remove_useless_stmts_label (tp, data);
1858 break;
1860 case RETURN_EXPR:
1861 fold_stmt (tp);
1862 data->last_goto = NULL;
1863 data->may_branch = true;
1864 break;
1866 case CALL_EXPR:
1867 fold_stmt (tp);
1868 data->last_goto = NULL;
1869 notice_special_calls (t);
1870 update_call_expr_flags (t);
1871 if (tree_could_throw_p (t))
1872 data->may_throw = true;
1873 break;
1875 case MODIFY_EXPR:
1876 data->last_goto = NULL;
1877 fold_stmt (tp);
1878 op = get_call_expr_in (t);
1879 if (op)
1881 update_call_expr_flags (op);
1882 notice_special_calls (op);
1884 if (tree_could_throw_p (t))
1885 data->may_throw = true;
1886 break;
1888 case STATEMENT_LIST:
1890 tree_stmt_iterator i = tsi_start (t);
1891 while (!tsi_end_p (i))
1893 t = tsi_stmt (i);
1894 if (IS_EMPTY_STMT (t))
1896 tsi_delink (&i);
1897 continue;
1900 remove_useless_stmts_1 (tsi_stmt_ptr (i), data);
1902 t = tsi_stmt (i);
1903 if (TREE_CODE (t) == STATEMENT_LIST)
1905 tsi_link_before (&i, t, TSI_SAME_STMT);
1906 tsi_delink (&i);
1908 else
1909 tsi_next (&i);
1912 break;
1913 case ASM_EXPR:
1914 fold_stmt (tp);
1915 data->last_goto = NULL;
1916 break;
1918 default:
1919 data->last_goto = NULL;
1920 break;
1924 static void
1925 remove_useless_stmts (void)
1927 struct rus_data data;
1929 clear_special_calls ();
1933 memset (&data, 0, sizeof (data));
1934 remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
1936 while (data.repeat);
1940 struct tree_opt_pass pass_remove_useless_stmts =
1942 "useless", /* name */
1943 NULL, /* gate */
1944 remove_useless_stmts, /* execute */
1945 NULL, /* sub */
1946 NULL, /* next */
1947 0, /* static_pass_number */
1948 0, /* tv_id */
1949 PROP_gimple_any, /* properties_required */
1950 0, /* properties_provided */
1951 0, /* properties_destroyed */
1952 0, /* todo_flags_start */
1953 TODO_dump_func, /* todo_flags_finish */
1954 0 /* letter */
1957 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1959 static void
1960 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1962 tree phi;
1964 /* Since this block is no longer reachable, we can just delete all
1965 of its PHI nodes. */
1966 phi = phi_nodes (bb);
1967 while (phi)
1969 tree next = PHI_CHAIN (phi);
1970 remove_phi_node (phi, NULL_TREE);
1971 phi = next;
1974 /* Remove edges to BB's successors. */
1975 while (EDGE_COUNT (bb->succs) > 0)
1976 remove_edge (EDGE_SUCC (bb, 0));
1980 /* Remove statements of basic block BB. */
1982 static void
1983 remove_bb (basic_block bb)
1985 block_stmt_iterator i;
1986 #ifdef USE_MAPPED_LOCATION
1987 source_location loc = UNKNOWN_LOCATION;
1988 #else
1989 source_locus loc = 0;
1990 #endif
1992 if (dump_file)
1994 fprintf (dump_file, "Removing basic block %d\n", bb->index);
1995 if (dump_flags & TDF_DETAILS)
1997 dump_bb (bb, dump_file, 0);
1998 fprintf (dump_file, "\n");
2002 /* If we remove the header or the latch of a loop, mark the loop for
2003 removal by setting its header and latch to NULL. */
2004 if (current_loops)
2006 struct loop *loop = bb->loop_father;
2008 if (loop->latch == bb
2009 || loop->header == bb)
2011 loop->latch = NULL;
2012 loop->header = NULL;
2014 /* Also clean up the information associated with the loop. Updating
2015 it would waste time. More importantly, it may refer to ssa
2016 names that were defined in other removed basic blocks -- these
2017 ssa names are now removed and invalid. */
2018 free_numbers_of_iterations_estimates_loop (loop);
2022 /* Remove all the instructions in the block. */
2023 for (i = bsi_start (bb); !bsi_end_p (i);)
2025 tree stmt = bsi_stmt (i);
2026 if (TREE_CODE (stmt) == LABEL_EXPR
2027 && (FORCED_LABEL (LABEL_EXPR_LABEL (stmt))
2028 || DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt))))
2030 basic_block new_bb;
2031 block_stmt_iterator new_bsi;
2033 /* A non-reachable non-local label may still be referenced.
2034 But it no longer needs to carry the extra semantics of
2035 non-locality. */
2036 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
2038 DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)) = 0;
2039 FORCED_LABEL (LABEL_EXPR_LABEL (stmt)) = 1;
2042 new_bb = bb->prev_bb;
2043 new_bsi = bsi_start (new_bb);
2044 bsi_remove (&i);
2045 bsi_insert_before (&new_bsi, stmt, BSI_NEW_STMT);
2047 else
2049 /* Release SSA definitions if we are in SSA. Note that we
2050 may be called when not in SSA. For example,
2051 final_cleanup calls this function via
2052 cleanup_tree_cfg. */
2053 if (in_ssa_p)
2054 release_defs (stmt);
2056 bsi_remove (&i);
2059 /* Don't warn for removed gotos. Gotos are often removed due to
2060 jump threading, thus resulting in bogus warnings. Not great,
2061 since this way we lose warnings for gotos in the original
2062 program that are indeed unreachable. */
2063 if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
2065 #ifdef USE_MAPPED_LOCATION
2066 if (EXPR_HAS_LOCATION (stmt))
2067 loc = EXPR_LOCATION (stmt);
2068 #else
2069 source_locus t;
2070 t = EXPR_LOCUS (stmt);
2071 if (t && LOCATION_LINE (*t) > 0)
2072 loc = t;
2073 #endif
2077 /* If requested, give a warning that the first statement in the
2078 block is unreachable. The loop above only records a location
2079 while LOC is still unset, so LOC ends up describing the first
2080 located statement in the block. */
2081 #ifdef USE_MAPPED_LOCATION
2082 if (loc > BUILTINS_LOCATION)
2083 warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
2084 #else
2085 if (loc)
2086 warning (OPT_Wunreachable_code, "%Hwill never be executed", loc);
2087 #endif
2089 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2093 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2094 predicate VAL, return the edge that will be taken out of the block.
2095 If VAL does not match a unique edge, NULL is returned. */
2097 edge
2098 find_taken_edge (basic_block bb, tree val)
2100 tree stmt;
2102 stmt = last_stmt (bb);
2104 gcc_assert (stmt);
2105 gcc_assert (is_ctrl_stmt (stmt));
2106 gcc_assert (val);
2108 if (! is_gimple_min_invariant (val))
2109 return NULL;
2111 if (TREE_CODE (stmt) == COND_EXPR)
2112 return find_taken_edge_cond_expr (bb, val);
2114 if (TREE_CODE (stmt) == SWITCH_EXPR)
2115 return find_taken_edge_switch_expr (bb, val);
2117 if (computed_goto_p (stmt))
2118 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2120 gcc_unreachable ();
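/* Illustrative sketch, not part of GCC: one way find_taken_edge is
   typically driven.  If BB ends in a COND_EXPR whose predicate folds to
   an integer constant, return the edge that will be taken, otherwise
   NULL.  Only functions already used in this file (last_stmt, fold,
   find_taken_edge) are called; the helper name is hypothetical.  */

static edge
example_constant_cond_edge (basic_block bb)
{
  tree stmt = last_stmt (bb);
  tree val;

  if (!stmt || TREE_CODE (stmt) != COND_EXPR)
    return NULL;

  /* fold may or may not reduce the predicate to a constant.  */
  val = fold (COND_EXPR_COND (stmt));
  if (TREE_CODE (val) != INTEGER_CST)
    return NULL;

  return find_taken_edge (bb, val);
}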
2123 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2124 statement, determine which of the outgoing edges will be taken out of the
2125 block. Return NULL if any edge may be taken.
2127 static edge
2128 find_taken_edge_computed_goto (basic_block bb, tree val)
2130 basic_block dest;
2131 edge e = NULL;
2133 dest = label_to_block (val);
2134 if (dest)
2136 e = find_edge (bb, dest);
2137 gcc_assert (e != NULL);
2140 return e;
2143 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2144 statement, determine which of the two edges will be taken out of the
2145 block. Return NULL if either edge may be taken. */
2147 static edge
2148 find_taken_edge_cond_expr (basic_block bb, tree val)
2150 edge true_edge, false_edge;
2152 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2154 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2155 return (zero_p (val) ? false_edge : true_edge);
2158 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2159 statement, determine which edge will be taken out of the block. Return
2160 NULL if any edge may be taken. */
2162 static edge
2163 find_taken_edge_switch_expr (basic_block bb, tree val)
2165 tree switch_expr, taken_case;
2166 basic_block dest_bb;
2167 edge e;
2169 switch_expr = last_stmt (bb);
2170 taken_case = find_case_label_for_value (switch_expr, val);
2171 dest_bb = label_to_block (CASE_LABEL (taken_case));
2173 e = find_edge (bb, dest_bb);
2174 gcc_assert (e);
2175 return e;
2179 /* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
2180 We can make optimal use here of the fact that the case labels are
2181 sorted: We can do a binary search for a case matching VAL. */
2183 static tree
2184 find_case_label_for_value (tree switch_expr, tree val)
2186 tree vec = SWITCH_LABELS (switch_expr);
2187 size_t low, high, n = TREE_VEC_LENGTH (vec);
2188 tree default_case = TREE_VEC_ELT (vec, n - 1);
2190 for (low = -1, high = n - 1; high - low > 1; )
2192 size_t i = (high + low) / 2;
2193 tree t = TREE_VEC_ELT (vec, i);
2194 int cmp;
2196 /* Cache the result of comparing CASE_LOW and val. */
2197 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2199 if (cmp > 0)
2200 high = i;
2201 else
2202 low = i;
2204 if (CASE_HIGH (t) == NULL)
2206 /* A single-valued case label. */
2207 if (cmp == 0)
2208 return t;
2210 else
2212 /* A case range. We can only handle integer ranges. */
2213 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2214 return t;
2218 return default_case;
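/* Illustrative sketch, not part of GCC: the same binary search written
   over a plain array of sorted, non-overlapping [low, high] ranges, with
   the in-loop early returns replaced by a single check after the loop.
   Returns the index of the matching range or -1 (the analogue of the
   default case).  The struct and function names are hypothetical.  */

struct example_case_range
{
  int low, high;	/* A single-valued case has low == high.  */
};

static int
example_find_case (const struct example_case_range *vec, int n, int val)
{
  int low = -1, high = n;

  while (high - low > 1)
    {
      int mid = (high + low) / 2;

      if (vec[mid].low > val)
	high = mid;
      else
	low = mid;
    }

  /* LOW is now the largest index whose lower bound is <= VAL, or -1.  */
  if (low >= 0 && val <= vec[low].high)
    return low;

  return -1;
}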
2224 /*---------------------------------------------------------------------------
2225 Debugging functions
2226 ---------------------------------------------------------------------------*/
2228 /* Dump tree-specific information of block BB to file OUTF. */
2230 void
2231 tree_dump_bb (basic_block bb, FILE *outf, int indent)
2233 dump_generic_bb (outf, bb, indent, TDF_VOPS);
2237 /* Dump a basic block on stderr. */
2239 void
2240 debug_tree_bb (basic_block bb)
2242 dump_bb (bb, stderr, 0);
2246 /* Dump basic block with index N on stderr. */
2248 basic_block
2249 debug_tree_bb_n (int n)
2251 debug_tree_bb (BASIC_BLOCK (n));
2252 return BASIC_BLOCK (n);
2256 /* Dump the CFG on stderr.
2258 FLAGS are the same used by the tree dumping functions
2259 (see TDF_* in tree.h). */
2261 void
2262 debug_tree_cfg (int flags)
2264 dump_tree_cfg (stderr, flags);
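/* For example, from a debugger one would typically issue something like
   "call debug_tree_cfg (TDF_DETAILS | TDF_STATS)" while stopped inside a
   tree pass; which flags to combine is up to the user.  */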
2268 /* Dump the program showing basic block boundaries on the given FILE.
2270 FLAGS are the same used by the tree dumping functions (see TDF_* in
2271 tree.h). */
2273 void
2274 dump_tree_cfg (FILE *file, int flags)
2276 if (flags & TDF_DETAILS)
2278 const char *funcname
2279 = lang_hooks.decl_printable_name (current_function_decl, 2);
2281 fputc ('\n', file);
2282 fprintf (file, ";; Function %s\n\n", funcname);
2283 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2284 n_basic_blocks, n_edges, last_basic_block);
2286 brief_dump_cfg (file);
2287 fprintf (file, "\n");
2290 if (flags & TDF_STATS)
2291 dump_cfg_stats (file);
2293 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2297 /* Dump CFG statistics on FILE. */
2299 void
2300 dump_cfg_stats (FILE *file)
2302 static long max_num_merged_labels = 0;
2303 unsigned long size, total = 0;
2304 long num_edges;
2305 basic_block bb;
2306 const char * const fmt_str = "%-30s%-13s%12s\n";
2307 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2308 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2309 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2310 const char *funcname
2311 = lang_hooks.decl_printable_name (current_function_decl, 2);
2314 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2316 fprintf (file, "---------------------------------------------------------\n");
2317 fprintf (file, fmt_str, "", " Number of ", "Memory");
2318 fprintf (file, fmt_str, "", " instances ", "used ");
2319 fprintf (file, "---------------------------------------------------------\n");
2321 size = n_basic_blocks * sizeof (struct basic_block_def);
2322 total += size;
2323 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2324 SCALE (size), LABEL (size));
2326 num_edges = 0;
2327 FOR_EACH_BB (bb)
2328 num_edges += EDGE_COUNT (bb->succs);
2329 size = num_edges * sizeof (struct edge_def);
2330 total += size;
2331 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2333 fprintf (file, "---------------------------------------------------------\n");
2334 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2335 LABEL (total));
2336 fprintf (file, "---------------------------------------------------------\n");
2337 fprintf (file, "\n");
2339 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2340 max_num_merged_labels = cfg_stats.num_merged_labels;
2342 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2343 cfg_stats.num_merged_labels, max_num_merged_labels);
2345 fprintf (file, "\n");
2349 /* Dump CFG statistics on stderr. Keep extern so that it's always
2350 linked in the final executable. */
2352 void
2353 debug_cfg_stats (void)
2355 dump_cfg_stats (stderr);
2359 /* Dump the flowgraph to a .vcg FILE. */
2361 static void
2362 tree_cfg2vcg (FILE *file)
2364 edge e;
2365 edge_iterator ei;
2366 basic_block bb;
2367 const char *funcname
2368 = lang_hooks.decl_printable_name (current_function_decl, 2);
2370 /* Write the file header. */
2371 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2372 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2373 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2375 /* Write blocks and edges. */
2376 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2378 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2379 e->dest->index);
2381 if (e->flags & EDGE_FAKE)
2382 fprintf (file, " linestyle: dotted priority: 10");
2383 else
2384 fprintf (file, " linestyle: solid priority: 100");
2386 fprintf (file, " }\n");
2388 fputc ('\n', file);
2390 FOR_EACH_BB (bb)
2392 enum tree_code head_code, end_code;
2393 const char *head_name, *end_name;
2394 int head_line = 0;
2395 int end_line = 0;
2396 tree first = first_stmt (bb);
2397 tree last = last_stmt (bb);
2399 if (first)
2401 head_code = TREE_CODE (first);
2402 head_name = tree_code_name[head_code];
2403 head_line = get_lineno (first);
2405 else
2406 head_name = "no-statement";
2408 if (last)
2410 end_code = TREE_CODE (last);
2411 end_name = tree_code_name[end_code];
2412 end_line = get_lineno (last);
2414 else
2415 end_name = "no-statement";
2417 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2418 bb->index, bb->index, head_name, head_line, end_name,
2419 end_line);
2421 FOR_EACH_EDGE (e, ei, bb->succs)
2423 if (e->dest == EXIT_BLOCK_PTR)
2424 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2425 else
2426 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2428 if (e->flags & EDGE_FAKE)
2429 fprintf (file, " priority: 10 linestyle: dotted");
2430 else
2431 fprintf (file, " priority: 100 linestyle: solid");
2433 fprintf (file, " }\n");
2436 if (bb->next_bb != EXIT_BLOCK_PTR)
2437 fputc ('\n', file);
2440 fputs ("}\n\n", file);
2445 /*---------------------------------------------------------------------------
2446 Miscellaneous helpers
2447 ---------------------------------------------------------------------------*/
2449 /* Return true if T represents a stmt that always transfers control. */
2451 bool
2452 is_ctrl_stmt (tree t)
2454 return (TREE_CODE (t) == COND_EXPR
2455 || TREE_CODE (t) == SWITCH_EXPR
2456 || TREE_CODE (t) == GOTO_EXPR
2457 || TREE_CODE (t) == RETURN_EXPR
2458 || TREE_CODE (t) == RESX_EXPR);
2462 /* Return true if T is a statement that may alter the flow of control
2463 (e.g., a call to a non-returning function). */
2465 bool
2466 is_ctrl_altering_stmt (tree t)
2468 tree call;
2470 gcc_assert (t);
2471 call = get_call_expr_in (t);
2472 if (call)
2474 /* A non-pure/const CALL_EXPR alters flow control if the current
2475 function has nonlocal labels. */
2476 if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
2477 return true;
2479 /* A CALL_EXPR also alters control flow if it does not return. */
2480 if (call_expr_flags (call) & ECF_NORETURN)
2481 return true;
2484 /* If a statement can throw, it alters control flow. */
2485 return tree_can_throw_internal (t);
2489 /* Return true if T is a computed goto. */
2491 bool
2492 computed_goto_p (tree t)
2494 return (TREE_CODE (t) == GOTO_EXPR
2495 && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
2499 /* Checks whether EXPR is a simple local goto. */
2501 bool
2502 simple_goto_p (tree expr)
2504 return (TREE_CODE (expr) == GOTO_EXPR
2505 && TREE_CODE (GOTO_DESTINATION (expr)) == LABEL_DECL);
2509 /* Return true if T should start a new basic block. PREV_T is the
2510 statement preceding T. It is used when T is a label or a case label.
2511 Labels should only start a new basic block if their previous statement
2512 wasn't a label. Otherwise, sequences of labels would generate
2513 unnecessary basic blocks that only contain a single label. */
2515 static inline bool
2516 stmt_starts_bb_p (tree t, tree prev_t)
2518 if (t == NULL_TREE)
2519 return false;
2521 /* LABEL_EXPRs start a new basic block only if the preceding
2522 statement wasn't a label of the same type. This prevents the
2523 creation of consecutive blocks that have nothing but a single
2524 label. */
2525 if (TREE_CODE (t) == LABEL_EXPR)
2527 /* Nonlocal and computed GOTO targets always start a new block. */
2528 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
2529 || FORCED_LABEL (LABEL_EXPR_LABEL (t)))
2530 return true;
2532 if (prev_t && TREE_CODE (prev_t) == LABEL_EXPR)
2534 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
2535 return true;
2537 cfg_stats.num_merged_labels++;
2538 return false;
2540 else
2541 return true;
2544 return false;
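/* Illustrative example, not from the GCC sources: for the lowered
   sequence

       L1:;
       L2:;
       x = 1;

   only L1 starts a new basic block.  L2 is merged into the same block
   (and cfg_stats.num_merged_labels is incremented), and "x = 1" does not
   start a block, since a preceding plain label never ends one.  */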
2548 /* Return true if T should end a basic block. */
2550 bool
2551 stmt_ends_bb_p (tree t)
2553 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2557 /* Add gotos that used to be represented implicitly in the CFG. */
2559 void
2560 disband_implicit_edges (void)
2562 basic_block bb;
2563 block_stmt_iterator last;
2564 edge e;
2565 edge_iterator ei;
2566 tree stmt, label;
2568 FOR_EACH_BB (bb)
2570 last = bsi_last (bb);
2571 stmt = last_stmt (bb);
2573 if (stmt && TREE_CODE (stmt) == COND_EXPR)
2575 /* Remove superfluous gotos from COND_EXPR branches. Moved
2576 from cfg_remove_useless_stmts here since it violates the
2577 invariants for tree--cfg correspondence and thus fits better
2578 here where we do it anyway. */
2579 e = find_edge (bb, bb->next_bb);
2580 if (e)
2582 if (e->flags & EDGE_TRUE_VALUE)
2583 COND_EXPR_THEN (stmt) = build_empty_stmt ();
2584 else if (e->flags & EDGE_FALSE_VALUE)
2585 COND_EXPR_ELSE (stmt) = build_empty_stmt ();
2586 else
2587 gcc_unreachable ();
2588 e->flags |= EDGE_FALLTHRU;
2591 continue;
2594 if (stmt && TREE_CODE (stmt) == RETURN_EXPR)
2596 /* Remove the RETURN_EXPR if we may fall through to the exit
2597 instead. */
2598 gcc_assert (single_succ_p (bb));
2599 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
2601 if (bb->next_bb == EXIT_BLOCK_PTR
2602 && !TREE_OPERAND (stmt, 0))
2604 bsi_remove (&last);
2605 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
2607 continue;
2610 /* There can be no fallthru edge if the last statement is a control
2611 one. */
2612 if (stmt && is_ctrl_stmt (stmt))
2613 continue;
2615 /* Find a fallthru edge and emit the goto if necessary. */
2616 FOR_EACH_EDGE (e, ei, bb->succs)
2617 if (e->flags & EDGE_FALLTHRU)
2618 break;
2620 if (!e || e->dest == bb->next_bb)
2621 continue;
2623 gcc_assert (e->dest != EXIT_BLOCK_PTR);
2624 label = tree_block_label (e->dest);
2626 stmt = build1 (GOTO_EXPR, void_type_node, label);
2627 #ifdef USE_MAPPED_LOCATION
2628 SET_EXPR_LOCATION (stmt, e->goto_locus);
2629 #else
2630 SET_EXPR_LOCUS (stmt, e->goto_locus);
2631 #endif
2632 bsi_insert_after (&last, stmt, BSI_NEW_STMT);
2633 e->flags &= ~EDGE_FALLTHRU;
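/* Illustrative example, not from the GCC sources: if a block ending in
   "a = b;" falls through to a block that is not its layout successor,
   the loop above builds "goto <Lx>;" for the label returned by
   tree_block_label on the destination, appends it with bsi_insert_after,
   and clears EDGE_FALLTHRU on the edge, making the jump explicit again.
   The label name <Lx> is hypothetical.  */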
2637 /* Remove block annotations and other data structures. */
2639 void
2640 delete_tree_cfg_annotations (void)
2642 label_to_block_map = NULL;
2646 /* Return the first statement in basic block BB. */
2648 tree
2649 first_stmt (basic_block bb)
2651 block_stmt_iterator i = bsi_start (bb);
2652 return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
2656 /* Return the last statement in basic block BB. */
2658 tree
2659 last_stmt (basic_block bb)
2661 block_stmt_iterator b = bsi_last (bb);
2662 return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
2666 /* Return a pointer to the last statement in block BB. */
2668 tree *
2669 last_stmt_ptr (basic_block bb)
2671 block_stmt_iterator last = bsi_last (bb);
2672 return !bsi_end_p (last) ? bsi_stmt_ptr (last) : NULL;
2676 /* Return the last statement of an otherwise empty block. Return NULL
2677 if the block is totally empty, or if it contains more than one
2678 statement. */
2680 tree
2681 last_and_only_stmt (basic_block bb)
2683 block_stmt_iterator i = bsi_last (bb);
2684 tree last, prev;
2686 if (bsi_end_p (i))
2687 return NULL_TREE;
2689 last = bsi_stmt (i);
2690 bsi_prev (&i);
2691 if (bsi_end_p (i))
2692 return last;
2694 /* Empty statements should no longer appear in the instruction stream.
2695 Everything that might have appeared before should be deleted by
2696 remove_useless_stmts, and the optimizers should just bsi_remove
2697 instead of smashing with build_empty_stmt.
2699 Thus the only thing that should appear here in a block containing
2700 one executable statement is a label. */
2701 prev = bsi_stmt (i);
2702 if (TREE_CODE (prev) == LABEL_EXPR)
2703 return last;
2704 else
2705 return NULL_TREE;
2709 /* Mark BB as the basic block holding statement T. */
2711 void
2712 set_bb_for_stmt (tree t, basic_block bb)
2714 if (TREE_CODE (t) == PHI_NODE)
2715 PHI_BB (t) = bb;
2716 else if (TREE_CODE (t) == STATEMENT_LIST)
2718 tree_stmt_iterator i;
2719 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2720 set_bb_for_stmt (tsi_stmt (i), bb);
2722 else
2724 stmt_ann_t ann = get_stmt_ann (t);
2725 ann->bb = bb;
2727 /* If the statement is a label, add the label to block-to-labels map
2728 so that we can speed up edge creation for GOTO_EXPRs. */
2729 if (TREE_CODE (t) == LABEL_EXPR)
2731 int uid;
2733 t = LABEL_EXPR_LABEL (t);
2734 uid = LABEL_DECL_UID (t);
2735 if (uid == -1)
2737 LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
2738 if (VARRAY_SIZE (label_to_block_map) <= (unsigned) uid)
2739 VARRAY_GROW (label_to_block_map, 3 * uid / 2);
2741 else
2742 /* We're moving an existing label. Make sure that we've
2743 removed it from the old block. */
2744 gcc_assert (!bb || !VARRAY_BB (label_to_block_map, uid));
2745 VARRAY_BB (label_to_block_map, uid) = bb;
2750 /* Finds iterator for STMT. */
2752 extern block_stmt_iterator
2753 bsi_for_stmt (tree stmt)
2755 block_stmt_iterator bsi;
2757 for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
2758 if (bsi_stmt (bsi) == stmt)
2759 return bsi;
2761 gcc_unreachable ();
2764 /* Mark statement T as modified, and update it. */
2765 static inline void
2766 update_modified_stmts (tree t)
2768 if (TREE_CODE (t) == STATEMENT_LIST)
2770 tree_stmt_iterator i;
2771 tree stmt;
2772 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2774 stmt = tsi_stmt (i);
2775 update_stmt_if_modified (stmt);
2778 else
2779 update_stmt_if_modified (t);
2782 /* Insert statement (or statement list) T before the statement
2783 pointed-to by iterator I. M specifies how to update iterator I
2784 after insertion (see enum bsi_iterator_update). */
2786 void
2787 bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2789 set_bb_for_stmt (t, i->bb);
2790 update_modified_stmts (t);
2791 tsi_link_before (&i->tsi, t, m);
2795 /* Insert statement (or statement list) T after the statement
2796 pointed-to by iterator I. M specifies how to update iterator I
2797 after insertion (see enum bsi_iterator_update). */
2799 void
2800 bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2802 set_bb_for_stmt (t, i->bb);
2803 update_modified_stmts (t);
2804 tsi_link_after (&i->tsi, t, m);
2808 /* Remove the statement pointed to by iterator I. The iterator is updated
2809 to the next statement. */
2811 void
2812 bsi_remove (block_stmt_iterator *i)
2814 tree t = bsi_stmt (*i);
2815 set_bb_for_stmt (t, NULL);
2816 delink_stmt_imm_use (t);
2817 tsi_delink (&i->tsi);
2818 mark_stmt_modified (t);
2822 /* Move the statement at FROM so it comes right after the statement at TO. */
2824 void
2825 bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
2827 tree stmt = bsi_stmt (*from);
2828 bsi_remove (from);
2829 bsi_insert_after (to, stmt, BSI_SAME_STMT);
2833 /* Move the statement at FROM so it comes right before the statement at TO. */
2835 void
2836 bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
2838 tree stmt = bsi_stmt (*from);
2839 bsi_remove (from);
2840 bsi_insert_before (to, stmt, BSI_SAME_STMT);
2844 /* Move the statement at FROM to the end of basic block BB. */
2846 void
2847 bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
2849 block_stmt_iterator last = bsi_last (bb);
2851 /* Have to check bsi_end_p because it could be an empty block. */
2852 if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
2853 bsi_move_before (from, &last);
2854 else
2855 bsi_move_after (from, &last);
2859 /* Replace the contents of the statement pointed to by iterator BSI
2860 with STMT. If PRESERVE_EH_INFO is true, the exception handling
2861 information of the original statement is preserved. */
2863 void
2864 bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool preserve_eh_info)
2866 int eh_region;
2867 tree orig_stmt = bsi_stmt (*bsi);
2869 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
2870 set_bb_for_stmt (stmt, bsi->bb);
2872 /* Preserve EH region information from the original statement, if
2873 requested by the caller. */
2874 if (preserve_eh_info)
2876 eh_region = lookup_stmt_eh_region (orig_stmt);
2877 if (eh_region >= 0)
2878 add_stmt_to_eh_region (stmt, eh_region);
2881 delink_stmt_imm_use (orig_stmt);
2882 *bsi_stmt_ptr (*bsi) = stmt;
2883 mark_stmt_modified (stmt);
2884 update_modified_stmts (stmt);
2888 /* Insert the statement pointed-to by BSI into edge E. Every attempt
2889 is made to place the statement in an existing basic block, but
2890 sometimes that isn't possible. When it isn't possible, the edge is
2891 split and the statement is added to the new block.
2893 In all cases, the returned *BSI points to the correct location. The
2894 return value is true if insertion should be done after the location,
2895 or false if it should be done before the location. If new basic block
2896 has to be created, it is stored in *NEW_BB. */
2898 static bool
2899 tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
2900 basic_block *new_bb)
2902 basic_block dest, src;
2903 tree tmp;
2905 dest = e->dest;
2906 restart:
2908 /* If the destination has one predecessor which has no PHI nodes,
2909 insert there. Except for the exit block.
2911 The requirement for no PHI nodes could be relaxed. Basically we
2912 would have to examine the PHIs to prove that none of them used
2913 the value set by the statement we want to insert on E. That
2914 hardly seems worth the effort. */
2915 if (single_pred_p (dest)
2916 && ! phi_nodes (dest)
2917 && dest != EXIT_BLOCK_PTR)
2919 *bsi = bsi_start (dest);
2920 if (bsi_end_p (*bsi))
2921 return true;
2923 /* Make sure we insert after any leading labels. */
2924 tmp = bsi_stmt (*bsi);
2925 while (TREE_CODE (tmp) == LABEL_EXPR)
2927 bsi_next (bsi);
2928 if (bsi_end_p (*bsi))
2929 break;
2930 tmp = bsi_stmt (*bsi);
2933 if (bsi_end_p (*bsi))
2935 *bsi = bsi_last (dest);
2936 return true;
2938 else
2939 return false;
2942 /* If the source has one successor, the edge is not abnormal and
2943 the last statement does not end a basic block, insert there.
2944 Except for the entry block. */
2945 src = e->src;
2946 if ((e->flags & EDGE_ABNORMAL) == 0
2947 && single_succ_p (src)
2948 && src != ENTRY_BLOCK_PTR)
2950 *bsi = bsi_last (src);
2951 if (bsi_end_p (*bsi))
2952 return true;
2954 tmp = bsi_stmt (*bsi);
2955 if (!stmt_ends_bb_p (tmp))
2956 return true;
2958 /* Insert code just before returning the value. We may need to decompose
2959 the return in case it contains a non-trivial operand. */
2960 if (TREE_CODE (tmp) == RETURN_EXPR)
2962 tree op = TREE_OPERAND (tmp, 0);
2963 if (op && !is_gimple_val (op))
2965 gcc_assert (TREE_CODE (op) == MODIFY_EXPR);
2966 bsi_insert_before (bsi, op, BSI_NEW_STMT);
2967 TREE_OPERAND (tmp, 0) = TREE_OPERAND (op, 0);
2969 bsi_prev (bsi);
2970 return true;
2974 /* Otherwise, create a new basic block, and split this edge. */
2975 dest = split_edge (e);
2976 if (new_bb)
2977 *new_bb = dest;
2978 e = single_pred_edge (dest);
2979 goto restart;
2983 /* This routine will commit all pending edge insertions, creating any new
2984 basic blocks which are necessary. */
2986 void
2987 bsi_commit_edge_inserts (void)
2989 basic_block bb;
2990 edge e;
2991 edge_iterator ei;
2993 bsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR), NULL);
2995 FOR_EACH_BB (bb)
2996 FOR_EACH_EDGE (e, ei, bb->succs)
2997 bsi_commit_one_edge_insert (e, NULL);
3001 /* Commit insertions pending at edge E. If a new block is created, set NEW_BB
3002 to this block, otherwise set it to NULL. */
3004 void
3005 bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
3007 if (new_bb)
3008 *new_bb = NULL;
3009 if (PENDING_STMT (e))
3011 block_stmt_iterator bsi;
3012 tree stmt = PENDING_STMT (e);
3014 PENDING_STMT (e) = NULL_TREE;
3016 if (tree_find_edge_insert_loc (e, &bsi, new_bb))
3017 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3018 else
3019 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3024 /* Add STMT to the pending list of edge E. No actual insertion is
3025 made until a call to bsi_commit_edge_inserts () is made. */
3027 void
3028 bsi_insert_on_edge (edge e, tree stmt)
3030 append_to_statement_list (stmt, &PENDING_STMT (e));
3033 /* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts. If a new
3034 block has to be created, it is returned. */
3036 basic_block
3037 bsi_insert_on_edge_immediate (edge e, tree stmt)
3039 block_stmt_iterator bsi;
3040 basic_block new_bb = NULL;
3042 gcc_assert (!PENDING_STMT (e));
3044 if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
3045 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3046 else
3047 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3049 return new_bb;
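/* Illustrative sketch, not part of GCC: the deferred-insertion API above
   is normally used by queueing statements on edges and committing them
   in one batch.  This hypothetical helper queues an unshared copy of
   STMT on every successor edge of BB and then commits; the commit may
   create new blocks if an edge had to be split.  */

static void
example_insert_on_all_succs (basic_block bb, tree stmt)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    bsi_insert_on_edge (e, unshare_expr (stmt));

  bsi_commit_edge_inserts ();
}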
3052 /*---------------------------------------------------------------------------
3053 Tree specific functions for CFG manipulation
3054 ---------------------------------------------------------------------------*/
3056 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
3058 static void
3059 reinstall_phi_args (edge new_edge, edge old_edge)
3061 tree var, phi;
3063 if (!PENDING_STMT (old_edge))
3064 return;
3066 for (var = PENDING_STMT (old_edge), phi = phi_nodes (new_edge->dest);
3067 var && phi;
3068 var = TREE_CHAIN (var), phi = PHI_CHAIN (phi))
3070 tree result = TREE_PURPOSE (var);
3071 tree arg = TREE_VALUE (var);
3073 gcc_assert (result == PHI_RESULT (phi));
3075 add_phi_arg (phi, arg, new_edge);
3078 PENDING_STMT (old_edge) = NULL;
3081 /* Returns the basic block after which the new basic block created
3082 by splitting edge EDGE_IN should be placed. Tries to keep the new block
3083 near its "logical" location. This is of most help to humans looking
3084 at debugging dumps. */
3086 static basic_block
3087 split_edge_bb_loc (edge edge_in)
3089 basic_block dest = edge_in->dest;
3091 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
3092 return edge_in->src;
3093 else
3094 return dest->prev_bb;
3097 /* Split a (typically critical) edge EDGE_IN. Return the new block.
3098 Abort on abnormal edges. */
3100 static basic_block
3101 tree_split_edge (edge edge_in)
3103 basic_block new_bb, after_bb, dest, src;
3104 edge new_edge, e;
3106 /* Abnormal edges cannot be split. */
3107 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
3109 src = edge_in->src;
3110 dest = edge_in->dest;
3112 after_bb = split_edge_bb_loc (edge_in);
3114 new_bb = create_empty_bb (after_bb);
3115 new_bb->frequency = EDGE_FREQUENCY (edge_in);
3116 new_bb->count = edge_in->count;
3117 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
3118 new_edge->probability = REG_BR_PROB_BASE;
3119 new_edge->count = edge_in->count;
3121 e = redirect_edge_and_branch (edge_in, new_bb);
3122 gcc_assert (e);
3123 reinstall_phi_args (new_edge, e);
3125 return new_bb;
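/* Illustrative example, not from the GCC sources: splitting an edge A->B
   this way yields A->N->B, where N is a new empty block placed near its
   "logical" location, whose count and frequency are taken from the
   original edge, and whose single fallthru successor edge receives (via
   reinstall_phi_args) the PHI arguments that the original edge
   contributed to B.  */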
3129 /* Return true when BB has label LABEL in it. */
3131 static bool
3132 has_label_p (basic_block bb, tree label)
3134 block_stmt_iterator bsi;
3136 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3138 tree stmt = bsi_stmt (bsi);
3140 if (TREE_CODE (stmt) != LABEL_EXPR)
3141 return false;
3142 if (LABEL_EXPR_LABEL (stmt) == label)
3143 return true;
3145 return false;
3149 /* Callback for walk_tree, check that all elements with address taken are
3150 properly noticed as such. DATA is non-null if TP was seen
3151 inside a PHI node. */
3153 static tree
3154 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3156 tree t = *tp, x;
3157 bool in_phi = (data != NULL);
3159 if (TYPE_P (t))
3160 *walk_subtrees = 0;
3162 /* Check operand N for being valid GIMPLE and give error MSG if not. */
3163 #define CHECK_OP(N, MSG) \
3164 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
3165 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
3167 switch (TREE_CODE (t))
3169 case SSA_NAME:
3170 if (SSA_NAME_IN_FREE_LIST (t))
3172 error ("SSA name in freelist but still referenced");
3173 return *tp;
3175 break;
3177 case ASSERT_EXPR:
3178 x = fold (ASSERT_EXPR_COND (t));
3179 if (x == boolean_false_node)
3181 error ("ASSERT_EXPR with an always-false condition");
3182 return *tp;
3184 break;
3186 case MODIFY_EXPR:
3187 x = TREE_OPERAND (t, 0);
3188 if (TREE_CODE (x) == BIT_FIELD_REF
3189 && is_gimple_reg (TREE_OPERAND (x, 0)))
3191 error ("GIMPLE register modified with BIT_FIELD_REF");
3192 return t;
3194 break;
3196 case ADDR_EXPR:
3198 bool old_invariant;
3199 bool old_constant;
3200 bool old_side_effects;
3201 bool new_invariant;
3202 bool new_constant;
3203 bool new_side_effects;
3205 /* ??? tree-ssa-alias.c may have overlooked dead PHI nodes, missing
3206 dead PHIs that take the address of something. But if the PHI
3207 result is dead, the fact that it takes the address of anything
3208 is irrelevant. Because we can not tell from here if a PHI result
3209 is dead, we just skip this check for PHIs altogether. This means
3210 we may be missing "valid" checks, but what can you do?
3211 This was PR19217. */
3212 if (in_phi)
3213 break;
3215 old_invariant = TREE_INVARIANT (t);
3216 old_constant = TREE_CONSTANT (t);
3217 old_side_effects = TREE_SIDE_EFFECTS (t);
3219 recompute_tree_invarant_for_addr_expr (t);
3220 new_invariant = TREE_INVARIANT (t);
3221 new_side_effects = TREE_SIDE_EFFECTS (t);
3222 new_constant = TREE_CONSTANT (t);
3224 if (old_invariant != new_invariant)
3226 error ("invariant not recomputed when ADDR_EXPR changed");
3227 return t;
3230 if (old_constant != new_constant)
3232 error ("constant not recomputed when ADDR_EXPR changed");
3233 return t;
3235 if (old_side_effects != new_side_effects)
3237 error ("side effects not recomputed when ADDR_EXPR changed");
3238 return t;
3241 /* Skip any references (they will be checked when we recurse down the
3242 tree) and ensure that any variable used as a prefix is marked
3243 addressable. */
3244 for (x = TREE_OPERAND (t, 0);
3245 handled_component_p (x);
3246 x = TREE_OPERAND (x, 0))
3249 if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
3250 return NULL;
3251 if (!TREE_ADDRESSABLE (x))
3253 error ("address taken, but ADDRESSABLE bit not set");
3254 return x;
3256 break;
3259 case COND_EXPR:
3260 x = COND_EXPR_COND (t);
3261 if (TREE_CODE (TREE_TYPE (x)) != BOOLEAN_TYPE)
3263 error ("non-boolean used in condition");
3264 return x;
3266 if (!is_gimple_condexpr (x))
3268 error ("invalid conditional operand");
3269 return x;
3271 break;
3273 case NOP_EXPR:
3274 case CONVERT_EXPR:
3275 case FIX_TRUNC_EXPR:
3276 case FIX_CEIL_EXPR:
3277 case FIX_FLOOR_EXPR:
3278 case FIX_ROUND_EXPR:
3279 case FLOAT_EXPR:
3280 case NEGATE_EXPR:
3281 case ABS_EXPR:
3282 case BIT_NOT_EXPR:
3283 case NON_LVALUE_EXPR:
3284 case TRUTH_NOT_EXPR:
3285 CHECK_OP (0, "invalid operand to unary operator");
3286 break;
3288 case REALPART_EXPR:
3289 case IMAGPART_EXPR:
3290 case COMPONENT_REF:
3291 case ARRAY_REF:
3292 case ARRAY_RANGE_REF:
3293 case BIT_FIELD_REF:
3294 case VIEW_CONVERT_EXPR:
3295 /* We have a nest of references. Verify that each of the operands
3296 that determine where to reference is either a constant or a variable,
3297 verify that the base is valid, and then show we've already checked
3298 the subtrees. */
3299 while (handled_component_p (t))
3301 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3302 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3303 else if (TREE_CODE (t) == ARRAY_REF
3304 || TREE_CODE (t) == ARRAY_RANGE_REF)
3306 CHECK_OP (1, "invalid array index");
3307 if (TREE_OPERAND (t, 2))
3308 CHECK_OP (2, "invalid array lower bound");
3309 if (TREE_OPERAND (t, 3))
3310 CHECK_OP (3, "invalid array stride");
3312 else if (TREE_CODE (t) == BIT_FIELD_REF)
3314 CHECK_OP (1, "invalid operand to BIT_FIELD_REF");
3315 CHECK_OP (2, "invalid operand to BIT_FIELD_REF");
3318 t = TREE_OPERAND (t, 0);
3321 if (!CONSTANT_CLASS_P (t) && !is_gimple_lvalue (t))
3323 error ("invalid reference prefix");
3324 return t;
3326 *walk_subtrees = 0;
3327 break;
3329 case LT_EXPR:
3330 case LE_EXPR:
3331 case GT_EXPR:
3332 case GE_EXPR:
3333 case EQ_EXPR:
3334 case NE_EXPR:
3335 case UNORDERED_EXPR:
3336 case ORDERED_EXPR:
3337 case UNLT_EXPR:
3338 case UNLE_EXPR:
3339 case UNGT_EXPR:
3340 case UNGE_EXPR:
3341 case UNEQ_EXPR:
3342 case LTGT_EXPR:
3343 case PLUS_EXPR:
3344 case MINUS_EXPR:
3345 case MULT_EXPR:
3346 case TRUNC_DIV_EXPR:
3347 case CEIL_DIV_EXPR:
3348 case FLOOR_DIV_EXPR:
3349 case ROUND_DIV_EXPR:
3350 case TRUNC_MOD_EXPR:
3351 case CEIL_MOD_EXPR:
3352 case FLOOR_MOD_EXPR:
3353 case ROUND_MOD_EXPR:
3354 case RDIV_EXPR:
3355 case EXACT_DIV_EXPR:
3356 case MIN_EXPR:
3357 case MAX_EXPR:
3358 case LSHIFT_EXPR:
3359 case RSHIFT_EXPR:
3360 case LROTATE_EXPR:
3361 case RROTATE_EXPR:
3362 case BIT_IOR_EXPR:
3363 case BIT_XOR_EXPR:
3364 case BIT_AND_EXPR:
3365 CHECK_OP (0, "invalid operand to binary operator");
3366 CHECK_OP (1, "invalid operand to binary operator");
3367 break;
3369 default:
3370 break;
3372 return NULL;
3374 #undef CHECK_OP
3378 /* Verify STMT, return true if STMT is not in GIMPLE form.
3379 TODO: Implement type checking. */
3381 static bool
3382 verify_stmt (tree stmt, bool last_in_block)
3384 tree addr;
3386 if (!is_gimple_stmt (stmt))
3388 error ("is not a valid GIMPLE statement");
3389 goto fail;
3392 addr = walk_tree (&stmt, verify_expr, NULL, NULL);
3393 if (addr)
3395 debug_generic_stmt (addr);
3396 return true;
3399 /* If the statement is marked as part of an EH region, then it is
3400 expected that the statement could throw. Verify that when we
3401 have optimizations that simplify statements such that we prove
3402 that they cannot throw, that we update other data structures
3403 to match. */
3404 if (lookup_stmt_eh_region (stmt) >= 0)
3406 if (!tree_could_throw_p (stmt))
3408 error ("statement marked for throw, but doesn%'t");
3409 goto fail;
3411 if (!last_in_block && tree_can_throw_internal (stmt))
3413 error ("statement marked for throw in middle of block");
3414 goto fail;
3418 return false;
3420 fail:
3421 debug_generic_stmt (stmt);
3422 return true;
3428 /* Return true when T can be shared. */
3428 static bool
3429 tree_node_can_be_shared (tree t)
3431 if (IS_TYPE_OR_DECL_P (t)
3432 /* We check for constants explicitly since they are not considered
3433 gimple invariants if they overflowed. */
3434 || CONSTANT_CLASS_P (t)
3435 || is_gimple_min_invariant (t)
3436 || TREE_CODE (t) == SSA_NAME
3437 || t == error_mark_node)
3438 return true;
3440 if (TREE_CODE (t) == CASE_LABEL_EXPR)
3441 return true;
3443 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3444 /* We check for constants explicitly since they are not considered
3445 gimple invariants if they overflowed. */
3446 && (CONSTANT_CLASS_P (TREE_OPERAND (t, 1))
3447 || is_gimple_min_invariant (TREE_OPERAND (t, 1))))
3448 || (TREE_CODE (t) == COMPONENT_REF
3449 || TREE_CODE (t) == REALPART_EXPR
3450 || TREE_CODE (t) == IMAGPART_EXPR))
3451 t = TREE_OPERAND (t, 0);
3453 if (DECL_P (t))
3454 return true;
3456 return false;
3460 /* Called via walk_tree. Verify tree sharing. */
3462 static tree
3463 verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
3465 htab_t htab = (htab_t) data;
3466 void **slot;
3468 if (tree_node_can_be_shared (*tp))
3470 *walk_subtrees = false;
3471 return NULL;
3474 slot = htab_find_slot (htab, *tp, INSERT);
3475 if (*slot)
3476 return *slot;
3477 *slot = *tp;
3479 return NULL;
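/* Illustrative sketch, not part of GCC: how verify_node_sharing is
   driven, mirroring its use in verify_stmts below.  A non-NULL return
   value is the first improperly shared node found in STMT.  The helper
   name is hypothetical.  */

static tree
example_check_stmt_sharing (tree stmt)
{
  htab_t visited = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
  tree shared = walk_tree (&stmt, verify_node_sharing, visited, NULL);

  htab_delete (visited);
  return shared;
}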
3483 /* Verify the GIMPLE statement chain. */
3485 void
3486 verify_stmts (void)
3488 basic_block bb;
3489 block_stmt_iterator bsi;
3490 bool err = false;
3491 htab_t htab;
3492 tree addr;
3494 timevar_push (TV_TREE_STMT_VERIFY);
3495 htab = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
3497 FOR_EACH_BB (bb)
3499 tree phi;
3500 int i;
3502 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
3504 int phi_num_args = PHI_NUM_ARGS (phi);
3506 if (bb_for_stmt (phi) != bb)
3508 error ("bb_for_stmt (phi) is set to a wrong basic block");
3509 err |= true;
3512 for (i = 0; i < phi_num_args; i++)
3514 tree t = PHI_ARG_DEF (phi, i);
3515 tree addr;
3517 /* Addressable variables do have SSA_NAMEs but they
3518 are not considered gimple values. */
3519 if (TREE_CODE (t) != SSA_NAME
3520 && TREE_CODE (t) != FUNCTION_DECL
3521 && !is_gimple_val (t))
3523 error ("PHI def is not a GIMPLE value");
3524 debug_generic_stmt (phi);
3525 debug_generic_stmt (t);
3526 err |= true;
3529 addr = walk_tree (&t, verify_expr, (void *) 1, NULL);
3530 if (addr)
3532 debug_generic_stmt (addr);
3533 err |= true;
3536 addr = walk_tree (&t, verify_node_sharing, htab, NULL);
3537 if (addr)
3539 error ("incorrect sharing of tree nodes");
3540 debug_generic_stmt (phi);
3541 debug_generic_stmt (addr);
3542 err |= true;
3547 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
3549 tree stmt = bsi_stmt (bsi);
3551 if (bb_for_stmt (stmt) != bb)
3553 error ("bb_for_stmt (stmt) is set to a wrong basic block");
3554 err |= true;
3557 bsi_next (&bsi);
3558 err |= verify_stmt (stmt, bsi_end_p (bsi));
3559 addr = walk_tree (&stmt, verify_node_sharing, htab, NULL);
3560 if (addr)
3562 error ("incorrect sharing of tree nodes");
3563 debug_generic_stmt (stmt);
3564 debug_generic_stmt (addr);
3565 err |= true;
3570 if (err)
3571 internal_error ("verify_stmts failed");
3573 htab_delete (htab);
3574 timevar_pop (TV_TREE_STMT_VERIFY);
3578 /* Verifies that the flow information is OK. */
3580 static int
3581 tree_verify_flow_info (void)
3583 int err = 0;
3584 basic_block bb;
3585 block_stmt_iterator bsi;
3586 tree stmt;
3587 edge e;
3588 edge_iterator ei;
3590 if (ENTRY_BLOCK_PTR->stmt_list)
3592 error ("ENTRY_BLOCK has a statement list associated with it");
3593 err = 1;
3596 if (EXIT_BLOCK_PTR->stmt_list)
3598 error ("EXIT_BLOCK has a statement list associated with it");
3599 err = 1;
3602 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
3603 if (e->flags & EDGE_FALLTHRU)
3605 error ("fallthru to exit from bb %d", e->src->index);
3606 err = 1;
3609 FOR_EACH_BB (bb)
3611 bool found_ctrl_stmt = false;
3613 stmt = NULL_TREE;
3615 /* Skip labels on the start of basic block. */
3616 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3618 tree prev_stmt = stmt;
3620 stmt = bsi_stmt (bsi);
3622 if (TREE_CODE (stmt) != LABEL_EXPR)
3623 break;
3625 if (prev_stmt && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
3627 error ("nonlocal label %s is not first "
3628 "in a sequence of labels in bb %d",
3629 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3630 bb->index);
3631 err = 1;
3634 if (label_to_block (LABEL_EXPR_LABEL (stmt)) != bb)
3636 error ("label %s to block does not match in bb %d",
3637 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3638 bb->index);
3639 err = 1;
3642 if (decl_function_context (LABEL_EXPR_LABEL (stmt))
3643 != current_function_decl)
3645 error ("label %s has incorrect context in bb %d",
3646 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3647 bb->index);
3648 err = 1;
3652 /* Verify that body of basic block BB is free of control flow. */
3653 for (; !bsi_end_p (bsi); bsi_next (&bsi))
3655 tree stmt = bsi_stmt (bsi);
3657 if (found_ctrl_stmt)
3659 error ("control flow in the middle of basic block %d",
3660 bb->index);
3661 err = 1;
3664 if (stmt_ends_bb_p (stmt))
3665 found_ctrl_stmt = true;
3667 if (TREE_CODE (stmt) == LABEL_EXPR)
3669 error ("label %s in the middle of basic block %d",
3670 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3671 bb->index);
3672 err = 1;
3675 bsi = bsi_last (bb);
3676 if (bsi_end_p (bsi))
3677 continue;
3679 stmt = bsi_stmt (bsi);
3681 err |= verify_eh_edges (stmt);
3683 if (is_ctrl_stmt (stmt))
3685 FOR_EACH_EDGE (e, ei, bb->succs)
3686 if (e->flags & EDGE_FALLTHRU)
3688 error ("fallthru edge after a control statement in bb %d",
3689 bb->index);
3690 err = 1;
3694 if (TREE_CODE (stmt) != COND_EXPR)
3696 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
3697 after anything other than a COND_EXPR. */
3698 FOR_EACH_EDGE (e, ei, bb->succs)
3699 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
3701 error ("true/false edge after a non-COND_EXPR in bb %d",
3702 bb->index);
3703 err = 1;
3707 switch (TREE_CODE (stmt))
3709 case COND_EXPR:
3711 edge true_edge;
3712 edge false_edge;
3713 if (TREE_CODE (COND_EXPR_THEN (stmt)) != GOTO_EXPR
3714 || TREE_CODE (COND_EXPR_ELSE (stmt)) != GOTO_EXPR)
3716 error ("structured COND_EXPR at the end of bb %d", bb->index);
3717 err = 1;
3720 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
3722 if (!true_edge || !false_edge
3723 || !(true_edge->flags & EDGE_TRUE_VALUE)
3724 || !(false_edge->flags & EDGE_FALSE_VALUE)
3725 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3726 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3727 || EDGE_COUNT (bb->succs) >= 3)
3729 error ("wrong outgoing edge flags at end of bb %d",
3730 bb->index);
3731 err = 1;
3734 if (!has_label_p (true_edge->dest,
3735 GOTO_DESTINATION (COND_EXPR_THEN (stmt))))
3737 error ("%<then%> label does not match edge at end of bb %d",
3738 bb->index);
3739 err = 1;
3742 if (!has_label_p (false_edge->dest,
3743 GOTO_DESTINATION (COND_EXPR_ELSE (stmt))))
3745 error ("%<else%> label does not match edge at end of bb %d",
3746 bb->index);
3747 err = 1;
3750 break;
3752 case GOTO_EXPR:
3753 if (simple_goto_p (stmt))
3755 error ("explicit goto at end of bb %d", bb->index);
3756 err = 1;
3758 else
3760 /* FIXME. We should double check that the labels in the
3761 destination blocks have their address taken. */
3762 FOR_EACH_EDGE (e, ei, bb->succs)
3763 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
3764 | EDGE_FALSE_VALUE))
3765 || !(e->flags & EDGE_ABNORMAL))
3767 error ("wrong outgoing edge flags at end of bb %d",
3768 bb->index);
3769 err = 1;
3772 break;
3774 case RETURN_EXPR:
3775 if (!single_succ_p (bb)
3776 || (single_succ_edge (bb)->flags
3777 & (EDGE_FALLTHRU | EDGE_ABNORMAL
3778 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3780 error ("wrong outgoing edge flags at end of bb %d", bb->index);
3781 err = 1;
3783 if (single_succ (bb) != EXIT_BLOCK_PTR)
3785 error ("return edge does not point to exit in bb %d",
3786 bb->index);
3787 err = 1;
3789 break;
3791 case SWITCH_EXPR:
3793 tree prev;
3794 edge e;
3795 size_t i, n;
3796 tree vec;
3798 vec = SWITCH_LABELS (stmt);
3799 n = TREE_VEC_LENGTH (vec);
3801 /* Mark all the destination basic blocks. */
3802 for (i = 0; i < n; ++i)
3804 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3805 basic_block label_bb = label_to_block (lab);
3807 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
3808 label_bb->aux = (void *)1;
3811 /* Verify that the case labels are sorted. */
3812 prev = TREE_VEC_ELT (vec, 0);
3813 for (i = 1; i < n - 1; ++i)
3815 tree c = TREE_VEC_ELT (vec, i);
3816 if (! CASE_LOW (c))
3818 error ("found default case not at end of case vector");
3819 err = 1;
3820 continue;
3822 if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
3824 error ("case labels not sorted:");
3825 print_generic_expr (stderr, prev, 0);
3826 fprintf (stderr," is greater than ");
3827 print_generic_expr (stderr, c, 0);
3828 fprintf (stderr," but comes before it.\n");
3829 err = 1;
3831 prev = c;
3833 if (CASE_LOW (TREE_VEC_ELT (vec, n - 1)))
3835 error ("no default case found at end of case vector");
3836 err = 1;
3839 FOR_EACH_EDGE (e, ei, bb->succs)
3841 if (!e->dest->aux)
3843 error ("extra outgoing edge %d->%d",
3844 bb->index, e->dest->index);
3845 err = 1;
3847 e->dest->aux = (void *)2;
3848 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
3849 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3851 error ("wrong outgoing edge flags at end of bb %d",
3852 bb->index);
3853 err = 1;
3857 /* Check that we have all of them. */
3858 for (i = 0; i < n; ++i)
3860 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3861 basic_block label_bb = label_to_block (lab);
3863 if (label_bb->aux != (void *)2)
3865 error ("missing edge %i->%i",
3866 bb->index, label_bb->index);
3867 err = 1;
3871 FOR_EACH_EDGE (e, ei, bb->succs)
3872 e->dest->aux = (void *)0;
3875 default: ;
3879 if (dom_computed[CDI_DOMINATORS] >= DOM_NO_FAST_QUERY)
3880 verify_dominators (CDI_DOMINATORS);
3882 return err;
3886 /* Updates phi nodes after creating a forwarder block joined
3887 by edge FALLTHRU. */
3889 static void
3890 tree_make_forwarder_block (edge fallthru)
3892 edge e;
3893 edge_iterator ei;
3894 basic_block dummy, bb;
3895 tree phi, new_phi, var;
3897 dummy = fallthru->src;
3898 bb = fallthru->dest;
3900 if (single_pred_p (bb))
3901 return;
3903 /* If we redirected a branch we must create new phi nodes at the
3904 start of BB. */
3905 for (phi = phi_nodes (dummy); phi; phi = PHI_CHAIN (phi))
3907 var = PHI_RESULT (phi);
3908 new_phi = create_phi_node (var, bb);
3909 SSA_NAME_DEF_STMT (var) = new_phi;
3910 SET_PHI_RESULT (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
3911 add_phi_arg (new_phi, PHI_RESULT (phi), fallthru);
3914 /* Ensure that the PHI node chain is in the same order. */
3915 set_phi_nodes (bb, phi_reverse (phi_nodes (bb)));
3917 /* Add the arguments we have stored on edges. */
3918 FOR_EACH_EDGE (e, ei, bb->preds)
3920 if (e == fallthru)
3921 continue;
3923 flush_pending_stmts (e);
3928 /* Return a non-special label in the head of basic block BB.
3929 Create one if it doesn't exist. */
3931 tree
3932 tree_block_label (basic_block bb)
3934 block_stmt_iterator i, s = bsi_start (bb);
3935 bool first = true;
3936 tree label, stmt;
3938 for (i = s; !bsi_end_p (i); first = false, bsi_next (&i))
3940 stmt = bsi_stmt (i);
3941 if (TREE_CODE (stmt) != LABEL_EXPR)
3942 break;
3943 label = LABEL_EXPR_LABEL (stmt);
3944 if (!DECL_NONLOCAL (label))
3946 if (!first)
3947 bsi_move_before (&i, &s);
3948 return label;
3952 label = create_artificial_label ();
3953 stmt = build1 (LABEL_EXPR, void_type_node, label);
3954 bsi_insert_before (&s, stmt, BSI_NEW_STMT);
3955 return label;
3959 /* Attempt to perform edge redirection by replacing a possibly complex
3960 jump instruction by a goto or by removing the jump completely.
3961 This can apply only if all edges now point to the same block. The
3962 parameters and return values are equivalent to
3963 redirect_edge_and_branch. */
3965 static edge
3966 tree_try_redirect_by_replacing_jump (edge e, basic_block target)
3968 basic_block src = e->src;
3969 block_stmt_iterator b;
3970 tree stmt;
3972 /* We can replace or remove a complex jump only when we have exactly
3973 two edges. */
3974 if (EDGE_COUNT (src->succs) != 2
3975 /* Verify that all targets will be TARGET. Specifically, the
3976 edge that is not E must also go to TARGET. */
3977 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
3978 return NULL;
3980 b = bsi_last (src);
3981 if (bsi_end_p (b))
3982 return NULL;
3983 stmt = bsi_stmt (b);
3985 if (TREE_CODE (stmt) == COND_EXPR
3986 || TREE_CODE (stmt) == SWITCH_EXPR)
3988 bsi_remove (&b);
3989 e = ssa_redirect_edge (e, target);
3990 e->flags = EDGE_FALLTHRU;
3991 return e;
3994 return NULL;
3998 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
3999 edge representing the redirected branch. */
4001 static edge
4002 tree_redirect_edge_and_branch (edge e, basic_block dest)
4004 basic_block bb = e->src;
4005 block_stmt_iterator bsi;
4006 edge ret;
4007 tree label, stmt;
4009 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4010 return NULL;
4012 if (e->src != ENTRY_BLOCK_PTR
4013 && (ret = tree_try_redirect_by_replacing_jump (e, dest)))
4014 return ret;
4016 if (e->dest == dest)
4017 return NULL;
4019 label = tree_block_label (dest);
4021 bsi = bsi_last (bb);
4022 stmt = bsi_end_p (bsi) ? NULL : bsi_stmt (bsi);
4024 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
4026 case COND_EXPR:
4027 stmt = (e->flags & EDGE_TRUE_VALUE
4028 ? COND_EXPR_THEN (stmt)
4029 : COND_EXPR_ELSE (stmt));
4030 GOTO_DESTINATION (stmt) = label;
4031 break;
4033 case GOTO_EXPR:
4034 /* No non-abnormal edges should lead from a non-simple goto, and
4035 simple ones should be represented implicitly. */
4036 gcc_unreachable ();
4038 case SWITCH_EXPR:
4040 tree cases = get_cases_for_edge (e, stmt);
4042 /* If we have a list of cases associated with E, then use it
4043 as it's a lot faster than walking the entire case vector. */
4044 if (cases)
4046 edge e2 = find_edge (e->src, dest);
4047 tree last, first;
4049 first = cases;
4050 while (cases)
4052 last = cases;
4053 CASE_LABEL (cases) = label;
4054 cases = TREE_CHAIN (cases);
4057 /* If there was already an edge in the CFG, then we need
4058 to move all the cases associated with E to E2. */
4059 if (e2)
4061 tree cases2 = get_cases_for_edge (e2, stmt);
4063 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4064 TREE_CHAIN (cases2) = first;
4067 else
4069 tree vec = SWITCH_LABELS (stmt);
4070 size_t i, n = TREE_VEC_LENGTH (vec);
4072 for (i = 0; i < n; i++)
4074 tree elt = TREE_VEC_ELT (vec, i);
4076 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4077 CASE_LABEL (elt) = label;
4081 break;
4084 case RETURN_EXPR:
4085 bsi_remove (&bsi);
4086 e->flags |= EDGE_FALLTHRU;
4087 break;
4089 default:
4090 /* Otherwise it must be a fallthru edge, and we don't need to
4091 do anything besides redirecting it. */
4092 gcc_assert (e->flags & EDGE_FALLTHRU);
4093 break;
4096 /* Update/insert PHI nodes as necessary. */
4098 /* Now update the edges in the CFG. */
4099 e = ssa_redirect_edge (e, dest);
4101 return e;
4105 /* Simple wrapper, as we can always redirect fallthru edges. */
4107 static basic_block
4108 tree_redirect_edge_and_branch_force (edge e, basic_block dest)
4110 e = tree_redirect_edge_and_branch (e, dest);
4111 gcc_assert (e);
4113 return NULL;
4117 /* Splits basic block BB after statement STMT (but at least after the
4118 labels). If STMT is NULL, BB is split just after the labels. */
4120 static basic_block
4121 tree_split_block (basic_block bb, void *stmt)
4123 block_stmt_iterator bsi, bsi_tgt;
4124 tree act;
4125 basic_block new_bb;
4126 edge e;
4127 edge_iterator ei;
4129 new_bb = create_empty_bb (bb);
4131 /* Redirect the outgoing edges. */
4132 new_bb->succs = bb->succs;
4133 bb->succs = NULL;
4134 FOR_EACH_EDGE (e, ei, new_bb->succs)
4135 e->src = new_bb;
4137 if (stmt && TREE_CODE ((tree) stmt) == LABEL_EXPR)
4138 stmt = NULL;
4140 /* Move everything from BSI to the new basic block. */
4141 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4143 act = bsi_stmt (bsi);
4144 if (TREE_CODE (act) == LABEL_EXPR)
4145 continue;
4147 if (!stmt)
4148 break;
4150 if (stmt == act)
4152 bsi_next (&bsi);
4153 break;
4157 bsi_tgt = bsi_start (new_bb);
4158 while (!bsi_end_p (bsi))
4160 act = bsi_stmt (bsi);
4161 bsi_remove (&bsi);
4162 bsi_insert_after (&bsi_tgt, act, BSI_NEW_STMT);
4165 return new_bb;
4169 /* Moves basic block BB after block AFTER. */
4171 static bool
4172 tree_move_block_after (basic_block bb, basic_block after)
4174 if (bb->prev_bb == after)
4175 return true;
4177 unlink_block (bb);
4178 link_block (bb, after);
4180 return true;
4184 /* Return true if basic_block can be duplicated. */
4186 static bool
4187 tree_can_duplicate_bb_p (basic_block bb ATTRIBUTE_UNUSED)
4189 return true;
4193 /* Create a duplicate of the basic block BB. NOTE: This does not
4194 preserve SSA form. */
4196 static basic_block
4197 tree_duplicate_bb (basic_block bb)
4199 basic_block new_bb;
4200 block_stmt_iterator bsi, bsi_tgt;
4201 tree phi;
4203 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
4205 /* Copy the PHI nodes. We ignore PHI node arguments here because
4206 the incoming edges have not been set up yet. */
4207 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4209 tree copy = create_phi_node (PHI_RESULT (phi), new_bb);
4210 create_new_def_for (PHI_RESULT (copy), copy, PHI_RESULT_PTR (copy));
4213 /* Keep the chain of PHI nodes in the same order so that they can be
4214 updated by ssa_redirect_edge. */
4215 set_phi_nodes (new_bb, phi_reverse (phi_nodes (new_bb)));
4217 bsi_tgt = bsi_start (new_bb);
4218 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4220 def_operand_p def_p;
4221 ssa_op_iter op_iter;
4222 tree stmt, copy;
4223 int region;
4225 stmt = bsi_stmt (bsi);
4226 if (TREE_CODE (stmt) == LABEL_EXPR)
4227 continue;
4229 /* Create a new copy of STMT and duplicate STMT's virtual
4230 operands. */
4231 copy = unshare_expr (stmt);
4232 bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
4233 copy_virtual_operands (copy, stmt);
4234 region = lookup_stmt_eh_region (stmt);
4235 if (region >= 0)
4236 add_stmt_to_eh_region (copy, region);
4238 /* Create new names for all the definitions created by COPY and
4239 add replacement mappings for each new name. */
4240 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
4241 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
4244 return new_bb;
4248 /* Basic block BB_COPY was created by code duplication. Add phi node
4249 arguments for edges going out of BB_COPY. The blocks that were
4250 duplicated have BB_DUPLICATED set. */
4252 void
4253 add_phi_args_after_copy_bb (basic_block bb_copy)
4255 basic_block bb, dest;
4256 edge e, e_copy;
4257 edge_iterator ei;
4258 tree phi, phi_copy, phi_next, def;
4260 bb = get_bb_original (bb_copy);
4262 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
4264 if (!phi_nodes (e_copy->dest))
4265 continue;
4267 if (e_copy->dest->flags & BB_DUPLICATED)
4268 dest = get_bb_original (e_copy->dest);
4269 else
4270 dest = e_copy->dest;
4272 e = find_edge (bb, dest);
4273 if (!e)
4275 /* During loop unrolling the target of the latch edge is copied.
4276 In this case we are not looking for the edge to DEST, but for
4277 the edge to the duplicated block whose original was DEST. */
4278 FOR_EACH_EDGE (e, ei, bb->succs)
4279 if ((e->dest->flags & BB_DUPLICATED)
4280 && get_bb_original (e->dest) == dest)
4281 break;
4283 gcc_assert (e != NULL);
4286 for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
4287 phi;
4288 phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
4290 phi_next = PHI_CHAIN (phi);
4291 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4292 add_phi_arg (phi_copy, def, e_copy);
4297 /* Blocks in REGION_COPY array of length N_REGION were created by
4298 duplication of basic blocks. Add phi node arguments for edges
4299 going from these blocks. */
4301 void
4302 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region)
4304 unsigned i;
4306 for (i = 0; i < n_region; i++)
4307 region_copy[i]->flags |= BB_DUPLICATED;
4309 for (i = 0; i < n_region; i++)
4310 add_phi_args_after_copy_bb (region_copy[i]);
4312 for (i = 0; i < n_region; i++)
4313 region_copy[i]->flags &= ~BB_DUPLICATED;
4316 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
4317 important exit edge EXIT. By important we mean that no SSA name defined
4318 inside the region is live over the other exit edges of the region. All entry
4319 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
4320 to the duplicate of the region. SSA form is not updated, but dominance
4321 and loop information is. The new basic blocks are stored to REGION_COPY
4322 in the same order as they had in REGION, provided that REGION_COPY is not
4323 NULL. The function returns false if it is unable to copy the region,
4324 true otherwise. */
4326 bool
4327 tree_duplicate_sese_region (edge entry, edge exit,
4328 basic_block *region, unsigned n_region,
4329 basic_block *region_copy)
4331 unsigned i, n_doms;
4332 bool free_region_copy = false, copying_header = false;
4333 struct loop *loop = entry->dest->loop_father;
4334 edge exit_copy;
4335 basic_block *doms;
4336 edge redirected;
4337 int total_freq = 0, entry_freq = 0;
4338 gcov_type total_count = 0, entry_count = 0;
4340 if (!can_copy_bbs_p (region, n_region))
4341 return false;
4343 /* Some sanity checking. Note that we do not check for all possible
4344 misuses of the functions. I.e., if you ask to copy something weird,
4345 it will work, but the state of structures probably will not be
4346 correct. */
4347 for (i = 0; i < n_region; i++)
4349 /* We do not handle subloops, i.e. all the blocks must belong to the
4350 same loop. */
4351 if (region[i]->loop_father != loop)
4352 return false;
4354 if (region[i] != entry->dest
4355 && region[i] == loop->header)
4356 return false;
4359 loop->copy = loop;
4361 /* In case the function is used for loop header copying (which is the primary
4362 use), ensure that EXIT and its copy will be new latch and entry edges. */
4363 if (loop->header == entry->dest)
4365 copying_header = true;
4366 loop->copy = loop->outer;
4368 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
4369 return false;
4371 for (i = 0; i < n_region; i++)
4372 if (region[i] != exit->src
4373 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
4374 return false;
4377 if (!region_copy)
4379 region_copy = xmalloc (sizeof (basic_block) * n_region);
4380 free_region_copy = true;
4383 /* gcc_assert (!need_ssa_update_p ()); */
4385 /* Record blocks outside the region that are dominated by something
4386 inside. */
4387 doms = xmalloc (sizeof (basic_block) * n_basic_blocks);
4388 initialize_original_copy_tables ();
4390 n_doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region, doms);
4392 if (entry->dest->count)
4394 total_count = entry->dest->count;
4395 entry_count = entry->count;
4396 /* Fix up corner cases, to avoid division by zero or creation of negative
4397 frequencies. */
4398 if (entry_count > total_count)
4399 entry_count = total_count;
4401 else
4403 total_freq = entry->dest->frequency;
4404 entry_freq = EDGE_FREQUENCY (entry);
4405 /* Fix up corner cases, to avoid division by zero or creation of negative
4406 frequencies. */
4407 if (total_freq == 0)
4408 total_freq = 1;
4409 else if (entry_freq > total_freq)
4410 entry_freq = total_freq;
4413 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
4414 split_edge_bb_loc (entry));
4415 if (total_count)
4417 scale_bbs_frequencies_gcov_type (region, n_region,
4418 total_count - entry_count,
4419 total_count);
4420 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
4421 total_count);
4423 else
4425 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
4426 total_freq);
4427 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
4430 if (copying_header)
4432 loop->header = exit->dest;
4433 loop->latch = exit->src;
4436 /* Redirect the entry and add the phi node arguments. */
4437 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
4438 gcc_assert (redirected != NULL);
4439 flush_pending_stmts (entry);
4441 /* Concerning updating of dominators: We must recount dominators
4442 for entry block and its copy. Anything that is outside of the
4443 region, but was dominated by something inside needs recounting as
4444 well. */
4445 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
4446 doms[n_doms++] = get_bb_original (entry->dest);
4447 iterate_fix_dominators (CDI_DOMINATORS, doms, n_doms);
4448 free (doms);
4450 /* Add the other PHI node arguments. */
4451 add_phi_args_after_copy (region_copy, n_region);
4453 if (free_region_copy)
4454 free (region_copy);
4456 free_original_copy_tables ();
4457 return true;
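/* Illustrative sketch: roughly how a loop-header-copying pass might use
   tree_duplicate_sese_region above.  The region collection below is
   simplified and the variable `loop' is a hypothetical placeholder; the
   entry/exit preconditions documented before the function are assumed to
   hold.  Kept out of the build on purpose.  */
#if 0
{
  edge entry = loop_preheader_edge (loop);   /* all entries reach the header */
  edge exit = EDGE_SUCC (loop->header, 0);   /* hypothetical single important exit */
  basic_block bbs[1] = { loop->header };     /* region: just the header here */

  if (tree_duplicate_sese_region (entry, exit, bbs, 1, NULL))
    /* The region was copied and ENTRY redirected to the copy; SSA form
       still has to be brought up to date by the caller.  */
    update_ssa (TODO_update_ssa);
}
#endif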
4461 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h) */
4463 void
4464 dump_function_to_file (tree fn, FILE *file, int flags)
4466 tree arg, vars, var;
4467 bool ignore_topmost_bind = false, any_var = false;
4468 basic_block bb;
4469 tree chain;
4471 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
4473 arg = DECL_ARGUMENTS (fn);
4474 while (arg)
4476 print_generic_expr (file, arg, dump_flags);
4477 if (TREE_CHAIN (arg))
4478 fprintf (file, ", ");
4479 arg = TREE_CHAIN (arg);
4481 fprintf (file, ")\n");
4483 if (flags & TDF_DETAILS)
4484 dump_eh_tree (file, DECL_STRUCT_FUNCTION (fn));
4485 if (flags & TDF_RAW)
4487 dump_node (fn, TDF_SLIM | flags, file);
4488 return;
4491 /* When GIMPLE is lowered, the variables are no longer available in
4492 BIND_EXPRs, so display them separately. */
4493 if (cfun && cfun->decl == fn && cfun->unexpanded_var_list)
4495 ignore_topmost_bind = true;
4497 fprintf (file, "{\n");
4498 for (vars = cfun->unexpanded_var_list; vars; vars = TREE_CHAIN (vars))
4500 var = TREE_VALUE (vars);
4502 print_generic_decl (file, var, flags);
4503 fprintf (file, "\n");
4505 any_var = true;
4509 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
4511 /* Make a CFG based dump. */
4512 check_bb_profile (ENTRY_BLOCK_PTR, file);
4513 if (!ignore_topmost_bind)
4514 fprintf (file, "{\n");
4516 if (any_var && n_basic_blocks)
4517 fprintf (file, "\n");
4519 FOR_EACH_BB (bb)
4520 dump_generic_bb (file, bb, 2, flags);
4522 fprintf (file, "}\n");
4523 check_bb_profile (EXIT_BLOCK_PTR, file);
4525 else
4527 int indent;
4529 /* Make a tree based dump. */
4530 chain = DECL_SAVED_TREE (fn);
4532 if (TREE_CODE (chain) == BIND_EXPR)
4534 if (ignore_topmost_bind)
4536 chain = BIND_EXPR_BODY (chain);
4537 indent = 2;
4539 else
4540 indent = 0;
4542 else
4544 if (!ignore_topmost_bind)
4545 fprintf (file, "{\n");
4546 indent = 2;
4549 if (any_var)
4550 fprintf (file, "\n");
4552 print_generic_stmt_indented (file, chain, flags, indent);
4553 if (ignore_topmost_bind)
4554 fprintf (file, "}\n");
4557 fprintf (file, "\n\n");
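/* Illustrative usage sketch: dumping the function currently being compiled
   to stderr with extra detail, e.g. from a debugger session.  Assumes cfun
   and current_function_decl are set up.  */
#if 0
  dump_function_to_file (current_function_decl, stderr, TDF_DETAILS);
#endif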
4561 /* Pretty printing of the loops' intermediate representation. */
4562 static void print_loop (FILE *, struct loop *, int);
4563 static void print_pred_bbs (FILE *, basic_block bb);
4564 static void print_succ_bbs (FILE *, basic_block bb);
4567 /* Print on FILE the indexes for the predecessors of basic_block BB. */
4569 static void
4570 print_pred_bbs (FILE *file, basic_block bb)
4572 edge e;
4573 edge_iterator ei;
4575 FOR_EACH_EDGE (e, ei, bb->preds)
4576 fprintf (file, "bb_%d ", e->src->index);
4580 /* Print on FILE the indexes for the successors of basic_block BB. */
4582 static void
4583 print_succ_bbs (FILE *file, basic_block bb)
4585 edge e;
4586 edge_iterator ei;
4588 FOR_EACH_EDGE (e, ei, bb->succs)
4589 fprintf (file, "bb_%d ", e->dest->index);
4593 /* Pretty print LOOP on FILE, indented INDENT spaces. */
4595 static void
4596 print_loop (FILE *file, struct loop *loop, int indent)
4598 char *s_indent;
4599 basic_block bb;
4601 if (loop == NULL)
4602 return;
4604 s_indent = (char *) alloca ((size_t) indent + 1);
4605 memset ((void *) s_indent, ' ', (size_t) indent);
4606 s_indent[indent] = '\0';
4608 /* Print the loop's header. */
4609 fprintf (file, "%sloop_%d\n", s_indent, loop->num);
4611 /* Print the loop's body. */
4612 fprintf (file, "%s{\n", s_indent);
4613 FOR_EACH_BB (bb)
4614 if (bb->loop_father == loop)
4616 /* Print the basic_block's header. */
4617 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
4618 print_pred_bbs (file, bb);
4619 fprintf (file, "}, succs = {");
4620 print_succ_bbs (file, bb);
4621 fprintf (file, "})\n");
4623 /* Print the basic_block's body. */
4624 fprintf (file, "%s {\n", s_indent);
4625 tree_dump_bb (bb, file, indent + 4);
4626 fprintf (file, "%s }\n", s_indent);
4629 print_loop (file, loop->inner, indent + 2);
4630 fprintf (file, "%s}\n", s_indent);
4631 print_loop (file, loop->next, indent);
4635 /* Follow a CFG edge from the entry point of the program, and on entry
4636 of a loop, pretty print the loop structure on FILE. */
4638 void
4639 print_loop_ir (FILE *file)
4641 basic_block bb;
4643 bb = BASIC_BLOCK (0);
4644 if (bb && bb->loop_father)
4645 print_loop (file, bb->loop_father, 0);
4649 /* Debug the loop structure at the tree level. */
4651 void
4652 debug_loop_ir (void)
4654 print_loop_ir (stderr);
4658 /* Return true if BB ends with a call, possibly followed by some
4659 instructions that must stay with the call. Return false
4660 otherwise. */
4662 static bool
4663 tree_block_ends_with_call_p (basic_block bb)
4665 block_stmt_iterator bsi = bsi_last (bb);
4666 return get_call_expr_in (bsi_stmt (bsi)) != NULL;
4670 /* Return true if BB ends with a conditional branch. Return false
4671 otherwise. */
4673 static bool
4674 tree_block_ends_with_condjump_p (basic_block bb)
4676 tree stmt = last_stmt (bb);
4677 return (stmt && TREE_CODE (stmt) == COND_EXPR);
4681 /* Return true if we need to add fake edge to exit at statement T.
4682 Helper function for tree_flow_call_edges_add. */
4684 static bool
4685 need_fake_edge_p (tree t)
4687 tree call;
4689 /* NORETURN and LONGJMP calls already have an edge to exit.
4690 CONST and PURE calls do not need one.
4691 We don't currently check for CONST and PURE here, although
4692 it would be a good idea, because those attributes are
4693 figured out from the RTL in mark_constant_function, and
4694 the counter incrementation code from -fprofile-arcs
4695 leads to different results from -fbranch-probabilities. */
4696 call = get_call_expr_in (t);
4697 if (call
4698 && !(call_expr_flags (call) & ECF_NORETURN))
4699 return true;
4701 if (TREE_CODE (t) == ASM_EXPR
4702 && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
4703 return true;
4705 return false;
4709 /* Add fake edges to the function exit for any non-constant and
4710 non-noreturn calls, and for volatile inline assembly, in the bitmap of blocks
4711 specified by BLOCKS or to the whole CFG if BLOCKS is zero. Return
4712 the number of blocks that were split.
4714 The goal is to expose cases in which entering a basic block does
4715 not imply that all subsequent instructions must be executed. */
4717 static int
4718 tree_flow_call_edges_add (sbitmap blocks)
4720 int i;
4721 int blocks_split = 0;
4722 int last_bb = last_basic_block;
4723 bool check_last_block = false;
4725 if (n_basic_blocks == 0)
4726 return 0;
4728 if (! blocks)
4729 check_last_block = true;
4730 else
4731 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
4733 /* In the last basic block, before epilogue generation, there will be
4734 a fallthru edge to EXIT. Special care is required if the last insn
4735 of the last basic block is a call because make_edge folds duplicate
4736 edges, which would result in the fallthru edge also being marked
4737 fake, which would result in the fallthru edge being removed by
4738 remove_fake_edges, which would result in an invalid CFG.
4740 Moreover, we can't elide the outgoing fake edge, since the block
4741 profiler needs to take this into account in order to solve the minimal
4742 spanning tree in the case that the call doesn't return.
4744 Handle this by adding a dummy instruction in a new last basic block. */
4745 if (check_last_block)
4747 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
4748 block_stmt_iterator bsi = bsi_last (bb);
4749 tree t = NULL_TREE;
4750 if (!bsi_end_p (bsi))
4751 t = bsi_stmt (bsi);
4753 if (t && need_fake_edge_p (t))
4755 edge e;
4757 e = find_edge (bb, EXIT_BLOCK_PTR);
4758 if (e)
4760 bsi_insert_on_edge (e, build_empty_stmt ());
4761 bsi_commit_edge_inserts ();
4766 /* Now add fake edges to the function exit for any non-constant
4767 calls since there is no way that we can determine if they will
4768 return or not... */
4769 for (i = 0; i < last_bb; i++)
4771 basic_block bb = BASIC_BLOCK (i);
4772 block_stmt_iterator bsi;
4773 tree stmt, last_stmt;
4775 if (!bb)
4776 continue;
4778 if (blocks && !TEST_BIT (blocks, i))
4779 continue;
4781 bsi = bsi_last (bb);
4782 if (!bsi_end_p (bsi))
4784 last_stmt = bsi_stmt (bsi);
4787 stmt = bsi_stmt (bsi);
4788 if (need_fake_edge_p (stmt))
4790 edge e;
4791 /* The handling above of the final block before the
4792 epilogue should be enough to verify that there is
4793 no edge to the exit block in CFG already.
4794 Calling make_edge in such a case would cause us to
4795 mark that edge as fake and remove it later. */
4796 #ifdef ENABLE_CHECKING
4797 if (stmt == last_stmt)
4799 e = find_edge (bb, EXIT_BLOCK_PTR);
4800 gcc_assert (e == NULL);
4802 #endif
4804 /* Note that the following may create a new basic block
4805 and renumber the existing basic blocks. */
4806 if (stmt != last_stmt)
4808 e = split_block (bb, stmt);
4809 if (e)
4810 blocks_split++;
4812 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
4814 bsi_prev (&bsi);
4816 while (!bsi_end_p (bsi));
4820 if (blocks_split)
4821 verify_flow_info ();
4823 return blocks_split;
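/* Illustrative sketch: how a profiling-style client might request fake
   call edges for every current block.  It is assumed here that the generic
   wrapper flow_call_edges_add from cfghooks dispatches to
   tree_flow_call_edges_add when the tree hooks are installed; the snippet
   is kept out of the build.  */
#if 0
{
  sbitmap blocks = sbitmap_alloc (last_basic_block);
  int n_split;

  sbitmap_ones (blocks);                  /* consider every current block */
  n_split = flow_call_edges_add (blocks); /* split after calls, add fake edges */
  sbitmap_free (blocks);
}
#endif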
4826 bool
4827 tree_purge_dead_eh_edges (basic_block bb)
4829 bool changed = false;
4830 edge e;
4831 edge_iterator ei;
4832 tree stmt = last_stmt (bb);
4834 if (stmt && tree_can_throw_internal (stmt))
4835 return false;
4837 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4839 if (e->flags & EDGE_EH)
4841 remove_edge (e);
4842 changed = true;
4844 else
4845 ei_next (&ei);
4848 /* Removal of dead EH edges might change dominators of not
4849 just immediate successors. E.g. when bb1 is changed so that
4850 it no longer can throw and bb1->bb3 and bb1->bb4 are dead
4851 eh edges purged by this function in:
4855       1-->2
4856      / \  |
4857     v   v |
4858     3-->4 |
4859      \    v
4860       --->5
4863 idom(bb5) must be recomputed. For now just free the dominance
4864 info. */
4865 if (changed)
4866 free_dominance_info (CDI_DOMINATORS);
4868 return changed;
4871 bool
4872 tree_purge_all_dead_eh_edges (bitmap blocks)
4874 bool changed = false;
4875 unsigned i;
4876 bitmap_iterator bi;
4878 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
4880 changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
4883 return changed;
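/* Illustrative sketch: a pass that has folded away trapping statements
   could purge the now-dead EH edges of the blocks it touched like this.
   The `touched' bitmap and the block `bb' are hypothetical placeholders;
   kept out of the build.  */
#if 0
{
  bitmap touched = BITMAP_ALLOC (NULL);

  bitmap_set_bit (touched, bb->index);     /* record each modified block */
  if (tree_purge_all_dead_eh_edges (touched))
    cleanup_tree_cfg ();                   /* removed edges may expose dead
					      or mergeable blocks */
  BITMAP_FREE (touched);
}
#endif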
4886 /* This function is called whenever a new edge is created or
4887 redirected. */
4889 static void
4890 tree_execute_on_growing_pred (edge e)
4892 basic_block bb = e->dest;
4894 if (phi_nodes (bb))
4895 reserve_phi_args_for_new_edge (bb);
4898 /* This function is called immediately before edge E is removed from
4899 the edge vector E->dest->preds. */
4901 static void
4902 tree_execute_on_shrinking_pred (edge e)
4904 if (phi_nodes (e->dest))
4905 remove_phi_args (e);
4908 /*---------------------------------------------------------------------------
4909 Helper functions for Loop versioning
4910 ---------------------------------------------------------------------------*/
4912 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
4913 of 'first'. Both of them are dominated by 'new_head' basic block. When
4914 'new_head' was created by 'second's incoming edge it received phi arguments
4915 on the edge by split_edge(). Later, additional edge 'e' was created to
4916 connect 'new_head' and 'first'. Now this routine adds, on this additional
4917 edge 'e', the phi args that the edge from 'new_head' to 'second' received
4918 as part of the edge splitting. */
4921 static void
4922 tree_lv_adjust_loop_header_phi (basic_block first, basic_block second,
4923 basic_block new_head, edge e)
4925 tree phi1, phi2;
4926 edge e2 = find_edge (new_head, second);
4928 /* Because NEW_HEAD has been created by splitting SECOND's incoming
4929 edge, we should always have an edge from NEW_HEAD to SECOND. */
4930 gcc_assert (e2 != NULL);
4932 /* Browse all 'second' basic block phi nodes and add phi args to
4933 edge 'e' for 'first' head. PHI args are always in correct order. */
4935 for (phi2 = phi_nodes (second), phi1 = phi_nodes (first);
4936 phi2 && phi1;
4937 phi2 = PHI_CHAIN (phi2), phi1 = PHI_CHAIN (phi1))
4939 tree def = PHI_ARG_DEF (phi2, e2->dest_idx);
4940 add_phi_arg (phi1, def, e);
4944 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
4945 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
4946 the destination of the ELSE part. */
4947 static void
4948 tree_lv_add_condition_to_bb (basic_block first_head, basic_block second_head,
4949 basic_block cond_bb, void *cond_e)
4951 block_stmt_iterator bsi;
4952 tree goto1 = NULL_TREE;
4953 tree goto2 = NULL_TREE;
4954 tree new_cond_expr = NULL_TREE;
4955 tree cond_expr = (tree) cond_e;
4956 edge e0;
4958 /* Build new conditional expr */
4959 goto1 = build1 (GOTO_EXPR, void_type_node, tree_block_label (first_head));
4960 goto2 = build1 (GOTO_EXPR, void_type_node, tree_block_label (second_head));
4961 new_cond_expr = build3 (COND_EXPR, void_type_node, cond_expr, goto1, goto2);
4963 /* Add new cond in cond_bb. */
4964 bsi = bsi_start (cond_bb);
4965 bsi_insert_after (&bsi, new_cond_expr, BSI_NEW_STMT);
4966 /* Adjust edges appropriately to connect new head with first head
4967 as well as second head. */
4968 e0 = single_succ_edge (cond_bb);
4969 e0->flags &= ~EDGE_FALLTHRU;
4970 e0->flags |= EDGE_FALSE_VALUE;
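/* Illustrative sketch: the shape of the calls the generic loop-versioning
   code is assumed to make through the lv_* slots of tree_cfg_hooks below,
   once COND_BB has been split off in front of both loop copies.  All
   argument names are hypothetical placeholders; kept out of the build.  */
#if 0
  /* Insert "if (cond_expr) goto second_head; else goto first_head;".  */
  tree_lv_add_condition_to_bb (first_head, second_head, cond_bb, cond_expr);
  /* Copy the PHI arguments of the edge NEW_HEAD->SECOND onto edge E.  */
  tree_lv_adjust_loop_header_phi (first_head, second_head, new_head, e);
#endif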
4973 struct cfg_hooks tree_cfg_hooks = {
4974 "tree",
4975 tree_verify_flow_info,
4976 tree_dump_bb, /* dump_bb */
4977 create_bb, /* create_basic_block */
4978 tree_redirect_edge_and_branch,/* redirect_edge_and_branch */
4979 tree_redirect_edge_and_branch_force,/* redirect_edge_and_branch_force */
4980 remove_bb, /* delete_basic_block */
4981 tree_split_block, /* split_block */
4982 tree_move_block_after, /* move_block_after */
4983 tree_can_merge_blocks_p, /* can_merge_blocks_p */
4984 tree_merge_blocks, /* merge_blocks */
4985 tree_predict_edge, /* predict_edge */
4986 tree_predicted_by_p, /* predicted_by_p */
4987 tree_can_duplicate_bb_p, /* can_duplicate_block_p */
4988 tree_duplicate_bb, /* duplicate_block */
4989 tree_split_edge, /* split_edge */
4990 tree_make_forwarder_block, /* make_forwarder_block */
4991 NULL, /* tidy_fallthru_edge */
4992 tree_block_ends_with_call_p, /* block_ends_with_call_p */
4993 tree_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
4994 tree_flow_call_edges_add, /* flow_call_edges_add */
4995 tree_execute_on_growing_pred, /* execute_on_growing_pred */
4996 tree_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
4997 tree_duplicate_loop_to_header_edge, /* duplicate loop for trees */
4998 tree_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
4999 tree_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
5000 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
5001 flush_pending_stmts /* flush_pending_stmts */
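/* Illustrative sketch: once these hooks are installed, hook-agnostic CFG
   code dispatches through the table, e.g. as below.  The edge `e' is a
   hypothetical placeholder; kept out of the build.  */
#if 0
  tree_register_cfg_hooks ();   /* select tree_cfg_hooks */
  e2 = split_edge (e);          /* the cfghooks wrapper forwards to
				   tree_split_edge */
#endif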
5005 /* Split all critical edges. */
5007 static void
5008 split_critical_edges (void)
5010 basic_block bb;
5011 edge e;
5012 edge_iterator ei;
5014 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
5015 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
5016 mappings around the calls to split_edge. */
5017 start_recording_case_labels ();
5018 FOR_ALL_BB (bb)
5020 FOR_EACH_EDGE (e, ei, bb->succs)
5021 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
5023 split_edge (e);
5026 end_recording_case_labels ();
5029 struct tree_opt_pass pass_split_crit_edges =
5031 "crited", /* name */
5032 NULL, /* gate */
5033 split_critical_edges, /* execute */
5034 NULL, /* sub */
5035 NULL, /* next */
5036 0, /* static_pass_number */
5037 TV_TREE_SPLIT_EDGES, /* tv_id */
5038 PROP_cfg, /* properties_required */
5039 PROP_no_crit_edges, /* properties_provided */
5040 0, /* properties_destroyed */
5041 0, /* todo_flags_start */
5042 TODO_dump_func, /* todo_flags_finish */
5043 0 /* letter */
5047 /* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
5048 a temporary, registering it to be renamed if necessary,
5049 and finally return the temporary. Put the statements to compute
5050 EXP before the current statement in BSI. */
5052 tree
5053 gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
5055 tree t, new_stmt, orig_stmt;
5057 if (is_gimple_val (exp))
5058 return exp;
5060 t = make_rename_temp (type, NULL);
5061 new_stmt = build (MODIFY_EXPR, type, t, exp);
5063 orig_stmt = bsi_stmt (*bsi);
5064 SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
5065 TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
5067 bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
5069 return t;
5072 /* Build a ternary operation and gimplify it. Emit code before BSI.
5073 Return the gimple_val holding the result. */
5075 tree
5076 gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
5077 tree type, tree a, tree b, tree c)
5079 tree ret;
5081 ret = fold_build3 (code, type, a, b, c);
5082 STRIP_NOPS (ret);
5084 return gimplify_val (bsi, type, ret);
5087 /* Build a binary operation and gimplify it. Emit code before BSI.
5088 Return the gimple_val holding the result. */
5090 tree
5091 gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
5092 tree type, tree a, tree b)
5094 tree ret;
5096 ret = fold_build2 (code, type, a, b);
5097 STRIP_NOPS (ret);
5099 return gimplify_val (bsi, type, ret);
5102 /* Build a unary operation and gimplify it. Emit code before BSI.
5103 Return the gimple_val holding the result. */
5105 tree
5106 gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
5107 tree a)
5109 tree ret;
5111 ret = fold_build1 (code, type, a);
5112 STRIP_NOPS (ret);
5114 return gimplify_val (bsi, type, ret);
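/* Illustrative usage sketch: emitting "(a + b) * c" as GIMPLE temporaries
   in front of the statement at *BSI using the helpers above.  The operands
   `a', `b', `c' and `type' are hypothetical GIMPLE values; kept out of the
   build.  */
#if 0
  tree sum  = gimplify_build2 (bsi, PLUS_EXPR, type, a, b);
  tree prod = gimplify_build2 (bsi, MULT_EXPR, type, sum, c);
#endif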
5119 /* Emit return warnings. */
5121 static void
5122 execute_warn_function_return (void)
5124 #ifdef USE_MAPPED_LOCATION
5125 source_location location;
5126 #else
5127 location_t *locus;
5128 #endif
5129 tree last;
5130 edge e;
5131 edge_iterator ei;
5133 /* If we have a path to EXIT, then we do return. */
5134 if (TREE_THIS_VOLATILE (cfun->decl)
5135 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
5137 #ifdef USE_MAPPED_LOCATION
5138 location = UNKNOWN_LOCATION;
5139 #else
5140 locus = NULL;
5141 #endif
5142 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5144 last = last_stmt (e->src);
5145 if (TREE_CODE (last) == RETURN_EXPR
5146 #ifdef USE_MAPPED_LOCATION
5147 && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
5148 #else
5149 && (locus = EXPR_LOCUS (last)) != NULL)
5150 #endif
5151 break;
5153 #ifdef USE_MAPPED_LOCATION
5154 if (location == UNKNOWN_LOCATION)
5155 location = cfun->function_end_locus;
5156 warning (0, "%H%<noreturn%> function does return", &location);
5157 #else
5158 if (!locus)
5159 locus = &cfun->function_end_locus;
5160 warning (0, "%H%<noreturn%> function does return", locus);
5161 #endif
5164 /* If we see "return;" in some basic block, then we do reach the end
5165 without returning a value. */
5166 else if (warn_return_type
5167 && !TREE_NO_WARNING (cfun->decl)
5168 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
5169 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
5171 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5173 tree last = last_stmt (e->src);
5174 if (TREE_CODE (last) == RETURN_EXPR
5175 && TREE_OPERAND (last, 0) == NULL
5176 && !TREE_NO_WARNING (last))
5178 #ifdef USE_MAPPED_LOCATION
5179 location = EXPR_LOCATION (last);
5180 if (location == UNKNOWN_LOCATION)
5181 location = cfun->function_end_locus;
5182 warning (0, "%Hcontrol reaches end of non-void function", &location);
5183 #else
5184 locus = EXPR_LOCUS (last);
5185 if (!locus)
5186 locus = &cfun->function_end_locus;
5187 warning (0, "%Hcontrol reaches end of non-void function", locus);
5188 #endif
5189 TREE_NO_WARNING (cfun->decl) = 1;
5190 break;
5197 /* Given a basic block B which ends with a conditional and has
5198 precisely two successors, determine which of the edges is taken if
5199 the conditional is true and which is taken if the conditional is
5200 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
5202 void
5203 extract_true_false_edges_from_block (basic_block b,
5204 edge *true_edge,
5205 edge *false_edge)
5207 edge e = EDGE_SUCC (b, 0);
5209 if (e->flags & EDGE_TRUE_VALUE)
5211 *true_edge = e;
5212 *false_edge = EDGE_SUCC (b, 1);
5214 else
5216 *false_edge = e;
5217 *true_edge = EDGE_SUCC (b, 1);
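/* Illustrative usage sketch: a pass that has proved the condition of BB's
   COND_EXPR to be always true could keep only the true edge.  The block
   `bb' is a hypothetical placeholder; kept out of the build.  */
#if 0
{
  edge true_edge, false_edge;

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  remove_edge (false_edge);     /* the false path is unreachable */
}
#endif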
5221 struct tree_opt_pass pass_warn_function_return =
5223 NULL, /* name */
5224 NULL, /* gate */
5225 execute_warn_function_return, /* execute */
5226 NULL, /* sub */
5227 NULL, /* next */
5228 0, /* static_pass_number */
5229 0, /* tv_id */
5230 PROP_cfg, /* properties_required */
5231 0, /* properties_provided */
5232 0, /* properties_destroyed */
5233 0, /* todo_flags_start */
5234 0, /* todo_flags_finish */
5235 0 /* letter */
5238 /* Emit noreturn warnings. */
5240 static void
5241 execute_warn_function_noreturn (void)
5243 if (warn_missing_noreturn
5244 && !TREE_THIS_VOLATILE (cfun->decl)
5245 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
5246 && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl))
5247 warning (OPT_Wmissing_noreturn, "%Jfunction might be possible candidate "
5248 "for attribute %<noreturn%>",
5249 cfun->decl);
5252 struct tree_opt_pass pass_warn_function_noreturn =
5254 NULL, /* name */
5255 NULL, /* gate */
5256 execute_warn_function_noreturn, /* execute */
5257 NULL, /* sub */
5258 NULL, /* next */
5259 0, /* static_pass_number */
5260 0, /* tv_id */
5261 PROP_cfg, /* properties_required */
5262 0, /* properties_provided */
5263 0, /* properties_destroyed */
5264 0, /* todo_flags_start */
5265 0, /* todo_flags_finish */
5266 0 /* letter */