gcc/tree-cfg.c
1 /* Control flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007
3 Free Software Foundation, Inc.
4 Contributed by Diego Novillo <dnovillo@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to
20 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
21 Boston, MA 02110-1301, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "rtl.h"
29 #include "tm_p.h"
30 #include "hard-reg-set.h"
31 #include "basic-block.h"
32 #include "output.h"
33 #include "flags.h"
34 #include "function.h"
35 #include "expr.h"
36 #include "ggc.h"
37 #include "langhooks.h"
38 #include "diagnostic.h"
39 #include "tree-flow.h"
40 #include "timevar.h"
41 #include "tree-dump.h"
42 #include "tree-pass.h"
43 #include "toplev.h"
44 #include "except.h"
45 #include "cfgloop.h"
46 #include "cfglayout.h"
47 #include "hashtab.h"
48 #include "tree-ssa-propagate.h"
49 #include "value-prof.h"
50 #include "pointer-set.h"
52 /* This file contains functions for building the Control Flow Graph (CFG)
53 for a function tree. */
55 /* Local declarations. */
57 /* Initial capacity for the basic block array. */
58 static const int initial_cfg_capacity = 20;
60 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
61 which use a particular edge. The CASE_LABEL_EXPRs are chained together
62 via their TREE_CHAIN field, which we clear after we're done with the
63 hash table to prevent problems with duplication of SWITCH_EXPRs.
65 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
66 update the case vector in response to edge redirections.
68 Right now this table is set up and torn down at key points in the
69 compilation process. It would be nice if we could make the table
70 more persistent. The key is getting notification of changes to
71 the CFG (particularly edge removal, creation and redirection). */
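/* A minimal usage sketch, assuming a hypothetical caller that is redirecting
   an edge of the SWITCH_EXPR STMT (E, STMT and NEW_LABEL are assumed names,
   not identifiers from this file):

     start_recording_case_labels ();
     for (t = get_cases_for_edge (e, stmt); t; t = TREE_CHAIN (t))
       CASE_LABEL (t) = new_label;
     end_recording_case_labels ();

   get_cases_for_edge returns NULL when recording is not active, so callers
   must be prepared to scan SWITCH_LABELS directly in that case.  */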
73 struct edge_to_cases_elt
75 /* The edge itself. Necessary for hashing and equality tests. */
76 edge e;
78 /* The case labels associated with this edge. We link these up via
79 their TREE_CHAIN field, then we wipe out the TREE_CHAIN fields
80 when we destroy the hash table. This prevents problems when copying
81 SWITCH_EXPRs. */
82 tree case_labels;
85 static htab_t edge_to_cases;
87 /* CFG statistics. */
88 struct cfg_stats_d
90 long num_merged_labels;
93 static struct cfg_stats_d cfg_stats;
95 /* Nonzero if we found a computed goto while building basic blocks. */
96 static bool found_computed_goto;
98 /* Basic blocks and flowgraphs. */
99 static basic_block create_bb (void *, void *, basic_block);
100 static void make_blocks (tree);
101 static void factor_computed_gotos (void);
103 /* Edges. */
104 static void make_edges (void);
105 static void make_cond_expr_edges (basic_block);
106 static void make_switch_expr_edges (basic_block);
107 static void make_goto_expr_edges (basic_block);
108 static edge tree_redirect_edge_and_branch (edge, basic_block);
109 static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
110 static unsigned int split_critical_edges (void);
112 /* Various helpers. */
113 static inline bool stmt_starts_bb_p (tree, tree);
114 static int tree_verify_flow_info (void);
115 static void tree_make_forwarder_block (edge);
116 static void tree_cfg2vcg (FILE *);
117 static inline void change_bb_for_stmt (tree t, basic_block bb);
119 /* Flowgraph optimization and cleanup. */
120 static void tree_merge_blocks (basic_block, basic_block);
121 static bool tree_can_merge_blocks_p (basic_block, basic_block);
122 static void remove_bb (basic_block);
123 static edge find_taken_edge_computed_goto (basic_block, tree);
124 static edge find_taken_edge_cond_expr (basic_block, tree);
125 static edge find_taken_edge_switch_expr (basic_block, tree);
126 static tree find_case_label_for_value (tree, tree);
128 void
129 init_empty_tree_cfg (void)
131 /* Initialize the basic block array. */
132 init_flow ();
133 profile_status = PROFILE_ABSENT;
134 n_basic_blocks = NUM_FIXED_BLOCKS;
135 last_basic_block = NUM_FIXED_BLOCKS;
136 basic_block_info = VEC_alloc (basic_block, gc, initial_cfg_capacity);
137 VEC_safe_grow_cleared (basic_block, gc, basic_block_info,
138 initial_cfg_capacity);
140 /* Build a mapping of labels to their associated blocks. */
141 label_to_block_map = VEC_alloc (basic_block, gc, initial_cfg_capacity);
142 VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
143 initial_cfg_capacity);
145 SET_BASIC_BLOCK (ENTRY_BLOCK, ENTRY_BLOCK_PTR);
146 SET_BASIC_BLOCK (EXIT_BLOCK, EXIT_BLOCK_PTR);
147 ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
148 EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
151 /*---------------------------------------------------------------------------
152 Create basic blocks
153 ---------------------------------------------------------------------------*/
155 /* Entry point to the CFG builder for trees. TP points to the list of
156 statements to be added to the flowgraph. */
158 static void
159 build_tree_cfg (tree *tp)
161 /* Register specific tree functions. */
162 tree_register_cfg_hooks ();
164 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
166 init_empty_tree_cfg ();
168 found_computed_goto = 0;
169 make_blocks (*tp);
171 /* Computed gotos are hell to deal with, especially if there are
172 lots of them with a large number of destinations. So we factor
173 them to a common computed goto location before we build the
174 edge list. After we convert back to normal form, we will un-factor
175 the computed gotos since factoring introduces an unwanted jump. */
176 if (found_computed_goto)
177 factor_computed_gotos ();
179 /* Make sure there is always at least one block, even if it's empty. */
180 if (n_basic_blocks == NUM_FIXED_BLOCKS)
181 create_empty_bb (ENTRY_BLOCK_PTR);
183 /* Adjust the size of the array. */
184 if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
185 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);
187 /* To speed up statement iterator walks, we first purge dead labels. */
188 cleanup_dead_labels ();
190 /* Group case nodes to reduce the number of edges.
191 We do this after cleaning up dead labels because otherwise we miss
192 a lot of obvious case merging opportunities. */
193 group_case_labels ();
195 /* Create the edges of the flowgraph. */
196 make_edges ();
198 /* Debugging dumps. */
200 /* Write the flowgraph to a VCG file. */
202 int local_dump_flags;
203 FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
204 if (vcg_file)
206 tree_cfg2vcg (vcg_file);
207 dump_end (TDI_vcg, vcg_file);
211 #ifdef ENABLE_CHECKING
212 verify_stmts ();
213 #endif
215 /* Dump a textual representation of the flowgraph. */
216 if (dump_file)
217 dump_tree_cfg (dump_file, dump_flags);
220 static unsigned int
221 execute_build_cfg (void)
223 build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
224 return 0;
227 struct tree_opt_pass pass_build_cfg =
229 "cfg", /* name */
230 NULL, /* gate */
231 execute_build_cfg, /* execute */
232 NULL, /* sub */
233 NULL, /* next */
234 0, /* static_pass_number */
235 TV_TREE_CFG, /* tv_id */
236 PROP_gimple_leh, /* properties_required */
237 PROP_cfg, /* properties_provided */
238 0, /* properties_destroyed */
239 0, /* todo_flags_start */
240 TODO_verify_stmts, /* todo_flags_finish */
241 0 /* letter */
244 /* Search the CFG for any computed gotos. If found, factor them to a
245 common computed goto site. Also record the location of that site so
246 that we can un-factor the gotos after we have converted back to
247 normal form. */
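/* A minimal sketch of the transformation (the GIMPLE spelling below is only
   illustrative; "gotovar" is the temporary created in the code that follows):

     goto *p_1;              becomes      gotovar = p_1;
                                          goto factored_label;
     goto *p_2;              becomes      gotovar = p_2;
                                          goto factored_label;

   with a single factored block appended to the function:

     factored_label:
       goto *gotovar;

   Only the factored computed goto then needs abnormal edges to every
   possible destination.  */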
249 static void
250 factor_computed_gotos (void)
252 basic_block bb;
253 tree factored_label_decl = NULL;
254 tree var = NULL;
255 tree factored_computed_goto_label = NULL;
256 tree factored_computed_goto = NULL;
258 /* We know there are one or more computed gotos in this function.
259 Examine the last statement in each basic block to see if the block
260 ends with a computed goto. */
262 FOR_EACH_BB (bb)
264 block_stmt_iterator bsi = bsi_last (bb);
265 tree last;
267 if (bsi_end_p (bsi))
268 continue;
269 last = bsi_stmt (bsi);
271 /* Ignore the computed goto we create when we factor the original
272 computed gotos. */
273 if (last == factored_computed_goto)
274 continue;
276 /* If the last statement is a computed goto, factor it. */
277 if (computed_goto_p (last))
279 tree assignment;
281 /* The first time we find a computed goto we need to create
282 the factored goto block and the variable each original
283 computed goto will use for its goto destination. */
284 if (! factored_computed_goto)
286 basic_block new_bb = create_empty_bb (bb);
287 block_stmt_iterator new_bsi = bsi_start (new_bb);
289 /* Create the destination of the factored goto. Each original
290 computed goto will put its desired destination into this
291 variable and jump to the label we create immediately
292 below. */
293 var = create_tmp_var (ptr_type_node, "gotovar");
295 /* Build a label for the new block which will contain the
296 factored computed goto. */
297 factored_label_decl = create_artificial_label ();
298 factored_computed_goto_label
299 = build1 (LABEL_EXPR, void_type_node, factored_label_decl);
300 bsi_insert_after (&new_bsi, factored_computed_goto_label,
301 BSI_NEW_STMT);
303 /* Build our new computed goto. */
304 factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
305 bsi_insert_after (&new_bsi, factored_computed_goto,
306 BSI_NEW_STMT);
309 /* Copy the original computed goto's destination into VAR. */
310 assignment = build2_gimple (GIMPLE_MODIFY_STMT,
311 var, GOTO_DESTINATION (last));
312 bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);
314 /* And re-vector the computed goto to the new destination. */
315 GOTO_DESTINATION (last) = factored_label_decl;
321 /* Build a flowgraph for the statement_list STMT_LIST. */
323 static void
324 make_blocks (tree stmt_list)
326 tree_stmt_iterator i = tsi_start (stmt_list);
327 tree stmt = NULL;
328 bool start_new_block = true;
329 bool first_stmt_of_list = true;
330 basic_block bb = ENTRY_BLOCK_PTR;
332 while (!tsi_end_p (i))
334 tree prev_stmt;
336 prev_stmt = stmt;
337 stmt = tsi_stmt (i);
339 /* If the statement starts a new basic block or if we have determined
340 in a previous pass that we need to create a new block for STMT, do
341 so now. */
342 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
344 if (!first_stmt_of_list)
345 stmt_list = tsi_split_statement_list_before (&i);
346 bb = create_basic_block (stmt_list, NULL, bb);
347 start_new_block = false;
350 /* Now add STMT to BB and create the subgraphs for special statement
351 codes. */
352 set_bb_for_stmt (stmt, bb);
354 if (computed_goto_p (stmt))
355 found_computed_goto = true;
357 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
358 next iteration. */
359 if (stmt_ends_bb_p (stmt))
360 start_new_block = true;
362 tsi_next (&i);
363 first_stmt_of_list = false;
368 /* Create and return a new empty basic block after bb AFTER. */
370 static basic_block
371 create_bb (void *h, void *e, basic_block after)
373 basic_block bb;
375 gcc_assert (!e);
377 /* Create and initialize a new basic block. Since alloc_block uses
378 ggc_alloc_cleared to allocate a basic block, we do not have to
379 clear the newly allocated basic block here. */
380 bb = alloc_block ();
382 bb->index = last_basic_block;
383 bb->flags = BB_NEW;
384 bb->stmt_list = h ? (tree) h : alloc_stmt_list ();
386 /* Add the new block to the linked list of blocks. */
387 link_block (bb, after);
389 /* Grow the basic block array if needed. */
390 if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
392 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
393 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
396 /* Add the newly created block to the array. */
397 SET_BASIC_BLOCK (last_basic_block, bb);
399 n_basic_blocks++;
400 last_basic_block++;
402 return bb;
406 /*---------------------------------------------------------------------------
407 Edge creation
408 ---------------------------------------------------------------------------*/
410 /* Fold COND_EXPR_COND of each COND_EXPR. */
412 void
413 fold_cond_expr_cond (void)
415 basic_block bb;
417 FOR_EACH_BB (bb)
419 tree stmt = last_stmt (bb);
421 if (stmt
422 && TREE_CODE (stmt) == COND_EXPR)
424 tree cond = fold (COND_EXPR_COND (stmt));
425 if (integer_zerop (cond))
426 COND_EXPR_COND (stmt) = boolean_false_node;
427 else if (integer_onep (cond))
428 COND_EXPR_COND (stmt) = boolean_true_node;
433 /* Join all the blocks in the flowgraph. */
435 static void
436 make_edges (void)
438 basic_block bb;
439 struct omp_region *cur_region = NULL;
441 /* Create an edge from entry to the first block with executable
442 statements in it. */
443 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);
445 /* Traverse the basic block array placing edges. */
446 FOR_EACH_BB (bb)
448 tree last = last_stmt (bb);
449 bool fallthru;
451 if (last)
453 enum tree_code code = TREE_CODE (last);
454 switch (code)
456 case GOTO_EXPR:
457 make_goto_expr_edges (bb);
458 fallthru = false;
459 break;
460 case RETURN_EXPR:
461 make_edge (bb, EXIT_BLOCK_PTR, 0);
462 fallthru = false;
463 break;
464 case COND_EXPR:
465 make_cond_expr_edges (bb);
466 fallthru = false;
467 break;
468 case SWITCH_EXPR:
469 make_switch_expr_edges (bb);
470 fallthru = false;
471 break;
472 case RESX_EXPR:
473 make_eh_edges (last);
474 fallthru = false;
475 break;
477 case CALL_EXPR:
478 /* If this function receives a nonlocal goto, then we need to
479 make edges from this call site to all the nonlocal goto
480 handlers. */
481 if (tree_can_make_abnormal_goto (last))
482 make_abnormal_goto_edges (bb, true);
484 /* If this statement has reachable exception handlers, then
485 create abnormal edges to them. */
486 make_eh_edges (last);
488 /* Some calls are known not to return. */
489 fallthru = !(call_expr_flags (last) & ECF_NORETURN);
490 break;
492 case MODIFY_EXPR:
493 gcc_unreachable ();
495 case GIMPLE_MODIFY_STMT:
496 if (is_ctrl_altering_stmt (last))
498 /* A GIMPLE_MODIFY_STMT may have a CALL_EXPR on its RHS and
499 the CALL_EXPR may have an abnormal edge. Search the RHS
500 for this case and create any required edges. */
501 if (tree_can_make_abnormal_goto (last))
502 make_abnormal_goto_edges (bb, true);
504 make_eh_edges (last);
506 fallthru = true;
507 break;
509 case OMP_PARALLEL:
510 case OMP_FOR:
511 case OMP_SINGLE:
512 case OMP_MASTER:
513 case OMP_ORDERED:
514 case OMP_CRITICAL:
515 case OMP_SECTION:
516 cur_region = new_omp_region (bb, code, cur_region);
517 fallthru = true;
518 break;
520 case OMP_SECTIONS:
521 cur_region = new_omp_region (bb, code, cur_region);
522 fallthru = false;
523 break;
525 case OMP_RETURN:
526 /* In the case of an OMP_SECTION, the edge will go somewhere
527 other than the next block. This will be created later. */
528 cur_region->exit = bb;
529 fallthru = cur_region->type != OMP_SECTION;
530 cur_region = cur_region->outer;
531 break;
533 case OMP_CONTINUE:
534 cur_region->cont = bb;
535 switch (cur_region->type)
537 case OMP_FOR:
538 /* ??? Technically there should be some sort of loopback
539 edge here, but it goes to a block that doesn't exist yet,
540 and without it, updating the ssa form would be a real
541 bear. Fortunately, we don't yet do ssa before expanding
542 these nodes. */
543 break;
545 case OMP_SECTIONS:
546 /* Wire up the edges into and out of the nested sections. */
547 /* ??? Similarly wrt loopback. */
549 struct omp_region *i;
550 for (i = cur_region->inner; i ; i = i->next)
552 gcc_assert (i->type == OMP_SECTION);
553 make_edge (cur_region->entry, i->entry, 0);
554 make_edge (i->exit, bb, EDGE_FALLTHRU);
557 break;
559 default:
560 gcc_unreachable ();
562 fallthru = true;
563 break;
565 default:
566 gcc_assert (!stmt_ends_bb_p (last));
567 fallthru = true;
570 else
571 fallthru = true;
573 if (fallthru)
574 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
577 if (root_omp_region)
578 free_omp_regions ();
580 /* Fold COND_EXPR_COND of each COND_EXPR. */
581 fold_cond_expr_cond ();
583 /* Clean up the graph and warn for unreachable code. */
584 cleanup_tree_cfg ();
588 /* Create the edges for a COND_EXPR starting at block BB.
589 At this point, both clauses must contain only simple gotos. */
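/* For example, a lowered conditional is assumed to have the shape

     if (a_1 > b_2) goto <L1>; else goto <L2>;

   (illustrative GIMPLE form), so the true and false edges created below
   simply target the blocks holding <L1> and <L2>.  */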
591 static void
592 make_cond_expr_edges (basic_block bb)
594 tree entry = last_stmt (bb);
595 basic_block then_bb, else_bb;
596 tree then_label, else_label;
597 edge e;
599 gcc_assert (entry);
600 gcc_assert (TREE_CODE (entry) == COND_EXPR);
602 /* Entry basic blocks for each component. */
603 then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
604 else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
605 then_bb = label_to_block (then_label);
606 else_bb = label_to_block (else_label);
608 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
609 #ifdef USE_MAPPED_LOCATION
610 e->goto_locus = EXPR_LOCATION (COND_EXPR_THEN (entry));
611 #else
612 e->goto_locus = EXPR_LOCUS (COND_EXPR_THEN (entry));
613 #endif
614 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
615 if (e)
617 #ifdef USE_MAPPED_LOCATION
618 e->goto_locus = EXPR_LOCATION (COND_EXPR_ELSE (entry));
619 #else
620 e->goto_locus = EXPR_LOCUS (COND_EXPR_ELSE (entry));
621 #endif
625 /* Hashing routine for EDGE_TO_CASES. */
627 static hashval_t
628 edge_to_cases_hash (const void *p)
630 edge e = ((struct edge_to_cases_elt *)p)->e;
632 /* Hash on the edge itself (which is a pointer). */
633 return htab_hash_pointer (e);
636 /* Equality routine for EDGE_TO_CASES. Edges are unique, so testing
637 for equality is just a pointer comparison. */
639 static int
640 edge_to_cases_eq (const void *p1, const void *p2)
642 edge e1 = ((struct edge_to_cases_elt *)p1)->e;
643 edge e2 = ((struct edge_to_cases_elt *)p2)->e;
645 return e1 == e2;
648 /* Called for each element in the hash table (P) as we delete the
649 edge to cases hash table.
651 Clear all the TREE_CHAINs to prevent problems with copying of
652 SWITCH_EXPRs and structure sharing rules, then free the hash table
653 element. */
655 static void
656 edge_to_cases_cleanup (void *p)
658 struct edge_to_cases_elt *elt = (struct edge_to_cases_elt *) p;
659 tree t, next;
661 for (t = elt->case_labels; t; t = next)
663 next = TREE_CHAIN (t);
664 TREE_CHAIN (t) = NULL;
666 free (p);
669 /* Start recording information mapping edges to case labels. */
671 void
672 start_recording_case_labels (void)
674 gcc_assert (edge_to_cases == NULL);
676 edge_to_cases = htab_create (37,
677 edge_to_cases_hash,
678 edge_to_cases_eq,
679 edge_to_cases_cleanup);
682 /* Return nonzero if we are recording information for case labels. */
684 static bool
685 recording_case_labels_p (void)
687 return (edge_to_cases != NULL);
690 /* Stop recording information mapping edges to case labels and
691 remove any information we have recorded. */
692 void
693 end_recording_case_labels (void)
695 htab_delete (edge_to_cases);
696 edge_to_cases = NULL;
699 /* Record that CASE_LABEL (a CASE_LABEL_EXPR) references edge E. */
701 static void
702 record_switch_edge (edge e, tree case_label)
704 struct edge_to_cases_elt *elt;
705 void **slot;
707 /* Build a hash table element so we can see if E is already
708 in the table. */
709 elt = XNEW (struct edge_to_cases_elt);
710 elt->e = e;
711 elt->case_labels = case_label;
713 slot = htab_find_slot (edge_to_cases, elt, INSERT);
715 if (*slot == NULL)
717 /* E was not in the hash table. Install E into the hash table. */
718 *slot = (void *)elt;
720 else
722 /* E was already in the hash table. Free ELT as we do not need it
723 anymore. */
724 free (elt);
726 /* Get the entry stored in the hash table. */
727 elt = (struct edge_to_cases_elt *) *slot;
729 /* Add it to the chain of CASE_LABEL_EXPRs referencing E. */
730 TREE_CHAIN (case_label) = elt->case_labels;
731 elt->case_labels = case_label;
735 /* If we are inside a {start,end}_recording_cases block, then return
736 a chain of CASE_LABEL_EXPRs from T which reference E.
738 Otherwise return NULL. */
740 static tree
741 get_cases_for_edge (edge e, tree t)
743 struct edge_to_cases_elt elt, *elt_p;
744 void **slot;
745 size_t i, n;
746 tree vec;
748 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
749 chains available. Return NULL so the caller can detect this case. */
750 if (!recording_case_labels_p ())
751 return NULL;
753 restart:
754 elt.e = e;
755 elt.case_labels = NULL;
756 slot = htab_find_slot (edge_to_cases, &elt, NO_INSERT);
758 if (slot)
760 elt_p = (struct edge_to_cases_elt *)*slot;
761 return elt_p->case_labels;
764 /* If we did not find E in the hash table, then this must be the first
765 time we have been queried for information about E & T. Add all the
766 elements from T to the hash table then perform the query again. */
768 vec = SWITCH_LABELS (t);
769 n = TREE_VEC_LENGTH (vec);
770 for (i = 0; i < n; i++)
772 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
773 basic_block label_bb = label_to_block (lab);
774 record_switch_edge (find_edge (e->src, label_bb), TREE_VEC_ELT (vec, i));
776 goto restart;
779 /* Create the edges for a SWITCH_EXPR starting at block BB.
780 At this point, the switch body has been lowered and the
781 SWITCH_LABELS filled in, so this is in effect a multi-way branch. */
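/* For instance (illustrative lowered form), a statement such as

     switch (i_1) <default: <D1>, case 0: <L0>, case 3 ... 5: <L3>>

   yields one outgoing edge from BB to each of the blocks holding <D1>,
   <L0> and <L3>.  */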
783 static void
784 make_switch_expr_edges (basic_block bb)
786 tree entry = last_stmt (bb);
787 size_t i, n;
788 tree vec;
790 vec = SWITCH_LABELS (entry);
791 n = TREE_VEC_LENGTH (vec);
793 for (i = 0; i < n; ++i)
795 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
796 basic_block label_bb = label_to_block (lab);
797 make_edge (bb, label_bb, 0);
802 /* Return the basic block holding label DEST. */
804 basic_block
805 label_to_block_fn (struct function *ifun, tree dest)
807 int uid = LABEL_DECL_UID (dest);
809 /* We would die hard when faced with an undefined label. Emit a label to
810 the very first basic block. This will hopefully make even the dataflow
811 and undefined-variable warnings come out right. */
812 if ((errorcount || sorrycount) && uid < 0)
814 block_stmt_iterator bsi =
815 bsi_start (BASIC_BLOCK (NUM_FIXED_BLOCKS));
816 tree stmt;
818 stmt = build1 (LABEL_EXPR, void_type_node, dest);
819 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
820 uid = LABEL_DECL_UID (dest);
822 if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
823 <= (unsigned int) uid)
824 return NULL;
825 return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
828 /* Create edges for an abnormal goto statement at block BB. If FOR_CALL
829 is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */
831 void
832 make_abnormal_goto_edges (basic_block bb, bool for_call)
834 basic_block target_bb;
835 block_stmt_iterator bsi;
837 FOR_EACH_BB (target_bb)
838 for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
840 tree target = bsi_stmt (bsi);
842 if (TREE_CODE (target) != LABEL_EXPR)
843 break;
845 target = LABEL_EXPR_LABEL (target);
847 /* Make an edge to every label block that has been marked as a
848 potential target for a computed goto or a non-local goto. */
849 if ((FORCED_LABEL (target) && !for_call)
850 || (DECL_NONLOCAL (target) && for_call))
852 make_edge (bb, target_bb, EDGE_ABNORMAL);
853 break;
858 /* Create edges for a goto statement at block BB. */
860 static void
861 make_goto_expr_edges (basic_block bb)
863 block_stmt_iterator last = bsi_last (bb);
864 tree goto_t = bsi_stmt (last);
866 /* A simple GOTO creates normal edges. */
867 if (simple_goto_p (goto_t))
869 tree dest = GOTO_DESTINATION (goto_t);
870 edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
871 #ifdef USE_MAPPED_LOCATION
872 e->goto_locus = EXPR_LOCATION (goto_t);
873 #else
874 e->goto_locus = EXPR_LOCUS (goto_t);
875 #endif
876 bsi_remove (&last, true);
877 return;
880 /* A computed GOTO creates abnormal edges. */
881 make_abnormal_goto_edges (bb, false);
885 /*---------------------------------------------------------------------------
886 Flowgraph analysis
887 ---------------------------------------------------------------------------*/
889 /* Cleanup useless labels in basic blocks. This is something we wish
890 to do early because it allows us to group case labels before creating
891 the edges for the CFG, and it speeds up block statement iterators in
892 all passes later on.
893 We only run this pass once; running it more than once is probably not
894 profitable. */
896 /* A map from basic block index to the leading label of that block. */
897 static tree *label_for_bb;
899 /* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
900 static void
901 update_eh_label (struct eh_region *region)
903 tree old_label = get_eh_region_tree_label (region);
904 if (old_label)
906 tree new_label;
907 basic_block bb = label_to_block (old_label);
909 /* ??? After optimizing, there may be EH regions with labels
910 that have already been removed from the function body, so
911 there is no basic block for them. */
912 if (! bb)
913 return;
915 new_label = label_for_bb[bb->index];
916 set_eh_region_tree_label (region, new_label);
920 /* Given LABEL return the first label in the same basic block. */
921 static tree
922 main_block_label (tree label)
924 basic_block bb = label_to_block (label);
926 /* label_to_block possibly inserted undefined label into the chain. */
927 if (!label_for_bb[bb->index])
928 label_for_bb[bb->index] = label;
929 return label_for_bb[bb->index];
932 /* Cleanup redundant labels. This is a three-step process:
933 1) Find the leading label for each block.
934 2) Redirect all references to labels to the leading labels.
935 3) Cleanup all useless labels. */
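/* For example (illustrative), if a block starts with

     L1:; user_lab:; L2:;
       x_1 = ...;

   and user_lab is the only user-defined label, user_lab becomes the leading
   label, every jump to L1 or L2 is redirected to user_lab, and the artificial
   labels L1 and L2 are then deleted, provided they are neither forced nor
   non-local.  */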
937 void
938 cleanup_dead_labels (void)
940 basic_block bb;
941 label_for_bb = XCNEWVEC (tree, last_basic_block);
943 /* Find a suitable label for each block. We use the first user-defined
944 label if there is one, or otherwise just the first label we see. */
945 FOR_EACH_BB (bb)
947 block_stmt_iterator i;
949 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
951 tree label, stmt = bsi_stmt (i);
953 if (TREE_CODE (stmt) != LABEL_EXPR)
954 break;
956 label = LABEL_EXPR_LABEL (stmt);
958 /* If we have not yet seen a label for the current block,
959 remember this one and see if there are more labels. */
960 if (! label_for_bb[bb->index])
962 label_for_bb[bb->index] = label;
963 continue;
966 /* If we did see a label for the current block already, but it
967 is an artificially created label, replace it if the current
968 label is a user defined label. */
969 if (! DECL_ARTIFICIAL (label)
970 && DECL_ARTIFICIAL (label_for_bb[bb->index]))
972 label_for_bb[bb->index] = label;
973 break;
978 /* Now redirect all jumps/branches to the selected label.
979 First do so for each block ending in a control statement. */
980 FOR_EACH_BB (bb)
982 tree stmt = last_stmt (bb);
983 if (!stmt)
984 continue;
986 switch (TREE_CODE (stmt))
988 case COND_EXPR:
990 tree true_branch, false_branch;
992 true_branch = COND_EXPR_THEN (stmt);
993 false_branch = COND_EXPR_ELSE (stmt);
995 GOTO_DESTINATION (true_branch)
996 = main_block_label (GOTO_DESTINATION (true_branch));
997 GOTO_DESTINATION (false_branch)
998 = main_block_label (GOTO_DESTINATION (false_branch));
1000 break;
1003 case SWITCH_EXPR:
1005 size_t i;
1006 tree vec = SWITCH_LABELS (stmt);
1007 size_t n = TREE_VEC_LENGTH (vec);
1009 /* Replace all destination labels. */
1010 for (i = 0; i < n; ++i)
1012 tree elt = TREE_VEC_ELT (vec, i);
1013 tree label = main_block_label (CASE_LABEL (elt));
1014 CASE_LABEL (elt) = label;
1016 break;
1019 /* We have to handle GOTO_EXPRs until they're removed, and we don't
1020 remove them until after we've created the CFG edges. */
1021 case GOTO_EXPR:
1022 if (! computed_goto_p (stmt))
1024 GOTO_DESTINATION (stmt)
1025 = main_block_label (GOTO_DESTINATION (stmt));
1026 break;
1029 default:
1030 break;
1034 for_each_eh_region (update_eh_label);
1036 /* Finally, purge dead labels. All user-defined labels and labels that
1037 can be the target of non-local gotos and labels which have their
1038 address taken are preserved. */
1039 FOR_EACH_BB (bb)
1041 block_stmt_iterator i;
1042 tree label_for_this_bb = label_for_bb[bb->index];
1044 if (! label_for_this_bb)
1045 continue;
1047 for (i = bsi_start (bb); !bsi_end_p (i); )
1049 tree label, stmt = bsi_stmt (i);
1051 if (TREE_CODE (stmt) != LABEL_EXPR)
1052 break;
1054 label = LABEL_EXPR_LABEL (stmt);
1056 if (label == label_for_this_bb
1057 || ! DECL_ARTIFICIAL (label)
1058 || DECL_NONLOCAL (label)
1059 || FORCED_LABEL (label))
1060 bsi_next (&i);
1061 else
1062 bsi_remove (&i, true);
1066 free (label_for_bb);
1069 /* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1070 and scan the sorted vector of cases. Combine the ones jumping to the
1071 same label.
1072 E.g., three separate entries 1:, 2:, and 3: become one entry 1..3: */
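/* Concretely (illustrative source-level view):

     case 1: case 2: case 3: goto <L5>;   becomes   case 1 ... 3: goto <L5>;
     case 7: goto <Ldefault>;             is dropped entirely,

   since a case that jumps to the same label as the default case adds
   nothing beyond the default.  */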
1074 void
1075 group_case_labels (void)
1077 basic_block bb;
1079 FOR_EACH_BB (bb)
1081 tree stmt = last_stmt (bb);
1082 if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
1084 tree labels = SWITCH_LABELS (stmt);
1085 int old_size = TREE_VEC_LENGTH (labels);
1086 int i, j, new_size = old_size;
1087 tree default_case = TREE_VEC_ELT (labels, old_size - 1);
1088 tree default_label;
1090 /* The default label is always the last case in a switch
1091 statement after gimplification. */
1092 default_label = CASE_LABEL (default_case);
1094 /* Look for possible opportunities to merge cases.
1095 Ignore the last element of the label vector because it
1096 must be the default case. */
1097 i = 0;
1098 while (i < old_size - 1)
1100 tree base_case, base_label, base_high;
1101 base_case = TREE_VEC_ELT (labels, i);
1103 gcc_assert (base_case);
1104 base_label = CASE_LABEL (base_case);
1106 /* Discard cases that have the same destination as the
1107 default case. */
1108 if (base_label == default_label)
1110 TREE_VEC_ELT (labels, i) = NULL_TREE;
1111 i++;
1112 new_size--;
1113 continue;
1116 base_high = CASE_HIGH (base_case) ?
1117 CASE_HIGH (base_case) : CASE_LOW (base_case);
1118 i++;
1119 /* Try to merge case labels. Break out when we reach the end
1120 of the label vector or when we cannot merge the next case
1121 label with the current one. */
1122 while (i < old_size - 1)
1124 tree merge_case = TREE_VEC_ELT (labels, i);
1125 tree merge_label = CASE_LABEL (merge_case);
1126 tree t = int_const_binop (PLUS_EXPR, base_high,
1127 integer_one_node, 1);
1129 /* Merge the cases if they jump to the same place,
1130 and their ranges are consecutive. */
1131 if (merge_label == base_label
1132 && tree_int_cst_equal (CASE_LOW (merge_case), t))
1134 base_high = CASE_HIGH (merge_case) ?
1135 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1136 CASE_HIGH (base_case) = base_high;
1137 TREE_VEC_ELT (labels, i) = NULL_TREE;
1138 new_size--;
1139 i++;
1141 else
1142 break;
1146 /* Compress the case labels in the label vector, and adjust the
1147 length of the vector. */
1148 for (i = 0, j = 0; i < new_size; i++)
1150 while (! TREE_VEC_ELT (labels, j))
1151 j++;
1152 TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
1154 TREE_VEC_LENGTH (labels) = new_size;
1159 /* Checks whether we can merge block B into block A. */
1161 static bool
1162 tree_can_merge_blocks_p (basic_block a, basic_block b)
1164 tree stmt;
1165 block_stmt_iterator bsi;
1166 tree phi;
1168 if (!single_succ_p (a))
1169 return false;
1171 if (single_succ_edge (a)->flags & EDGE_ABNORMAL)
1172 return false;
1174 if (single_succ (a) != b)
1175 return false;
1177 if (!single_pred_p (b))
1178 return false;
1180 if (b == EXIT_BLOCK_PTR)
1181 return false;
1183 /* If A ends by a statement causing exceptions or something similar, we
1184 cannot merge the blocks. */
1185 stmt = last_stmt (a);
1186 if (stmt && stmt_ends_bb_p (stmt))
1187 return false;
1189 /* Do not allow a block with only a non-local label to be merged. */
1190 if (stmt && TREE_CODE (stmt) == LABEL_EXPR
1191 && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
1192 return false;
1194 /* It must be possible to eliminate all phi nodes in B. If ssa form
1195 is not up-to-date, we cannot eliminate any phis; however, if only
1196 some symbols are marked for renaming as a whole, this is not a problem,
1197 as phi nodes for those symbols are irrelevant in updating anyway. */
1198 phi = phi_nodes (b);
1199 if (phi)
1201 if (name_mappings_registered_p ())
1202 return false;
1204 for (; phi; phi = PHI_CHAIN (phi))
1205 if (!is_gimple_reg (PHI_RESULT (phi))
1206 && !may_propagate_copy (PHI_RESULT (phi), PHI_ARG_DEF (phi, 0)))
1207 return false;
1210 /* Do not remove user labels. */
1211 for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
1213 stmt = bsi_stmt (bsi);
1214 if (TREE_CODE (stmt) != LABEL_EXPR)
1215 break;
1216 if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
1217 return false;
1220 /* Protect the loop latches. */
1221 if (current_loops
1222 && b->loop_father->latch == b)
1223 return false;
1225 return true;
1228 /* Replaces all uses of NAME by VAL. */
1230 void
1231 replace_uses_by (tree name, tree val)
1233 imm_use_iterator imm_iter;
1234 use_operand_p use;
1235 tree stmt;
1236 edge e;
1238 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1240 if (TREE_CODE (stmt) != PHI_NODE)
1241 push_stmt_changes (&stmt);
1243 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1245 replace_exp (use, val);
1247 if (TREE_CODE (stmt) == PHI_NODE)
1249 e = PHI_ARG_EDGE (stmt, PHI_ARG_INDEX_FROM_USE (use));
1250 if (e->flags & EDGE_ABNORMAL)
1252 /* This can only occur for virtual operands, since
1253 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1254 would prevent replacement. */
1255 gcc_assert (!is_gimple_reg (name));
1256 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1261 if (TREE_CODE (stmt) != PHI_NODE)
1263 tree rhs;
1265 fold_stmt_inplace (stmt);
1267 /* FIXME. This should go in pop_stmt_changes. */
1268 rhs = get_rhs (stmt);
1269 if (TREE_CODE (rhs) == ADDR_EXPR)
1270 recompute_tree_invariant_for_addr_expr (rhs);
1272 maybe_clean_or_replace_eh_stmt (stmt, stmt);
1274 pop_stmt_changes (&stmt);
1278 gcc_assert (zero_imm_uses_p (name));
1280 /* Also update the trees stored in loop structures. */
1281 if (current_loops)
1283 struct loop *loop;
1284 loop_iterator li;
1286 FOR_EACH_LOOP (li, loop, 0)
1288 substitute_in_loop_info (loop, name, val);
1293 /* Merge block B into block A. */
1295 static void
1296 tree_merge_blocks (basic_block a, basic_block b)
1298 block_stmt_iterator bsi;
1299 tree_stmt_iterator last;
1300 tree phi;
1302 if (dump_file)
1303 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1305 /* Remove all single-valued PHI nodes from block B of the form
1306 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1307 bsi = bsi_last (a);
1308 for (phi = phi_nodes (b); phi; phi = phi_nodes (b))
1310 tree def = PHI_RESULT (phi), use = PHI_ARG_DEF (phi, 0);
1311 tree copy;
1312 bool may_replace_uses = may_propagate_copy (def, use);
1314 /* In case we have loops to care about, do not propagate arguments of
1315 loop closed ssa phi nodes. */
1316 if (current_loops
1317 && is_gimple_reg (def)
1318 && TREE_CODE (use) == SSA_NAME
1319 && a->loop_father != b->loop_father)
1320 may_replace_uses = false;
1322 if (!may_replace_uses)
1324 gcc_assert (is_gimple_reg (def));
1326 /* Note that just emitting the copies is fine -- there is no problem
1327 with ordering of phi nodes. This is because A is the single
1328 predecessor of B, therefore results of the phi nodes cannot
1329 appear as arguments of the phi nodes. */
1330 copy = build2_gimple (GIMPLE_MODIFY_STMT, def, use);
1331 bsi_insert_after (&bsi, copy, BSI_NEW_STMT);
1332 SSA_NAME_DEF_STMT (def) = copy;
1333 remove_phi_node (phi, NULL, false);
1335 else
1337 replace_uses_by (def, use);
1338 remove_phi_node (phi, NULL, true);
1342 /* Ensure that B follows A. */
1343 move_block_after (b, a);
1345 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1346 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1348 /* Remove labels from B and set bb_for_stmt to A for other statements. */
1349 for (bsi = bsi_start (b); !bsi_end_p (bsi);)
1351 if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
1353 tree label = bsi_stmt (bsi);
1355 bsi_remove (&bsi, false);
1356 /* Now that we can thread computed gotos, we might have
1357 a situation where we have a forced label in block B.
1358 However, the label at the start of block B might still be
1359 used in other ways (think about the runtime checking for
1360 Fortran assigned gotos). So we cannot just delete the
1361 label. Instead we move the label to the start of block A. */
1362 if (FORCED_LABEL (LABEL_EXPR_LABEL (label)))
1364 block_stmt_iterator dest_bsi = bsi_start (a);
1365 bsi_insert_before (&dest_bsi, label, BSI_NEW_STMT);
1368 else
1370 change_bb_for_stmt (bsi_stmt (bsi), a);
1371 bsi_next (&bsi);
1375 /* Merge the chains. */
1376 last = tsi_last (a->stmt_list);
1377 tsi_link_after (&last, b->stmt_list, TSI_NEW_STMT);
1378 b->stmt_list = NULL;
1382 /* Return the one of the two successors of BB that is not reached by a
1383 complex edge, if there is one. Else, return BB. We use
1384 this in optimizations that use post-dominators for their heuristics,
1385 to catch the cases in C++ where function calls are involved. */
1387 basic_block
1388 single_noncomplex_succ (basic_block bb)
1390 edge e0, e1;
1391 if (EDGE_COUNT (bb->succs) != 2)
1392 return bb;
1394 e0 = EDGE_SUCC (bb, 0);
1395 e1 = EDGE_SUCC (bb, 1);
1396 if (e0->flags & EDGE_COMPLEX)
1397 return e1->dest;
1398 if (e1->flags & EDGE_COMPLEX)
1399 return e0->dest;
1401 return bb;
1405 /* Walk the function tree removing unnecessary statements.
1407 * Empty statement nodes are removed
1409 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1411 * Unnecessary COND_EXPRs are removed
1413 * Some unnecessary BIND_EXPRs are removed
1415 Clearly more work could be done. The trick is doing the analysis
1416 and removal fast enough to be a net improvement in compile times.
1418 Note that when we remove a control structure such as a COND_EXPR
1419 BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1420 to ensure we eliminate all the useless code. */
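/* For instance (hypothetical GENERIC input), the walk turns

     if (x_1) {} else {}
     try { bar (); } finally {}

   into just "bar ();": the side-effect-free COND_EXPR is replaced by an
   empty statement and later delinked, and the TRY_FINALLY_EXPR whose
   cleanup has no side effects is replaced by its body.  */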
1422 struct rus_data
1424 tree *last_goto;
1425 bool repeat;
1426 bool may_throw;
1427 bool may_branch;
1428 bool has_label;
1431 static void remove_useless_stmts_1 (tree *, struct rus_data *);
1433 static bool
1434 remove_useless_stmts_warn_notreached (tree stmt)
1436 if (EXPR_HAS_LOCATION (stmt))
1438 location_t loc = EXPR_LOCATION (stmt);
1439 if (LOCATION_LINE (loc) > 0)
1441 warning (0, "%Hwill never be executed", &loc);
1442 return true;
1446 switch (TREE_CODE (stmt))
1448 case STATEMENT_LIST:
1450 tree_stmt_iterator i;
1451 for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
1452 if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
1453 return true;
1455 break;
1457 case COND_EXPR:
1458 if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
1459 return true;
1460 if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
1461 return true;
1462 if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
1463 return true;
1464 break;
1466 case TRY_FINALLY_EXPR:
1467 case TRY_CATCH_EXPR:
1468 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
1469 return true;
1470 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
1471 return true;
1472 break;
1474 case CATCH_EXPR:
1475 return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
1476 case EH_FILTER_EXPR:
1477 return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
1478 case BIND_EXPR:
1479 return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));
1481 default:
1482 /* Not a live container. */
1483 break;
1486 return false;
1489 static void
1490 remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
1492 tree then_clause, else_clause, cond;
1493 bool save_has_label, then_has_label, else_has_label;
1495 save_has_label = data->has_label;
1496 data->has_label = false;
1497 data->last_goto = NULL;
1499 remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);
1501 then_has_label = data->has_label;
1502 data->has_label = false;
1503 data->last_goto = NULL;
1505 remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);
1507 else_has_label = data->has_label;
1508 data->has_label = save_has_label | then_has_label | else_has_label;
1510 then_clause = COND_EXPR_THEN (*stmt_p);
1511 else_clause = COND_EXPR_ELSE (*stmt_p);
1512 cond = fold (COND_EXPR_COND (*stmt_p));
1514 /* If neither arm does anything at all, we can remove the whole IF. */
1515 if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
1517 *stmt_p = build_empty_stmt ();
1518 data->repeat = true;
1521 /* If there are no reachable statements in an arm, then we can
1522 zap the entire conditional. */
1523 else if (integer_nonzerop (cond) && !else_has_label)
1525 if (warn_notreached)
1526 remove_useless_stmts_warn_notreached (else_clause);
1527 *stmt_p = then_clause;
1528 data->repeat = true;
1530 else if (integer_zerop (cond) && !then_has_label)
1532 if (warn_notreached)
1533 remove_useless_stmts_warn_notreached (then_clause);
1534 *stmt_p = else_clause;
1535 data->repeat = true;
1538 /* Check a couple of simple things on then/else with single stmts. */
1539 else
1541 tree then_stmt = expr_only (then_clause);
1542 tree else_stmt = expr_only (else_clause);
1544 /* Notice branches to a common destination. */
1545 if (then_stmt && else_stmt
1546 && TREE_CODE (then_stmt) == GOTO_EXPR
1547 && TREE_CODE (else_stmt) == GOTO_EXPR
1548 && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
1550 *stmt_p = then_stmt;
1551 data->repeat = true;
1554 /* If the THEN/ELSE clause merely assigns a value to a variable or
1555 parameter which is already known to contain that value, then
1556 remove the useless THEN/ELSE clause. */
1557 else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1559 if (else_stmt
1560 && TREE_CODE (else_stmt) == GIMPLE_MODIFY_STMT
1561 && GIMPLE_STMT_OPERAND (else_stmt, 0) == cond
1562 && integer_zerop (GIMPLE_STMT_OPERAND (else_stmt, 1)))
1563 COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
1565 else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
1566 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1567 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1568 && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
1570 tree stmt = (TREE_CODE (cond) == EQ_EXPR
1571 ? then_stmt : else_stmt);
1572 tree *location = (TREE_CODE (cond) == EQ_EXPR
1573 ? &COND_EXPR_THEN (*stmt_p)
1574 : &COND_EXPR_ELSE (*stmt_p));
1576 if (stmt
1577 && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
1578 && GIMPLE_STMT_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
1579 && GIMPLE_STMT_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
1580 *location = alloc_stmt_list ();
1584 /* Protect GOTOs in the arm of COND_EXPRs from being removed. They
1585 would be re-introduced during lowering. */
1586 data->last_goto = NULL;
1590 static void
1591 remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
1593 bool save_may_branch, save_may_throw;
1594 bool this_may_branch, this_may_throw;
1596 /* Collect may_branch and may_throw information for the body only. */
1597 save_may_branch = data->may_branch;
1598 save_may_throw = data->may_throw;
1599 data->may_branch = false;
1600 data->may_throw = false;
1601 data->last_goto = NULL;
1603 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1605 this_may_branch = data->may_branch;
1606 this_may_throw = data->may_throw;
1607 data->may_branch |= save_may_branch;
1608 data->may_throw |= save_may_throw;
1609 data->last_goto = NULL;
1611 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1613 /* If the body is empty, then we can emit the FINALLY block without
1614 the enclosing TRY_FINALLY_EXPR. */
1615 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
1617 *stmt_p = TREE_OPERAND (*stmt_p, 1);
1618 data->repeat = true;
1621 /* If the handler is empty, then we can emit the TRY block without
1622 the enclosing TRY_FINALLY_EXPR. */
1623 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1625 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1626 data->repeat = true;
1629 /* If the body neither throws, nor branches, then we can safely
1630 string the TRY and FINALLY blocks together. */
1631 else if (!this_may_branch && !this_may_throw)
1633 tree stmt = *stmt_p;
1634 *stmt_p = TREE_OPERAND (stmt, 0);
1635 append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
1636 data->repeat = true;
1641 static void
1642 remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
1644 bool save_may_throw, this_may_throw;
1645 tree_stmt_iterator i;
1646 tree stmt;
1648 /* Collect may_throw information for the body only. */
1649 save_may_throw = data->may_throw;
1650 data->may_throw = false;
1651 data->last_goto = NULL;
1653 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1655 this_may_throw = data->may_throw;
1656 data->may_throw = save_may_throw;
1658 /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
1659 if (!this_may_throw)
1661 if (warn_notreached)
1662 remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
1663 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1664 data->repeat = true;
1665 return;
1668 /* Process the catch clause specially. We may be able to tell that
1669 no exceptions propagate past this point. */
1671 this_may_throw = true;
1672 i = tsi_start (TREE_OPERAND (*stmt_p, 1));
1673 stmt = tsi_stmt (i);
1674 data->last_goto = NULL;
1676 switch (TREE_CODE (stmt))
1678 case CATCH_EXPR:
1679 for (; !tsi_end_p (i); tsi_next (&i))
1681 stmt = tsi_stmt (i);
1682 /* If we catch all exceptions, then the body does not
1683 propagate exceptions past this point. */
1684 if (CATCH_TYPES (stmt) == NULL)
1685 this_may_throw = false;
1686 data->last_goto = NULL;
1687 remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
1689 break;
1691 case EH_FILTER_EXPR:
1692 if (EH_FILTER_MUST_NOT_THROW (stmt))
1693 this_may_throw = false;
1694 else if (EH_FILTER_TYPES (stmt) == NULL)
1695 this_may_throw = false;
1696 remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
1697 break;
1699 default:
1700 /* Otherwise this is a cleanup. */
1701 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1703 /* If the cleanup is empty, then we can emit the TRY block without
1704 the enclosing TRY_CATCH_EXPR. */
1705 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1707 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1708 data->repeat = true;
1710 break;
1712 data->may_throw |= this_may_throw;
1716 static void
1717 remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
1719 tree block;
1721 /* First remove anything underneath the BIND_EXPR. */
1722 remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);
1724 /* If the BIND_EXPR has no variables, then we can pull everything
1725 up one level and remove the BIND_EXPR, unless this is the toplevel
1726 BIND_EXPR for the current function or an inlined function.
1728 When this situation occurs we will want to apply this
1729 optimization again. */
1730 block = BIND_EXPR_BLOCK (*stmt_p);
1731 if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
1732 && *stmt_p != DECL_SAVED_TREE (current_function_decl)
1733 && (! block
1734 || ! BLOCK_ABSTRACT_ORIGIN (block)
1735 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1736 != FUNCTION_DECL)))
1738 *stmt_p = BIND_EXPR_BODY (*stmt_p);
1739 data->repeat = true;
1744 static void
1745 remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
1747 tree dest = GOTO_DESTINATION (*stmt_p);
1749 data->may_branch = true;
1750 data->last_goto = NULL;
1752 /* Record the last goto expr, so that we can delete it if unnecessary. */
1753 if (TREE_CODE (dest) == LABEL_DECL)
1754 data->last_goto = stmt_p;
1758 static void
1759 remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
1761 tree label = LABEL_EXPR_LABEL (*stmt_p);
1763 data->has_label = true;
1765 /* We do want to jump across non-local label receiver code. */
1766 if (DECL_NONLOCAL (label))
1767 data->last_goto = NULL;
1769 else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
1771 *data->last_goto = build_empty_stmt ();
1772 data->repeat = true;
1775 /* ??? Add something here to delete unused labels. */
1779 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
1780 decl. This allows us to eliminate redundant or useless
1781 calls to "const" functions.
1783 The gimplifier already does the same operation, but we may only notice
1784 that functions are const or pure after their calls have been gimplified,
1785 so we need to update the flags here. */
1787 static void
1788 update_call_expr_flags (tree call)
1790 tree decl = get_callee_fndecl (call);
1791 if (!decl)
1792 return;
1793 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1794 TREE_SIDE_EFFECTS (call) = 0;
1795 if (TREE_NOTHROW (decl))
1796 TREE_NOTHROW (call) = 1;
1800 /* T is a CALL_EXPR. Set current_function_calls_* flags. */
1802 void
1803 notice_special_calls (tree t)
1805 int flags = call_expr_flags (t);
1807 if (flags & ECF_MAY_BE_ALLOCA)
1808 current_function_calls_alloca = true;
1809 if (flags & ECF_RETURNS_TWICE)
1810 current_function_calls_setjmp = true;
1814 /* Clear flags set by notice_special_calls. Used by dead code removal
1815 to update the flags. */
1817 void
1818 clear_special_calls (void)
1820 current_function_calls_alloca = false;
1821 current_function_calls_setjmp = false;
1825 static void
1826 remove_useless_stmts_1 (tree *tp, struct rus_data *data)
1828 tree t = *tp, op;
1830 switch (TREE_CODE (t))
1832 case COND_EXPR:
1833 remove_useless_stmts_cond (tp, data);
1834 break;
1836 case TRY_FINALLY_EXPR:
1837 remove_useless_stmts_tf (tp, data);
1838 break;
1840 case TRY_CATCH_EXPR:
1841 remove_useless_stmts_tc (tp, data);
1842 break;
1844 case BIND_EXPR:
1845 remove_useless_stmts_bind (tp, data);
1846 break;
1848 case GOTO_EXPR:
1849 remove_useless_stmts_goto (tp, data);
1850 break;
1852 case LABEL_EXPR:
1853 remove_useless_stmts_label (tp, data);
1854 break;
1856 case RETURN_EXPR:
1857 fold_stmt (tp);
1858 data->last_goto = NULL;
1859 data->may_branch = true;
1860 break;
1862 case CALL_EXPR:
1863 fold_stmt (tp);
1864 data->last_goto = NULL;
1865 notice_special_calls (t);
1866 update_call_expr_flags (t);
1867 if (tree_could_throw_p (t))
1868 data->may_throw = true;
1869 break;
1871 case MODIFY_EXPR:
1872 gcc_unreachable ();
1874 case GIMPLE_MODIFY_STMT:
1875 data->last_goto = NULL;
1876 fold_stmt (tp);
1877 op = get_call_expr_in (t);
1878 if (op)
1880 update_call_expr_flags (op);
1881 notice_special_calls (op);
1883 if (tree_could_throw_p (t))
1884 data->may_throw = true;
1885 break;
1887 case STATEMENT_LIST:
1889 tree_stmt_iterator i = tsi_start (t);
1890 while (!tsi_end_p (i))
1892 t = tsi_stmt (i);
1893 if (IS_EMPTY_STMT (t))
1895 tsi_delink (&i);
1896 continue;
1899 remove_useless_stmts_1 (tsi_stmt_ptr (i), data);
1901 t = tsi_stmt (i);
1902 if (TREE_CODE (t) == STATEMENT_LIST)
1904 tsi_link_before (&i, t, TSI_SAME_STMT);
1905 tsi_delink (&i);
1907 else
1908 tsi_next (&i);
1911 break;
1912 case ASM_EXPR:
1913 fold_stmt (tp);
1914 data->last_goto = NULL;
1915 break;
1917 default:
1918 data->last_goto = NULL;
1919 break;
1923 static unsigned int
1924 remove_useless_stmts (void)
1926 struct rus_data data;
1928 clear_special_calls ();
1932 memset (&data, 0, sizeof (data));
1933 remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
1935 while (data.repeat);
1936 return 0;
1940 struct tree_opt_pass pass_remove_useless_stmts =
1942 "useless", /* name */
1943 NULL, /* gate */
1944 remove_useless_stmts, /* execute */
1945 NULL, /* sub */
1946 NULL, /* next */
1947 0, /* static_pass_number */
1948 0, /* tv_id */
1949 PROP_gimple_any, /* properties_required */
1950 0, /* properties_provided */
1951 0, /* properties_destroyed */
1952 0, /* todo_flags_start */
1953 TODO_dump_func, /* todo_flags_finish */
1954 0 /* letter */
1957 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1959 static void
1960 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1962 tree phi;
1964 /* Since this block is no longer reachable, we can just delete all
1965 of its PHI nodes. */
1966 phi = phi_nodes (bb);
1967 while (phi)
1969 tree next = PHI_CHAIN (phi);
1970 remove_phi_node (phi, NULL_TREE, true);
1971 phi = next;
1974 /* Remove edges to BB's successors. */
1975 while (EDGE_COUNT (bb->succs) > 0)
1976 remove_edge (EDGE_SUCC (bb, 0));
1980 /* Remove statements of basic block BB. */
1982 static void
1983 remove_bb (basic_block bb)
1985 block_stmt_iterator i;
1986 #ifdef USE_MAPPED_LOCATION
1987 source_location loc = UNKNOWN_LOCATION;
1988 #else
1989 source_locus loc = 0;
1990 #endif
1992 if (dump_file)
1994 fprintf (dump_file, "Removing basic block %d\n", bb->index);
1995 if (dump_flags & TDF_DETAILS)
1997 dump_bb (bb, dump_file, 0);
1998 fprintf (dump_file, "\n");
2002 if (current_loops)
2004 struct loop *loop = bb->loop_father;
2006 /* If a loop gets removed, clean up the information associated
2007 with it. */
2008 if (loop->latch == bb
2009 || loop->header == bb)
2010 free_numbers_of_iterations_estimates_loop (loop);
2013 /* Remove all the instructions in the block. */
2014 for (i = bsi_start (bb); !bsi_end_p (i);)
2016 tree stmt = bsi_stmt (i);
2017 if (TREE_CODE (stmt) == LABEL_EXPR
2018 && (FORCED_LABEL (LABEL_EXPR_LABEL (stmt))
2019 || DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt))))
2021 basic_block new_bb;
2022 block_stmt_iterator new_bsi;
2024 /* A non-reachable non-local label may still be referenced.
2025 But it no longer needs to carry the extra semantics of
2026 non-locality. */
2027 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
2029 DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)) = 0;
2030 FORCED_LABEL (LABEL_EXPR_LABEL (stmt)) = 1;
2033 new_bb = bb->prev_bb;
2034 new_bsi = bsi_start (new_bb);
2035 bsi_remove (&i, false);
2036 bsi_insert_before (&new_bsi, stmt, BSI_NEW_STMT);
2038 else
2040 /* Release SSA definitions if we are in SSA. Note that we
2041 may be called when not in SSA. For example,
2042 final_cleanup calls this function via
2043 cleanup_tree_cfg. */
2044 if (gimple_in_ssa_p (cfun))
2045 release_defs (stmt);
2047 bsi_remove (&i, true);
2050 /* Don't warn for removed gotos. Gotos are often removed due to
2051 jump threading, thus resulting in bogus warnings. Not great,
2052 since this way we lose warnings for gotos in the original
2053 program that are indeed unreachable. */
2054 if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
2056 #ifdef USE_MAPPED_LOCATION
2057 if (EXPR_HAS_LOCATION (stmt))
2058 loc = EXPR_LOCATION (stmt);
2059 #else
2060 source_locus t;
2061 t = EXPR_LOCUS (stmt);
2062 if (t && LOCATION_LINE (*t) > 0)
2063 loc = t;
2064 #endif
2068 /* If requested, give a warning that the first statement in the
2069 block is unreachable. The loop above walks the statements forwards
2070 and only records the first location it sees, so LOC refers to the
2071 first statement in the block. */
2072 #ifdef USE_MAPPED_LOCATION
2073 if (loc > BUILTINS_LOCATION)
2074 warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
2075 #else
2076 if (loc)
2077 warning (OPT_Wunreachable_code, "%Hwill never be executed", loc);
2078 #endif
2080 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2084 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2085 predicate VAL, return the edge that will be taken out of the block.
2086 If VAL does not match a unique edge, NULL is returned. */
2088 edge
2089 find_taken_edge (basic_block bb, tree val)
2091 tree stmt;
2093 stmt = last_stmt (bb);
2095 gcc_assert (stmt);
2096 gcc_assert (is_ctrl_stmt (stmt));
2097 gcc_assert (val);
2099 if (! is_gimple_min_invariant (val))
2100 return NULL;
2102 if (TREE_CODE (stmt) == COND_EXPR)
2103 return find_taken_edge_cond_expr (bb, val);
2105 if (TREE_CODE (stmt) == SWITCH_EXPR)
2106 return find_taken_edge_switch_expr (bb, val);
2108 if (computed_goto_p (stmt))
2109 return find_taken_edge_computed_goto (bb, TREE_OPERAND( val, 0));
2111 gcc_unreachable ();
2114 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2115 statement, determine which of the outgoing edges will be taken out of the
2116 block. Return NULL if the edge to be taken cannot be determined. */
2118 static edge
2119 find_taken_edge_computed_goto (basic_block bb, tree val)
2121 basic_block dest;
2122 edge e = NULL;
2124 dest = label_to_block (val);
2125 if (dest)
2127 e = find_edge (bb, dest);
2128 gcc_assert (e != NULL);
2131 return e;
2134 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2135 statement, determine which of the two edges will be taken out of the
2136 block. Return NULL if either edge may be taken. */
2138 static edge
2139 find_taken_edge_cond_expr (basic_block bb, tree val)
2141 edge true_edge, false_edge;
2143 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2145 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2146 return (integer_zerop (val) ? false_edge : true_edge);
2149 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2150 statement, determine which edge will be taken out of the block. Return
2151 NULL if any edge may be taken. */
2153 static edge
2154 find_taken_edge_switch_expr (basic_block bb, tree val)
2156 tree switch_expr, taken_case;
2157 basic_block dest_bb;
2158 edge e;
2160 switch_expr = last_stmt (bb);
2161 taken_case = find_case_label_for_value (switch_expr, val);
2162 dest_bb = label_to_block (CASE_LABEL (taken_case));
2164 e = find_edge (bb, dest_bb);
2165 gcc_assert (e);
2166 return e;
2170 /* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
2171 We can make optimal use here of the fact that the case labels are
2172 sorted: We can do a binary search for a case matching VAL. */
2174 static tree
2175 find_case_label_for_value (tree switch_expr, tree val)
2177 tree vec = SWITCH_LABELS (switch_expr);
2178 size_t low, high, n = TREE_VEC_LENGTH (vec);
2179 tree default_case = TREE_VEC_ELT (vec, n - 1);
2181 for (low = -1, high = n - 1; high - low > 1; )
2183 size_t i = (high + low) / 2;
2184 tree t = TREE_VEC_ELT (vec, i);
2185 int cmp;
2187 /* Cache the result of comparing CASE_LOW and val. */
2188 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2190 if (cmp > 0)
2191 high = i;
2192 else
2193 low = i;
2195 if (CASE_HIGH (t) == NULL)
2197 /* A single-valued case label. */
2198 if (cmp == 0)
2199 return t;
2201 else
2203 /* A case range. We can only handle integer ranges. */
2204 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2205 return t;
2209 return default_case;
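
The same search strategy can be illustrated outside of GCC's tree types. The sketch below is a self-contained C analogue (not part of tree-cfg.c) of find_case_label_for_value: the array is sorted by LOW, ranges do not overlap, a single-valued case has HIGH == LOW, and the final entry plays the role of the default label, mirroring the layout of SWITCH_LABELS assumed above. All names here are illustrative.

#include <stddef.h>

struct int_case { int low; int high; };

/* Return the index of the case containing VAL, or N - 1 (the default
   slot) if no case matches.  N must be at least 1.  */
static size_t
find_case_for_value (const struct int_case *cases, size_t n, int val)
{
  size_t low, high;

  /* As above, LOW starts one before the first candidate (the unsigned
     wrap-around behaves like -1) and the default entry at N - 1 is never
     examined as a midpoint.  */
  for (low = (size_t) -1, high = n - 1; high - low > 1; )
    {
      size_t i = (high + low) / 2;
      int c_low = cases[i].low;

      if (c_low > val)
        high = i;
      else
        low = i;

      if (c_low == val)
        return i;                        /* single value or range start */
      if (c_low < val && val <= cases[i].high)
        return i;                        /* VAL falls inside this range */
    }

  return n - 1;                          /* fall back to the default */
}
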
2215 /*---------------------------------------------------------------------------
2216 Debugging functions
2217 ---------------------------------------------------------------------------*/
2219 /* Dump tree-specific information of block BB to file OUTF. */
2221 void
2222 tree_dump_bb (basic_block bb, FILE *outf, int indent)
2224 dump_generic_bb (outf, bb, indent, TDF_VOPS|TDF_MEMSYMS);
2228 /* Dump a basic block on stderr. */
2230 void
2231 debug_tree_bb (basic_block bb)
2233 dump_bb (bb, stderr, 0);
2237 /* Dump basic block with index N on stderr. */
2239 basic_block
2240 debug_tree_bb_n (int n)
2242 debug_tree_bb (BASIC_BLOCK (n));
2243 return BASIC_BLOCK (n);
2247 /* Dump the CFG on stderr.
2249 FLAGS are the same used by the tree dumping functions
2250 (see TDF_* in tree-pass.h). */
2252 void
2253 debug_tree_cfg (int flags)
2255 dump_tree_cfg (stderr, flags);
2259 /* Dump the program showing basic block boundaries on the given FILE.
2261 FLAGS are the same used by the tree dumping functions (see TDF_* in
2262 tree.h). */
2264 void
2265 dump_tree_cfg (FILE *file, int flags)
2267 if (flags & TDF_DETAILS)
2269 const char *funcname
2270 = lang_hooks.decl_printable_name (current_function_decl, 2);
2272 fputc ('\n', file);
2273 fprintf (file, ";; Function %s\n\n", funcname);
2274 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2275 n_basic_blocks, n_edges, last_basic_block);
2277 brief_dump_cfg (file);
2278 fprintf (file, "\n");
2281 if (flags & TDF_STATS)
2282 dump_cfg_stats (file);
2284 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2288 /* Dump CFG statistics on FILE. */
2290 void
2291 dump_cfg_stats (FILE *file)
2293 static long max_num_merged_labels = 0;
2294 unsigned long size, total = 0;
2295 long num_edges;
2296 basic_block bb;
2297 const char * const fmt_str = "%-30s%-13s%12s\n";
2298 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2299 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2300 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2301 const char *funcname
2302 = lang_hooks.decl_printable_name (current_function_decl, 2);
2305 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2307 fprintf (file, "---------------------------------------------------------\n");
2308 fprintf (file, fmt_str, "", " Number of ", "Memory");
2309 fprintf (file, fmt_str, "", " instances ", "used ");
2310 fprintf (file, "---------------------------------------------------------\n");
2312 size = n_basic_blocks * sizeof (struct basic_block_def);
2313 total += size;
2314 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2315 SCALE (size), LABEL (size));
2317 num_edges = 0;
2318 FOR_EACH_BB (bb)
2319 num_edges += EDGE_COUNT (bb->succs);
2320 size = num_edges * sizeof (struct edge_def);
2321 total += size;
2322 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2324 fprintf (file, "---------------------------------------------------------\n");
2325 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2326 LABEL (total));
2327 fprintf (file, "---------------------------------------------------------\n");
2328 fprintf (file, "\n");
2330 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2331 max_num_merged_labels = cfg_stats.num_merged_labels;
2333 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2334 cfg_stats.num_merged_labels, max_num_merged_labels);
2336 fprintf (file, "\n");
2340 /* Dump CFG statistics on stderr. Keep extern so that it's always
2341 linked in the final executable. */
2343 void
2344 debug_cfg_stats (void)
2346 dump_cfg_stats (stderr);
2350 /* Dump the flowgraph to a .vcg FILE. */
2352 static void
2353 tree_cfg2vcg (FILE *file)
2355 edge e;
2356 edge_iterator ei;
2357 basic_block bb;
2358 const char *funcname
2359 = lang_hooks.decl_printable_name (current_function_decl, 2);
2361 /* Write the file header. */
2362 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2363 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2364 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2366 /* Write blocks and edges. */
2367 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2369 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2370 e->dest->index);
2372 if (e->flags & EDGE_FAKE)
2373 fprintf (file, " linestyle: dotted priority: 10");
2374 else
2375 fprintf (file, " linestyle: solid priority: 100");
2377 fprintf (file, " }\n");
2379 fputc ('\n', file);
2381 FOR_EACH_BB (bb)
2383 enum tree_code head_code, end_code;
2384 const char *head_name, *end_name;
2385 int head_line = 0;
2386 int end_line = 0;
2387 tree first = first_stmt (bb);
2388 tree last = last_stmt (bb);
2390 if (first)
2392 head_code = TREE_CODE (first);
2393 head_name = tree_code_name[head_code];
2394 head_line = get_lineno (first);
2396 else
2397 head_name = "no-statement";
2399 if (last)
2401 end_code = TREE_CODE (last);
2402 end_name = tree_code_name[end_code];
2403 end_line = get_lineno (last);
2405 else
2406 end_name = "no-statement";
2408 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2409 bb->index, bb->index, head_name, head_line, end_name,
2410 end_line);
2412 FOR_EACH_EDGE (e, ei, bb->succs)
2414 if (e->dest == EXIT_BLOCK_PTR)
2415 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2416 else
2417 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2419 if (e->flags & EDGE_FAKE)
2420 fprintf (file, " priority: 10 linestyle: dotted");
2421 else
2422 fprintf (file, " priority: 100 linestyle: solid");
2424 fprintf (file, " }\n");
2427 if (bb->next_bb != EXIT_BLOCK_PTR)
2428 fputc ('\n', file);
2431 fputs ("}\n\n", file);
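
For reference, the fprintf calls above produce VCG text of roughly the following shape; the function name, block numbers, tree code names and line numbers below are made up for illustration:

graph: { title: "foo"
node: { title: "ENTRY" label: "ENTRY" }
node: { title: "EXIT" label: "EXIT" }
edge: { sourcename: "ENTRY" targetname: "2" linestyle: solid priority: 100 }

node: { title: "2" label: "#2\ngimple_modify_stmt (10)\ncond_expr (12)"}
edge: { sourcename: "2" targetname: "3" priority: 100 linestyle: solid }
edge: { sourcename: "2" targetname: "4" priority: 100 linestyle: solid }

node: { title: "3" label: "#3\nreturn_expr (14)\nreturn_expr (14)"}
edge: { sourcename: "3" targetname: "EXIT" priority: 100 linestyle: solid }
}
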
2436 /*---------------------------------------------------------------------------
2437 Miscellaneous helpers
2438 ---------------------------------------------------------------------------*/
2440 /* Return true if T represents a stmt that always transfers control. */
2442 bool
2443 is_ctrl_stmt (tree t)
2445 return (TREE_CODE (t) == COND_EXPR
2446 || TREE_CODE (t) == SWITCH_EXPR
2447 || TREE_CODE (t) == GOTO_EXPR
2448 || TREE_CODE (t) == RETURN_EXPR
2449 || TREE_CODE (t) == RESX_EXPR);
2453 /* Return true if T is a statement that may alter the flow of control
2454 (e.g., a call to a non-returning function). */
2456 bool
2457 is_ctrl_altering_stmt (tree t)
2459 tree call;
2461 gcc_assert (t);
2462 call = get_call_expr_in (t);
2463 if (call)
2465 /* A non-pure/const CALL_EXPR alters flow control if the current
2466 function has nonlocal labels. */
2467 if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
2468 return true;
2470 /* A CALL_EXPR also alters control flow if it does not return. */
2471 if (call_expr_flags (call) & ECF_NORETURN)
2472 return true;
2475 /* OpenMP directives alter control flow. */
2476 if (OMP_DIRECTIVE_P (t))
2477 return true;
2479 /* If a statement can throw, it alters control flow. */
2480 return tree_can_throw_internal (t);
2484 /* Return true if T is a computed goto. */
2486 bool
2487 computed_goto_p (tree t)
2489 return (TREE_CODE (t) == GOTO_EXPR
2490 && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
2494 /* Return true if T is a simple local goto. */
2496 bool
2497 simple_goto_p (tree t)
2499 return (TREE_CODE (t) == GOTO_EXPR
2500 && TREE_CODE (GOTO_DESTINATION (t)) == LABEL_DECL);
2504 /* Return true if T can make an abnormal transfer of control flow.
2505 Transfers of control flow associated with EH are excluded. */
2507 bool
2508 tree_can_make_abnormal_goto (tree t)
2510 if (computed_goto_p (t))
2511 return true;
2512 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
2513 t = GIMPLE_STMT_OPERAND (t, 1);
2514 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2515 t = TREE_OPERAND (t, 0);
2516 if (TREE_CODE (t) == CALL_EXPR)
2517 return TREE_SIDE_EFFECTS (t) && current_function_has_nonlocal_label;
2518 return false;
2522 /* Return true if T should start a new basic block. PREV_T is the
2523 statement preceding T. It is used when T is a label or a case label.
2524 Labels should only start a new basic block if their previous statement
2525 wasn't a label. Otherwise, sequences of labels would generate
2526 unnecessary basic blocks that only contain a single label. */
2528 static inline bool
2529 stmt_starts_bb_p (tree t, tree prev_t)
2531 if (t == NULL_TREE)
2532 return false;
2534 /* LABEL_EXPRs start a new basic block only if the preceding
2535 statement wasn't a label of the same type. This prevents the
2536 creation of consecutive blocks that have nothing but a single
2537 label. */
2538 if (TREE_CODE (t) == LABEL_EXPR)
2540 /* Nonlocal and computed GOTO targets always start a new block. */
2541 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
2542 || FORCED_LABEL (LABEL_EXPR_LABEL (t)))
2543 return true;
2545 if (prev_t && TREE_CODE (prev_t) == LABEL_EXPR)
2547 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
2548 return true;
2550 cfg_stats.num_merged_labels++;
2551 return false;
2553 else
2554 return true;
2557 return false;
2561 /* Return true if T should end a basic block. */
2563 bool
2564 stmt_ends_bb_p (tree t)
2566 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
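
To illustrate how the two predicates above cooperate when a statement list is carved into blocks, here is a small sketch (not part of tree-cfg.c) that merely counts the blocks a flat statement list would produce. It would have to live in this file, since stmt_starts_bb_p is static, and it assumes the tree statement iterator API (tsi_*) used elsewhere here; the function name is illustrative.

static int
example_count_blocks (tree stmt_list)
{
  tree_stmt_iterator i;
  tree prev = NULL_TREE;
  bool open = false;
  int n = 0;

  for (i = tsi_start (stmt_list); !tsi_end_p (i); tsi_next (&i))
    {
      tree stmt = tsi_stmt (i);

      /* A statement opens a new block if no block is open or if it
         must start one (e.g. a label following a non-label).  */
      if (!open || stmt_starts_bb_p (stmt, prev))
        {
          n++;
          open = true;
        }

      /* Control statements and control-altering statements close the
         current block; the next statement will open another one.  */
      if (stmt_ends_bb_p (stmt))
        open = false;

      prev = stmt;
    }

  return n;
}
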
2570 /* Add gotos that used to be represented implicitly in the CFG. */
2572 void
2573 disband_implicit_edges (void)
2575 basic_block bb;
2576 block_stmt_iterator last;
2577 edge e;
2578 edge_iterator ei;
2579 tree stmt, label;
2581 FOR_EACH_BB (bb)
2583 last = bsi_last (bb);
2584 stmt = last_stmt (bb);
2586 if (stmt && TREE_CODE (stmt) == COND_EXPR)
2588 /* Remove superfluous gotos from COND_EXPR branches. Moved
2589 from cfg_remove_useless_stmts here since it violates the
2590 invariants for tree--cfg correspondence and thus fits better
2591 here where we do it anyway. */
2592 e = find_edge (bb, bb->next_bb);
2593 if (e)
2595 if (e->flags & EDGE_TRUE_VALUE)
2596 COND_EXPR_THEN (stmt) = build_empty_stmt ();
2597 else if (e->flags & EDGE_FALSE_VALUE)
2598 COND_EXPR_ELSE (stmt) = build_empty_stmt ();
2599 else
2600 gcc_unreachable ();
2601 e->flags |= EDGE_FALLTHRU;
2604 continue;
2607 if (stmt && TREE_CODE (stmt) == RETURN_EXPR)
2609 /* Remove the RETURN_EXPR if we may fall through to the exit
2610 instead. */
2611 gcc_assert (single_succ_p (bb));
2612 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
2614 if (bb->next_bb == EXIT_BLOCK_PTR
2615 && !TREE_OPERAND (stmt, 0))
2617 bsi_remove (&last, true);
2618 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
2620 continue;
2623 /* There can be no fallthru edge if the last statement is a control
2624 one. */
2625 if (stmt && is_ctrl_stmt (stmt))
2626 continue;
2628 /* Find a fallthru edge and emit the goto if necessary. */
2629 FOR_EACH_EDGE (e, ei, bb->succs)
2630 if (e->flags & EDGE_FALLTHRU)
2631 break;
2633 if (!e || e->dest == bb->next_bb)
2634 continue;
2636 gcc_assert (e->dest != EXIT_BLOCK_PTR);
2637 label = tree_block_label (e->dest);
2639 stmt = build1 (GOTO_EXPR, void_type_node, label);
2640 #ifdef USE_MAPPED_LOCATION
2641 SET_EXPR_LOCATION (stmt, e->goto_locus);
2642 #else
2643 SET_EXPR_LOCUS (stmt, e->goto_locus);
2644 #endif
2645 bsi_insert_after (&last, stmt, BSI_NEW_STMT);
2646 e->flags &= ~EDGE_FALLTHRU;
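
In other words, a block whose fallthru successor is not the textually next block gets an explicit goto appended. A rough before/after sketch, with illustrative block and label names:

Before (the jump exists only as a fallthru edge in the CFG, here to a block that is not the next one):

  <bb 3>:
    a = a + 1;

After disband_implicit_edges (an explicit goto to the label of the edge's destination has been appended and EDGE_FALLTHRU cleared):

  <bb 3>:
    a = a + 1;
    goto <L7>;
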
2650 /* Remove block annotations and other data structures. */
2652 void
2653 delete_tree_cfg_annotations (void)
2655 basic_block bb;
2656 block_stmt_iterator bsi;
2658 /* Remove annotations from every tree in the function. */
2659 FOR_EACH_BB (bb)
2660 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
2662 tree stmt = bsi_stmt (bsi);
2663 ggc_free (stmt->base.ann);
2664 stmt->base.ann = NULL;
2666 label_to_block_map = NULL;
2670 /* Return the first statement in basic block BB. */
2672 tree
2673 first_stmt (basic_block bb)
2675 block_stmt_iterator i = bsi_start (bb);
2676 return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
2680 /* Return the last statement in basic block BB. */
2682 tree
2683 last_stmt (basic_block bb)
2685 block_stmt_iterator b = bsi_last (bb);
2686 return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
2690 /* Return the last statement of an otherwise empty block. Return NULL
2691 if the block is totally empty, or if it contains more than one
2692 statement. */
2694 tree
2695 last_and_only_stmt (basic_block bb)
2697 block_stmt_iterator i = bsi_last (bb);
2698 tree last, prev;
2700 if (bsi_end_p (i))
2701 return NULL_TREE;
2703 last = bsi_stmt (i);
2704 bsi_prev (&i);
2705 if (bsi_end_p (i))
2706 return last;
2708 /* Empty statements should no longer appear in the instruction stream.
2709 Everything that might have appeared before should be deleted by
2710 remove_useless_stmts, and the optimizers should just bsi_remove
2711 instead of smashing with build_empty_stmt.
2713 Thus the only thing that should appear here in a block containing
2714 one executable statement is a label. */
2715 prev = bsi_stmt (i);
2716 if (TREE_CODE (prev) == LABEL_EXPR)
2717 return last;
2718 else
2719 return NULL_TREE;
2723 /* Mark BB as the basic block holding statement T. */
2725 void
2726 set_bb_for_stmt (tree t, basic_block bb)
2728 if (TREE_CODE (t) == PHI_NODE)
2729 PHI_BB (t) = bb;
2730 else if (TREE_CODE (t) == STATEMENT_LIST)
2732 tree_stmt_iterator i;
2733 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2734 set_bb_for_stmt (tsi_stmt (i), bb);
2736 else
2738 stmt_ann_t ann = get_stmt_ann (t);
2739 ann->bb = bb;
2741 /* If the statement is a label, add the label to block-to-labels map
2742 so that we can speed up edge creation for GOTO_EXPRs. */
2743 if (TREE_CODE (t) == LABEL_EXPR)
2745 int uid;
2747 t = LABEL_EXPR_LABEL (t);
2748 uid = LABEL_DECL_UID (t);
2749 if (uid == -1)
2751 unsigned old_len = VEC_length (basic_block, label_to_block_map);
2752 LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
2753 if (old_len <= (unsigned) uid)
2755 unsigned new_len = 3 * uid / 2;
2757 VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
2758 new_len);
2761 else
2762 /* We're moving an existing label. Make sure that we've
2763 removed it from the old block. */
2764 gcc_assert (!bb
2765 || !VEC_index (basic_block, label_to_block_map, uid));
2766 VEC_replace (basic_block, label_to_block_map, uid, bb);
2771 /* Faster version of set_bb_for_stmt that assumes the statement is being
2772 moved from one basic block to another.
2773 For BB splitting we can run into a quadratic case, so performance is
2774 quite important; knowing that the tables are big enough, change_bb_for_stmt
2775 can be inlined as a leaf function. */
2776 static inline void
2777 change_bb_for_stmt (tree t, basic_block bb)
2779 get_stmt_ann (t)->bb = bb;
2780 if (TREE_CODE (t) == LABEL_EXPR)
2781 VEC_replace (basic_block, label_to_block_map,
2782 LABEL_DECL_UID (LABEL_EXPR_LABEL (t)), bb);
2785 /* Finds iterator for STMT. */
2787 extern block_stmt_iterator
2788 bsi_for_stmt (tree stmt)
2790 block_stmt_iterator bsi;
2792 for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
2793 if (bsi_stmt (bsi) == stmt)
2794 return bsi;
2796 gcc_unreachable ();
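
A minimal sketch (not part of tree-cfg.c) of how a caller typically combines bsi_for_stmt with the insertion routines defined below; it assumes the declarations from tree-flow.h, and the function name is illustrative. Given an existing statement STMT and a freshly built statement NEW_STMT:

static void
example_insert_after_stmt (tree stmt, tree new_stmt)
{
  /* Locate STMT in its block, then link NEW_STMT right after it,
     leaving the iterator on STMT itself.  */
  block_stmt_iterator bsi = bsi_for_stmt (stmt);
  bsi_insert_after (&bsi, new_stmt, BSI_SAME_STMT);
}
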
2799 /* Mark statement T as modified, and update it. */
2800 static inline void
2801 update_modified_stmts (tree t)
2803 if (!ssa_operands_active ())
2804 return;
2805 if (TREE_CODE (t) == STATEMENT_LIST)
2807 tree_stmt_iterator i;
2808 tree stmt;
2809 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2811 stmt = tsi_stmt (i);
2812 update_stmt_if_modified (stmt);
2815 else
2816 update_stmt_if_modified (t);
2819 /* Insert statement (or statement list) T before the statement
2820 pointed-to by iterator I. M specifies how to update iterator I
2821 after insertion (see enum bsi_iterator_update). */
2823 void
2824 bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2826 set_bb_for_stmt (t, i->bb);
2827 update_modified_stmts (t);
2828 tsi_link_before (&i->tsi, t, m);
2832 /* Insert statement (or statement list) T after the statement
2833 pointed-to by iterator I. M specifies how to update iterator I
2834 after insertion (see enum bsi_iterator_update). */
2836 void
2837 bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2839 set_bb_for_stmt (t, i->bb);
2840 update_modified_stmts (t);
2841 tsi_link_after (&i->tsi, t, m);
2845 /* Remove the statement pointed to by iterator I. The iterator is updated
2846 to the next statement.
2848 When REMOVE_EH_INFO is true we remove the statement pointed to by
2849 iterator I from the EH tables. Otherwise we do not modify the EH
2850 tables.
2852 Generally, REMOVE_EH_INFO should be true when the statement is going to
2853 be removed from the IL and not reinserted elsewhere. */
2855 void
2856 bsi_remove (block_stmt_iterator *i, bool remove_eh_info)
2858 tree t = bsi_stmt (*i);
2859 set_bb_for_stmt (t, NULL);
2860 delink_stmt_imm_use (t);
2861 tsi_delink (&i->tsi);
2862 mark_stmt_modified (t);
2863 if (remove_eh_info)
2865 remove_stmt_from_eh_region (t);
2866 gimple_remove_stmt_histograms (cfun, t);
2871 /* Move the statement at FROM so it comes right after the statement at TO. */
2873 void
2874 bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
2876 tree stmt = bsi_stmt (*from);
2877 bsi_remove (from, false);
2878 bsi_insert_after (to, stmt, BSI_SAME_STMT);
2882 /* Move the statement at FROM so it comes right before the statement at TO. */
2884 void
2885 bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
2887 tree stmt = bsi_stmt (*from);
2888 bsi_remove (from, false);
2889 bsi_insert_before (to, stmt, BSI_SAME_STMT);
2893 /* Move the statement at FROM to the end of basic block BB. */
2895 void
2896 bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
2898 block_stmt_iterator last = bsi_last (bb);
2900 /* Have to check bsi_end_p because it could be an empty block. */
2901 if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
2902 bsi_move_before (from, &last);
2903 else
2904 bsi_move_after (from, &last);
2908 /* Replace the contents of the statement pointed to by iterator BSI
2909 with STMT. If UPDATE_EH_INFO is true, the exception handling
2910 information of the original statement is moved to the new statement. */
2912 void
2913 bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool update_eh_info)
2915 int eh_region;
2916 tree orig_stmt = bsi_stmt (*bsi);
2918 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
2919 set_bb_for_stmt (stmt, bsi->bb);
2921 /* Preserve EH region information from the original statement, if
2922 requested by the caller. */
2923 if (update_eh_info)
2925 eh_region = lookup_stmt_eh_region (orig_stmt);
2926 if (eh_region >= 0)
2928 remove_stmt_from_eh_region (orig_stmt);
2929 add_stmt_to_eh_region (stmt, eh_region);
2930 gimple_duplicate_stmt_histograms (cfun, stmt, cfun, orig_stmt);
2931 gimple_remove_stmt_histograms (cfun, orig_stmt);
2935 delink_stmt_imm_use (orig_stmt);
2936 *bsi_stmt_ptr (*bsi) = stmt;
2937 mark_stmt_modified (stmt);
2938 update_modified_stmts (stmt);
2942 /* Find a location for inserting a statement on edge E and store it
2943 in *BSI. Every attempt is made to place the statement in an existing
2944 basic block, but sometimes that isn't possible. When it isn't possible,
2945 the edge is split and the location is placed in the new block.
2947 In all cases, the returned *BSI points to the correct location. The
2948 return value is true if insertion should be done after the location,
2949 or false if it should be done before the location. If a new basic
2950 block has to be created, it is stored in *NEW_BB. */
2952 static bool
2953 tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
2954 basic_block *new_bb)
2956 basic_block dest, src;
2957 tree tmp;
2959 dest = e->dest;
2960 restart:
2962 /* If the destination has one predecessor which has no PHI nodes,
2963 insert there. Except for the exit block.
2965 The requirement for no PHI nodes could be relaxed. Basically we
2966 would have to examine the PHIs to prove that none of them used
2967 the value set by the statement we want to insert on E. That
2968 hardly seems worth the effort. */
2969 if (single_pred_p (dest)
2970 && ! phi_nodes (dest)
2971 && dest != EXIT_BLOCK_PTR)
2973 *bsi = bsi_start (dest);
2974 if (bsi_end_p (*bsi))
2975 return true;
2977 /* Make sure we insert after any leading labels. */
2978 tmp = bsi_stmt (*bsi);
2979 while (TREE_CODE (tmp) == LABEL_EXPR)
2981 bsi_next (bsi);
2982 if (bsi_end_p (*bsi))
2983 break;
2984 tmp = bsi_stmt (*bsi);
2987 if (bsi_end_p (*bsi))
2989 *bsi = bsi_last (dest);
2990 return true;
2992 else
2993 return false;
2996 /* If the source has one successor, the edge is not abnormal and
2997 the last statement does not end a basic block, insert there.
2998 Except for the entry block. */
2999 src = e->src;
3000 if ((e->flags & EDGE_ABNORMAL) == 0
3001 && single_succ_p (src)
3002 && src != ENTRY_BLOCK_PTR)
3004 *bsi = bsi_last (src);
3005 if (bsi_end_p (*bsi))
3006 return true;
3008 tmp = bsi_stmt (*bsi);
3009 if (!stmt_ends_bb_p (tmp))
3010 return true;
3012 /* Insert code just before returning the value. We may need to decompose
3013 the return in case it contains a non-trivial operand. */
3014 if (TREE_CODE (tmp) == RETURN_EXPR)
3016 tree op = TREE_OPERAND (tmp, 0);
3017 if (op && !is_gimple_val (op))
3019 gcc_assert (TREE_CODE (op) == GIMPLE_MODIFY_STMT);
3020 bsi_insert_before (bsi, op, BSI_NEW_STMT);
3021 TREE_OPERAND (tmp, 0) = GIMPLE_STMT_OPERAND (op, 0);
3023 bsi_prev (bsi);
3024 return true;
3028 /* Otherwise, create a new basic block, and split this edge. */
3029 dest = split_edge (e);
3030 if (new_bb)
3031 *new_bb = dest;
3032 e = single_pred_edge (dest);
3033 goto restart;
3037 /* This routine will commit all pending edge insertions, creating any new
3038 basic blocks which are necessary. */
3040 void
3041 bsi_commit_edge_inserts (void)
3043 basic_block bb;
3044 edge e;
3045 edge_iterator ei;
3047 bsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR), NULL);
3049 FOR_EACH_BB (bb)
3050 FOR_EACH_EDGE (e, ei, bb->succs)
3051 bsi_commit_one_edge_insert (e, NULL);
3055 /* Commit insertions pending at edge E. If a new block is created, set NEW_BB
3056 to this block, otherwise set it to NULL. */
3058 void
3059 bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
3061 if (new_bb)
3062 *new_bb = NULL;
3063 if (PENDING_STMT (e))
3065 block_stmt_iterator bsi;
3066 tree stmt = PENDING_STMT (e);
3068 PENDING_STMT (e) = NULL_TREE;
3070 if (tree_find_edge_insert_loc (e, &bsi, new_bb))
3071 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3072 else
3073 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3078 /* Add STMT to the pending list of edge E. No actual insertion is
3079 performed until bsi_commit_edge_inserts () is called. */
3081 void
3082 bsi_insert_on_edge (edge e, tree stmt)
3084 append_to_statement_list (stmt, &PENDING_STMT (e));
3087 /* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts. If a new
3088 block has to be created, it is returned. */
3090 basic_block
3091 bsi_insert_on_edge_immediate (edge e, tree stmt)
3093 block_stmt_iterator bsi;
3094 basic_block new_bb = NULL;
3096 gcc_assert (!PENDING_STMT (e));
3098 if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
3099 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3100 else
3101 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3103 return new_bb;
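
A minimal sketch (not part of tree-cfg.c) of the usual two-phase pattern built on the routines above: queue statements with bsi_insert_on_edge while walking the CFG, then commit them all at once; bsi_insert_on_edge_immediate is the alternative when the insertion must happen on the spot. The helper stmt_to_insert_on is hypothetical; everything else is declared in this file or in tree-flow.h.

static void
example_insert_on_edges (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* Phase 1: queue.  Nothing is inserted yet, so the CFG is not
     modified while we are still iterating over it.  */
  FOR_EACH_BB (bb)
    FOR_EACH_EDGE (e, ei, bb->succs)
      {
        tree stmt = stmt_to_insert_on (e);  /* hypothetical helper */
        if (stmt)
          bsi_insert_on_edge (e, stmt);
      }

  /* Phase 2: commit.  Edges that cannot accept code in an existing
     block are split here, creating new blocks as needed.  */
  bsi_commit_edge_inserts ();
}
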
3106 /*---------------------------------------------------------------------------
3107 Tree specific functions for CFG manipulation
3108 ---------------------------------------------------------------------------*/
3110 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
3112 static void
3113 reinstall_phi_args (edge new_edge, edge old_edge)
3115 tree var, phi;
3117 if (!PENDING_STMT (old_edge))
3118 return;
3120 for (var = PENDING_STMT (old_edge), phi = phi_nodes (new_edge->dest);
3121 var && phi;
3122 var = TREE_CHAIN (var), phi = PHI_CHAIN (phi))
3124 tree result = TREE_PURPOSE (var);
3125 tree arg = TREE_VALUE (var);
3127 gcc_assert (result == PHI_RESULT (phi));
3129 add_phi_arg (phi, arg, new_edge);
3132 PENDING_STMT (old_edge) = NULL;
3135 /* Returns the basic block after which the new basic block created
3136 by splitting edge EDGE_IN should be placed. Tries to keep the new block
3137 near its "logical" location. This is of most help to humans looking
3138 at debugging dumps. */
3140 static basic_block
3141 split_edge_bb_loc (edge edge_in)
3143 basic_block dest = edge_in->dest;
3145 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
3146 return edge_in->src;
3147 else
3148 return dest->prev_bb;
3151 /* Split a (typically critical) edge EDGE_IN. Return the new block.
3152 Abort on abnormal edges. */
3154 static basic_block
3155 tree_split_edge (edge edge_in)
3157 basic_block new_bb, after_bb, dest;
3158 edge new_edge, e;
3160 /* Abnormal edges cannot be split. */
3161 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
3163 dest = edge_in->dest;
3165 after_bb = split_edge_bb_loc (edge_in);
3167 new_bb = create_empty_bb (after_bb);
3168 new_bb->frequency = EDGE_FREQUENCY (edge_in);
3169 new_bb->count = edge_in->count;
3170 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
3171 new_edge->probability = REG_BR_PROB_BASE;
3172 new_edge->count = edge_in->count;
3174 e = redirect_edge_and_branch (edge_in, new_bb);
3175 gcc_assert (e);
3176 reinstall_phi_args (new_edge, e);
3178 return new_bb;
3182 /* Return true when BB has label LABEL in it. */
3184 static bool
3185 has_label_p (basic_block bb, tree label)
3187 block_stmt_iterator bsi;
3189 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3191 tree stmt = bsi_stmt (bsi);
3193 if (TREE_CODE (stmt) != LABEL_EXPR)
3194 return false;
3195 if (LABEL_EXPR_LABEL (stmt) == label)
3196 return true;
3198 return false;
3202 /* Callback for walk_tree; check that all elements with their address
3203 taken are properly noticed as such. DATA is non-null if and only if
3204 TP was seen inside a PHI node. */
3206 static tree
3207 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3209 tree t = *tp, x;
3210 bool in_phi = (data != NULL);
3212 if (TYPE_P (t))
3213 *walk_subtrees = 0;
3215 /* Check operand N for being valid GIMPLE and give error MSG if not. */
3216 #define CHECK_OP(N, MSG) \
3217 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
3218 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
3220 switch (TREE_CODE (t))
3222 case SSA_NAME:
3223 if (SSA_NAME_IN_FREE_LIST (t))
3225 error ("SSA name in freelist but still referenced");
3226 return *tp;
3228 break;
3230 case ASSERT_EXPR:
3231 x = fold (ASSERT_EXPR_COND (t));
3232 if (x == boolean_false_node)
3234 error ("ASSERT_EXPR with an always-false condition");
3235 return *tp;
3237 break;
3239 case MODIFY_EXPR:
3240 gcc_unreachable ();
3242 case GIMPLE_MODIFY_STMT:
3243 x = GIMPLE_STMT_OPERAND (t, 0);
3244 if (TREE_CODE (x) == BIT_FIELD_REF
3245 && is_gimple_reg (TREE_OPERAND (x, 0)))
3247 error ("GIMPLE register modified with BIT_FIELD_REF");
3248 return t;
3250 break;
3252 case ADDR_EXPR:
3254 bool old_invariant;
3255 bool old_constant;
3256 bool old_side_effects;
3257 bool new_invariant;
3258 bool new_constant;
3259 bool new_side_effects;
3261 /* ??? tree-ssa-alias.c may have overlooked dead PHI nodes, missing
3262 dead PHIs that take the address of something. But if the PHI
3263 result is dead, the fact that it takes the address of anything
3264 is irrelevant. Because we can not tell from here if a PHI result
3265 is dead, we just skip this check for PHIs altogether. This means
3266 we may be missing "valid" checks, but what can you do?
3267 This was PR19217. */
3268 if (in_phi)
3269 break;
3271 old_invariant = TREE_INVARIANT (t);
3272 old_constant = TREE_CONSTANT (t);
3273 old_side_effects = TREE_SIDE_EFFECTS (t);
3275 recompute_tree_invariant_for_addr_expr (t);
3276 new_invariant = TREE_INVARIANT (t);
3277 new_side_effects = TREE_SIDE_EFFECTS (t);
3278 new_constant = TREE_CONSTANT (t);
3280 if (old_invariant != new_invariant)
3282 error ("invariant not recomputed when ADDR_EXPR changed");
3283 return t;
3286 if (old_constant != new_constant)
3288 error ("constant not recomputed when ADDR_EXPR changed");
3289 return t;
3291 if (old_side_effects != new_side_effects)
3293 error ("side effects not recomputed when ADDR_EXPR changed");
3294 return t;
3297 /* Skip any references (they will be checked when we recurse down the
3298 tree) and ensure that any variable used as a prefix is marked
3299 addressable. */
3300 for (x = TREE_OPERAND (t, 0);
3301 handled_component_p (x);
3302 x = TREE_OPERAND (x, 0))
3305 if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
3306 return NULL;
3307 if (!TREE_ADDRESSABLE (x))
3309 error ("address taken, but ADDRESSABLE bit not set");
3310 return x;
3312 break;
3315 case COND_EXPR:
3316 x = COND_EXPR_COND (t);
3317 if (TREE_CODE (TREE_TYPE (x)) != BOOLEAN_TYPE)
3319 error ("non-boolean used in condition");
3320 return x;
3322 if (!is_gimple_condexpr (x))
3324 error ("invalid conditional operand");
3325 return x;
3327 break;
3329 case NOP_EXPR:
3330 case CONVERT_EXPR:
3331 case FIX_TRUNC_EXPR:
3332 case FLOAT_EXPR:
3333 case NEGATE_EXPR:
3334 case ABS_EXPR:
3335 case BIT_NOT_EXPR:
3336 case NON_LVALUE_EXPR:
3337 case TRUTH_NOT_EXPR:
3338 CHECK_OP (0, "invalid operand to unary operator");
3339 break;
3341 case REALPART_EXPR:
3342 case IMAGPART_EXPR:
3343 case COMPONENT_REF:
3344 case ARRAY_REF:
3345 case ARRAY_RANGE_REF:
3346 case BIT_FIELD_REF:
3347 case VIEW_CONVERT_EXPR:
3348 /* We have a nest of references. Verify that each of the operands
3349 that determine where to reference is either a constant or a variable,
3350 verify that the base is valid, and then show we've already checked
3351 the subtrees. */
3352 while (handled_component_p (t))
3354 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3355 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3356 else if (TREE_CODE (t) == ARRAY_REF
3357 || TREE_CODE (t) == ARRAY_RANGE_REF)
3359 CHECK_OP (1, "invalid array index");
3360 if (TREE_OPERAND (t, 2))
3361 CHECK_OP (2, "invalid array lower bound");
3362 if (TREE_OPERAND (t, 3))
3363 CHECK_OP (3, "invalid array stride");
3365 else if (TREE_CODE (t) == BIT_FIELD_REF)
3367 CHECK_OP (1, "invalid operand to BIT_FIELD_REF");
3368 CHECK_OP (2, "invalid operand to BIT_FIELD_REF");
3371 t = TREE_OPERAND (t, 0);
3374 if (!CONSTANT_CLASS_P (t) && !is_gimple_lvalue (t))
3376 error ("invalid reference prefix");
3377 return t;
3379 *walk_subtrees = 0;
3380 break;
3382 case LT_EXPR:
3383 case LE_EXPR:
3384 case GT_EXPR:
3385 case GE_EXPR:
3386 case EQ_EXPR:
3387 case NE_EXPR:
3388 case UNORDERED_EXPR:
3389 case ORDERED_EXPR:
3390 case UNLT_EXPR:
3391 case UNLE_EXPR:
3392 case UNGT_EXPR:
3393 case UNGE_EXPR:
3394 case UNEQ_EXPR:
3395 case LTGT_EXPR:
3396 case PLUS_EXPR:
3397 case MINUS_EXPR:
3398 case MULT_EXPR:
3399 case TRUNC_DIV_EXPR:
3400 case CEIL_DIV_EXPR:
3401 case FLOOR_DIV_EXPR:
3402 case ROUND_DIV_EXPR:
3403 case TRUNC_MOD_EXPR:
3404 case CEIL_MOD_EXPR:
3405 case FLOOR_MOD_EXPR:
3406 case ROUND_MOD_EXPR:
3407 case RDIV_EXPR:
3408 case EXACT_DIV_EXPR:
3409 case MIN_EXPR:
3410 case MAX_EXPR:
3411 case LSHIFT_EXPR:
3412 case RSHIFT_EXPR:
3413 case LROTATE_EXPR:
3414 case RROTATE_EXPR:
3415 case BIT_IOR_EXPR:
3416 case BIT_XOR_EXPR:
3417 case BIT_AND_EXPR:
3418 CHECK_OP (0, "invalid operand to binary operator");
3419 CHECK_OP (1, "invalid operand to binary operator");
3420 break;
3422 case CONSTRUCTOR:
3423 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3424 *walk_subtrees = 0;
3425 break;
3427 default:
3428 break;
3430 return NULL;
3432 #undef CHECK_OP
3436 /* Verify STMT, return true if STMT is not in GIMPLE form.
3437 TODO: Implement type checking. */
3439 static bool
3440 verify_stmt (tree stmt, bool last_in_block)
3442 tree addr;
3444 if (OMP_DIRECTIVE_P (stmt))
3446 /* OpenMP directives are validated by the FE and never operated
3447 on by the optimizers. Furthermore, OMP_FOR may contain
3448 non-gimple expressions when the main index variable has had
3449 its address taken. This does not affect the loop itself
3450 because the header of an OMP_FOR is merely used to determine
3451 how to set up the parallel iteration. */
3452 return false;
3455 if (!is_gimple_stmt (stmt))
3457 error ("is not a valid GIMPLE statement");
3458 goto fail;
3461 addr = walk_tree (&stmt, verify_expr, NULL, NULL);
3462 if (addr)
3464 debug_generic_stmt (addr);
3465 return true;
3468 /* If the statement is marked as part of an EH region, then it is
3469 expected that the statement could throw. Verify that when
3470 optimizations simplify statements enough to prove that they
3471 cannot throw, the other data structures are updated
3472 to match. */
3473 if (lookup_stmt_eh_region (stmt) >= 0)
3475 if (!tree_could_throw_p (stmt))
3477 error ("statement marked for throw, but doesn%'t");
3478 goto fail;
3480 if (!last_in_block && tree_can_throw_internal (stmt))
3482 error ("statement marked for throw in middle of block");
3483 goto fail;
3487 return false;
3489 fail:
3490 debug_generic_stmt (stmt);
3491 return true;
3495 /* Return true when T can be shared. */
3497 static bool
3498 tree_node_can_be_shared (tree t)
3500 if (IS_TYPE_OR_DECL_P (t)
3501 || is_gimple_min_invariant (t)
3502 || TREE_CODE (t) == SSA_NAME
3503 || t == error_mark_node
3504 || TREE_CODE (t) == IDENTIFIER_NODE)
3505 return true;
3507 if (TREE_CODE (t) == CASE_LABEL_EXPR)
3508 return true;
3510 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3511 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
3512 || TREE_CODE (t) == COMPONENT_REF
3513 || TREE_CODE (t) == REALPART_EXPR
3514 || TREE_CODE (t) == IMAGPART_EXPR)
3515 t = TREE_OPERAND (t, 0);
3517 if (DECL_P (t))
3518 return true;
3520 return false;
3524 /* Called via walk_tree. Verify tree sharing. */
3526 static tree
3527 verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
3529 struct pointer_set_t *visited = (struct pointer_set_t *) data;
3531 if (tree_node_can_be_shared (*tp))
3533 *walk_subtrees = false;
3534 return NULL;
3537 if (pointer_set_insert (visited, *tp))
3538 return *tp;
3540 return NULL;
3544 /* Helper function for verify_gimple_tuples. */
3546 static tree
3547 verify_gimple_tuples_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
3548 void *data ATTRIBUTE_UNUSED)
3550 switch (TREE_CODE (*tp))
3552 case MODIFY_EXPR:
3553 error ("unexpected non-tuple");
3554 debug_tree (*tp);
3555 gcc_unreachable ();
3556 return NULL_TREE;
3558 default:
3559 return NULL_TREE;
3563 /* Verify that there are no trees that should have been converted to
3564 gimple tuples. Return true if T contains a node that should have
3565 been converted to a gimple tuple, but hasn't. */
3567 static bool
3568 verify_gimple_tuples (tree t)
3570 return walk_tree (&t, verify_gimple_tuples_1, NULL, NULL) != NULL;
3573 static bool eh_error_found;
3574 static int
3575 verify_eh_throw_stmt_node (void **slot, void *data)
3577 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
3578 struct pointer_set_t *visited = (struct pointer_set_t *) data;
3580 if (!pointer_set_contains (visited, node->stmt))
3582 error ("Dead STMT in EH table");
3583 debug_generic_stmt (node->stmt);
3584 eh_error_found = true;
3586 return 0;
3589 /* Verify the GIMPLE statement chain. */
3591 void
3592 verify_stmts (void)
3594 basic_block bb;
3595 block_stmt_iterator bsi;
3596 bool err = false;
3597 struct pointer_set_t *visited, *visited_stmts;
3598 tree addr;
3600 timevar_push (TV_TREE_STMT_VERIFY);
3601 visited = pointer_set_create ();
3602 visited_stmts = pointer_set_create ();
3604 FOR_EACH_BB (bb)
3606 tree phi;
3607 int i;
3609 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
3611 int phi_num_args = PHI_NUM_ARGS (phi);
3613 pointer_set_insert (visited_stmts, phi);
3614 if (bb_for_stmt (phi) != bb)
3616 error ("bb_for_stmt (phi) is set to a wrong basic block");
3617 err |= true;
3620 for (i = 0; i < phi_num_args; i++)
3622 tree t = PHI_ARG_DEF (phi, i);
3623 tree addr;
3625 /* Addressable variables do have SSA_NAMEs but they
3626 are not considered gimple values. */
3627 if (TREE_CODE (t) != SSA_NAME
3628 && TREE_CODE (t) != FUNCTION_DECL
3629 && !is_gimple_val (t))
3631 error ("PHI def is not a GIMPLE value");
3632 debug_generic_stmt (phi);
3633 debug_generic_stmt (t);
3634 err |= true;
3637 addr = walk_tree (&t, verify_expr, (void *) 1, NULL);
3638 if (addr)
3640 debug_generic_stmt (addr);
3641 err |= true;
3644 addr = walk_tree (&t, verify_node_sharing, visited, NULL);
3645 if (addr)
3647 error ("incorrect sharing of tree nodes");
3648 debug_generic_stmt (phi);
3649 debug_generic_stmt (addr);
3650 err |= true;
3655 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
3657 tree stmt = bsi_stmt (bsi);
3659 pointer_set_insert (visited_stmts, stmt);
3660 err |= verify_gimple_tuples (stmt);
3662 if (bb_for_stmt (stmt) != bb)
3664 error ("bb_for_stmt (stmt) is set to a wrong basic block");
3665 err |= true;
3668 bsi_next (&bsi);
3669 err |= verify_stmt (stmt, bsi_end_p (bsi));
3670 addr = walk_tree (&stmt, verify_node_sharing, visited, NULL);
3671 if (addr)
3673 error ("incorrect sharing of tree nodes");
3674 debug_generic_stmt (stmt);
3675 debug_generic_stmt (addr);
3676 err |= true;
3680 eh_error_found = false;
3681 if (get_eh_throw_stmt_table (cfun))
3682 htab_traverse (get_eh_throw_stmt_table (cfun),
3683 verify_eh_throw_stmt_node,
3684 visited_stmts);
3686 if (err | eh_error_found)
3687 internal_error ("verify_stmts failed");
3689 pointer_set_destroy (visited);
3690 pointer_set_destroy (visited_stmts);
3691 verify_histograms ();
3692 timevar_pop (TV_TREE_STMT_VERIFY);
3696 /* Verifies that the flow information is OK. */
3698 static int
3699 tree_verify_flow_info (void)
3701 int err = 0;
3702 basic_block bb;
3703 block_stmt_iterator bsi;
3704 tree stmt;
3705 edge e;
3706 edge_iterator ei;
3708 if (ENTRY_BLOCK_PTR->stmt_list)
3710 error ("ENTRY_BLOCK has a statement list associated with it");
3711 err = 1;
3714 if (EXIT_BLOCK_PTR->stmt_list)
3716 error ("EXIT_BLOCK has a statement list associated with it");
3717 err = 1;
3720 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
3721 if (e->flags & EDGE_FALLTHRU)
3723 error ("fallthru to exit from bb %d", e->src->index);
3724 err = 1;
3727 FOR_EACH_BB (bb)
3729 bool found_ctrl_stmt = false;
3731 stmt = NULL_TREE;
3733 /* Skip the labels at the start of the basic block. */
3734 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3736 tree prev_stmt = stmt;
3738 stmt = bsi_stmt (bsi);
3740 if (TREE_CODE (stmt) != LABEL_EXPR)
3741 break;
3743 if (prev_stmt && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
3745 error ("nonlocal label ");
3746 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3747 fprintf (stderr, " is not first in a sequence of labels in bb %d",
3748 bb->index);
3749 err = 1;
3752 if (label_to_block (LABEL_EXPR_LABEL (stmt)) != bb)
3754 error ("label ");
3755 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3756 fprintf (stderr, " to block does not match in bb %d",
3757 bb->index);
3758 err = 1;
3761 if (decl_function_context (LABEL_EXPR_LABEL (stmt))
3762 != current_function_decl)
3764 error ("label ");
3765 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3766 fprintf (stderr, " has incorrect context in bb %d",
3767 bb->index);
3768 err = 1;
3772 /* Verify that body of basic block BB is free of control flow. */
3773 for (; !bsi_end_p (bsi); bsi_next (&bsi))
3775 tree stmt = bsi_stmt (bsi);
3777 if (found_ctrl_stmt)
3779 error ("control flow in the middle of basic block %d",
3780 bb->index);
3781 err = 1;
3784 if (stmt_ends_bb_p (stmt))
3785 found_ctrl_stmt = true;
3787 if (TREE_CODE (stmt) == LABEL_EXPR)
3789 error ("label ");
3790 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3791 fprintf (stderr, " in the middle of basic block %d", bb->index);
3792 err = 1;
3796 bsi = bsi_last (bb);
3797 if (bsi_end_p (bsi))
3798 continue;
3800 stmt = bsi_stmt (bsi);
3802 err |= verify_eh_edges (stmt);
3804 if (is_ctrl_stmt (stmt))
3806 FOR_EACH_EDGE (e, ei, bb->succs)
3807 if (e->flags & EDGE_FALLTHRU)
3809 error ("fallthru edge after a control statement in bb %d",
3810 bb->index);
3811 err = 1;
3815 if (TREE_CODE (stmt) != COND_EXPR)
3817 /* Verify that no outgoing edge has EDGE_TRUE_VALUE or EDGE_FALSE_VALUE
3818 set after anything other than a COND_EXPR. */
3819 FOR_EACH_EDGE (e, ei, bb->succs)
3820 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
3822 error ("true/false edge after a non-COND_EXPR in bb %d",
3823 bb->index);
3824 err = 1;
3828 switch (TREE_CODE (stmt))
3830 case COND_EXPR:
3832 edge true_edge;
3833 edge false_edge;
3834 if (TREE_CODE (COND_EXPR_THEN (stmt)) != GOTO_EXPR
3835 || TREE_CODE (COND_EXPR_ELSE (stmt)) != GOTO_EXPR)
3837 error ("structured COND_EXPR at the end of bb %d", bb->index);
3838 err = 1;
3841 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
3843 if (!true_edge || !false_edge
3844 || !(true_edge->flags & EDGE_TRUE_VALUE)
3845 || !(false_edge->flags & EDGE_FALSE_VALUE)
3846 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3847 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3848 || EDGE_COUNT (bb->succs) >= 3)
3850 error ("wrong outgoing edge flags at end of bb %d",
3851 bb->index);
3852 err = 1;
3855 if (!has_label_p (true_edge->dest,
3856 GOTO_DESTINATION (COND_EXPR_THEN (stmt))))
3858 error ("%<then%> label does not match edge at end of bb %d",
3859 bb->index);
3860 err = 1;
3863 if (!has_label_p (false_edge->dest,
3864 GOTO_DESTINATION (COND_EXPR_ELSE (stmt))))
3866 error ("%<else%> label does not match edge at end of bb %d",
3867 bb->index);
3868 err = 1;
3871 break;
3873 case GOTO_EXPR:
3874 if (simple_goto_p (stmt))
3876 error ("explicit goto at end of bb %d", bb->index);
3877 err = 1;
3879 else
3881 /* FIXME. We should double check that the labels in the
3882 destination blocks have their address taken. */
3883 FOR_EACH_EDGE (e, ei, bb->succs)
3884 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
3885 | EDGE_FALSE_VALUE))
3886 || !(e->flags & EDGE_ABNORMAL))
3888 error ("wrong outgoing edge flags at end of bb %d",
3889 bb->index);
3890 err = 1;
3893 break;
3895 case RETURN_EXPR:
3896 if (!single_succ_p (bb)
3897 || (single_succ_edge (bb)->flags
3898 & (EDGE_FALLTHRU | EDGE_ABNORMAL
3899 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3901 error ("wrong outgoing edge flags at end of bb %d", bb->index);
3902 err = 1;
3904 if (single_succ (bb) != EXIT_BLOCK_PTR)
3906 error ("return edge does not point to exit in bb %d",
3907 bb->index);
3908 err = 1;
3910 break;
3912 case SWITCH_EXPR:
3914 tree prev;
3915 edge e;
3916 size_t i, n;
3917 tree vec;
3919 vec = SWITCH_LABELS (stmt);
3920 n = TREE_VEC_LENGTH (vec);
3922 /* Mark all the destination basic blocks. */
3923 for (i = 0; i < n; ++i)
3925 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3926 basic_block label_bb = label_to_block (lab);
3928 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
3929 label_bb->aux = (void *)1;
3932 /* Verify that the case labels are sorted. */
3933 prev = TREE_VEC_ELT (vec, 0);
3934 for (i = 1; i < n - 1; ++i)
3936 tree c = TREE_VEC_ELT (vec, i);
3937 if (! CASE_LOW (c))
3939 error ("found default case not at end of case vector");
3940 err = 1;
3941 continue;
3943 if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
3945 error ("case labels not sorted: ");
3946 print_generic_expr (stderr, prev, 0);
3947 fprintf (stderr," is greater than ");
3948 print_generic_expr (stderr, c, 0);
3949 fprintf (stderr," but comes before it.\n");
3950 err = 1;
3952 prev = c;
3954 if (CASE_LOW (TREE_VEC_ELT (vec, n - 1)))
3956 error ("no default case found at end of case vector");
3957 err = 1;
3960 FOR_EACH_EDGE (e, ei, bb->succs)
3962 if (!e->dest->aux)
3964 error ("extra outgoing edge %d->%d",
3965 bb->index, e->dest->index);
3966 err = 1;
3968 e->dest->aux = (void *)2;
3969 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
3970 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3972 error ("wrong outgoing edge flags at end of bb %d",
3973 bb->index);
3974 err = 1;
3978 /* Check that we have all of them. */
3979 for (i = 0; i < n; ++i)
3981 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3982 basic_block label_bb = label_to_block (lab);
3984 if (label_bb->aux != (void *)2)
3986 error ("missing edge %i->%i",
3987 bb->index, label_bb->index);
3988 err = 1;
3992 FOR_EACH_EDGE (e, ei, bb->succs)
3993 e->dest->aux = (void *)0;
3996 default: ;
4000 if (dom_computed[CDI_DOMINATORS] >= DOM_NO_FAST_QUERY)
4001 verify_dominators (CDI_DOMINATORS);
4003 return err;
4007 /* Updates phi nodes after creating a forwarder block joined
4008 by edge FALLTHRU. */
4010 static void
4011 tree_make_forwarder_block (edge fallthru)
4013 edge e;
4014 edge_iterator ei;
4015 basic_block dummy, bb;
4016 tree phi, new_phi, var;
4018 dummy = fallthru->src;
4019 bb = fallthru->dest;
4021 if (single_pred_p (bb))
4022 return;
4024 /* If we redirected a branch we must create new PHI nodes at the
4025 start of BB. */
4026 for (phi = phi_nodes (dummy); phi; phi = PHI_CHAIN (phi))
4028 var = PHI_RESULT (phi);
4029 new_phi = create_phi_node (var, bb);
4030 SSA_NAME_DEF_STMT (var) = new_phi;
4031 SET_PHI_RESULT (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
4032 add_phi_arg (new_phi, PHI_RESULT (phi), fallthru);
4035 /* Ensure that the PHI node chain is in the same order. */
4036 set_phi_nodes (bb, phi_reverse (phi_nodes (bb)));
4038 /* Add the arguments we have stored on edges. */
4039 FOR_EACH_EDGE (e, ei, bb->preds)
4041 if (e == fallthru)
4042 continue;
4044 flush_pending_stmts (e);
4049 /* Return a non-special label at the head of basic block BB.
4050 Create one if it doesn't exist. */
4052 tree
4053 tree_block_label (basic_block bb)
4055 block_stmt_iterator i, s = bsi_start (bb);
4056 bool first = true;
4057 tree label, stmt;
4059 for (i = s; !bsi_end_p (i); first = false, bsi_next (&i))
4061 stmt = bsi_stmt (i);
4062 if (TREE_CODE (stmt) != LABEL_EXPR)
4063 break;
4064 label = LABEL_EXPR_LABEL (stmt);
4065 if (!DECL_NONLOCAL (label))
4067 if (!first)
4068 bsi_move_before (&i, &s);
4069 return label;
4073 label = create_artificial_label ();
4074 stmt = build1 (LABEL_EXPR, void_type_node, label);
4075 bsi_insert_before (&s, stmt, BSI_NEW_STMT);
4076 return label;
4080 /* Attempt to perform edge redirection by replacing a possibly complex
4081 jump instruction by a goto or by removing the jump completely.
4082 This can apply only if all edges now point to the same block. The
4083 parameters and return values are equivalent to
4084 redirect_edge_and_branch. */
4086 static edge
4087 tree_try_redirect_by_replacing_jump (edge e, basic_block target)
4089 basic_block src = e->src;
4090 block_stmt_iterator b;
4091 tree stmt;
4093 /* We can replace or remove a complex jump only when we have exactly
4094 two edges. */
4095 if (EDGE_COUNT (src->succs) != 2
4096 /* Verify that all targets will be TARGET. Specifically, the
4097 edge that is not E must also go to TARGET. */
4098 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4099 return NULL;
4101 b = bsi_last (src);
4102 if (bsi_end_p (b))
4103 return NULL;
4104 stmt = bsi_stmt (b);
4106 if (TREE_CODE (stmt) == COND_EXPR
4107 || TREE_CODE (stmt) == SWITCH_EXPR)
4109 bsi_remove (&b, true);
4110 e = ssa_redirect_edge (e, target);
4111 e->flags = EDGE_FALLTHRU;
4112 return e;
4115 return NULL;
4119 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4120 edge representing the redirected branch. */
4122 static edge
4123 tree_redirect_edge_and_branch (edge e, basic_block dest)
4125 basic_block bb = e->src;
4126 block_stmt_iterator bsi;
4127 edge ret;
4128 tree label, stmt;
4130 if (e->flags & EDGE_ABNORMAL)
4131 return NULL;
4133 if (e->src != ENTRY_BLOCK_PTR
4134 && (ret = tree_try_redirect_by_replacing_jump (e, dest)))
4135 return ret;
4137 if (e->dest == dest)
4138 return NULL;
4140 label = tree_block_label (dest);
4142 bsi = bsi_last (bb);
4143 stmt = bsi_end_p (bsi) ? NULL : bsi_stmt (bsi);
4145 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
4147 case COND_EXPR:
4148 stmt = (e->flags & EDGE_TRUE_VALUE
4149 ? COND_EXPR_THEN (stmt)
4150 : COND_EXPR_ELSE (stmt));
4151 GOTO_DESTINATION (stmt) = label;
4152 break;
4154 case GOTO_EXPR:
4155 /* No non-abnormal edges should lead from a non-simple goto, and
4156 simple ones should be represented implicitly. */
4157 gcc_unreachable ();
4159 case SWITCH_EXPR:
4161 tree cases = get_cases_for_edge (e, stmt);
4163 /* If we have a list of cases associated with E, then use it
4164 as it's a lot faster than walking the entire case vector. */
4165 if (cases)
4167 edge e2 = find_edge (e->src, dest);
4168 tree last, first;
4170 first = cases;
4171 while (cases)
4173 last = cases;
4174 CASE_LABEL (cases) = label;
4175 cases = TREE_CHAIN (cases);
4178 /* If there was already an edge in the CFG, then we need
4179 to move all the cases associated with E to E2. */
4180 if (e2)
4182 tree cases2 = get_cases_for_edge (e2, stmt);
4184 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4185 TREE_CHAIN (cases2) = first;
4188 else
4190 tree vec = SWITCH_LABELS (stmt);
4191 size_t i, n = TREE_VEC_LENGTH (vec);
4193 for (i = 0; i < n; i++)
4195 tree elt = TREE_VEC_ELT (vec, i);
4197 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4198 CASE_LABEL (elt) = label;
4202 break;
4205 case RETURN_EXPR:
4206 bsi_remove (&bsi, true);
4207 e->flags |= EDGE_FALLTHRU;
4208 break;
4210 default:
4211 /* Otherwise it must be a fallthru edge, and we don't need to
4212 do anything besides redirecting it. */
4213 gcc_assert (e->flags & EDGE_FALLTHRU);
4214 break;
4217 /* Update/insert PHI nodes as necessary. */
4219 /* Now update the edges in the CFG. */
4220 e = ssa_redirect_edge (e, dest);
4222 return e;
4226 /* Simple wrapper, as we can always redirect fallthru edges. */
4228 static basic_block
4229 tree_redirect_edge_and_branch_force (edge e, basic_block dest)
4231 e = tree_redirect_edge_and_branch (e, dest);
4232 gcc_assert (e);
4234 return NULL;
4238 /* Splits basic block BB after statement STMT (but at least after the
4239 labels). If STMT is NULL, BB is split just after the labels. */
4241 static basic_block
4242 tree_split_block (basic_block bb, void *stmt)
4244 block_stmt_iterator bsi;
4245 tree_stmt_iterator tsi_tgt;
4246 tree act;
4247 basic_block new_bb;
4248 edge e;
4249 edge_iterator ei;
4251 new_bb = create_empty_bb (bb);
4253 /* Redirect the outgoing edges. */
4254 new_bb->succs = bb->succs;
4255 bb->succs = NULL;
4256 FOR_EACH_EDGE (e, ei, new_bb->succs)
4257 e->src = new_bb;
4259 if (stmt && TREE_CODE ((tree) stmt) == LABEL_EXPR)
4260 stmt = NULL;
4262 /* Move everything from BSI to the new basic block. */
4263 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4265 act = bsi_stmt (bsi);
4266 if (TREE_CODE (act) == LABEL_EXPR)
4267 continue;
4269 if (!stmt)
4270 break;
4272 if (stmt == act)
4274 bsi_next (&bsi);
4275 break;
4279 if (bsi_end_p (bsi))
4280 return new_bb;
4282 /* Split the statement list - avoid re-creating new containers as this
4283 brings ugly quadratic memory consumption in the inliner.
4284 (We are still quadratic since we need to update stmt BB pointers,
4285 sadly.) */
4286 new_bb->stmt_list = tsi_split_statement_list_before (&bsi.tsi);
4287 for (tsi_tgt = tsi_start (new_bb->stmt_list);
4288 !tsi_end_p (tsi_tgt); tsi_next (&tsi_tgt))
4289 change_bb_for_stmt (tsi_stmt (tsi_tgt), new_bb);
4291 return new_bb;
4295 /* Moves basic block BB after block AFTER. */
4297 static bool
4298 tree_move_block_after (basic_block bb, basic_block after)
4300 if (bb->prev_bb == after)
4301 return true;
4303 unlink_block (bb);
4304 link_block (bb, after);
4306 return true;
4310 /* Return true if basic_block can be duplicated. */
4312 static bool
4313 tree_can_duplicate_bb_p (basic_block bb ATTRIBUTE_UNUSED)
4315 return true;
4319 /* Create a duplicate of the basic block BB. NOTE: This does not
4320 preserve SSA form. */
4322 static basic_block
4323 tree_duplicate_bb (basic_block bb)
4325 basic_block new_bb;
4326 block_stmt_iterator bsi, bsi_tgt;
4327 tree phi;
4329 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
4331 /* Copy the PHI nodes. We ignore PHI node arguments here because
4332 the incoming edges have not been set up yet. */
4333 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4335 tree copy = create_phi_node (PHI_RESULT (phi), new_bb);
4336 create_new_def_for (PHI_RESULT (copy), copy, PHI_RESULT_PTR (copy));
4339 /* Keep the chain of PHI nodes in the same order so that they can be
4340 updated by ssa_redirect_edge. */
4341 set_phi_nodes (new_bb, phi_reverse (phi_nodes (new_bb)));
4343 bsi_tgt = bsi_start (new_bb);
4344 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4346 def_operand_p def_p;
4347 ssa_op_iter op_iter;
4348 tree stmt, copy;
4349 int region;
4351 stmt = bsi_stmt (bsi);
4352 if (TREE_CODE (stmt) == LABEL_EXPR)
4353 continue;
4355 /* Create a new copy of STMT and duplicate STMT's virtual
4356 operands. */
4357 copy = unshare_expr (stmt);
4358 bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
4359 copy_virtual_operands (copy, stmt);
4360 region = lookup_stmt_eh_region (stmt);
4361 if (region >= 0)
4362 add_stmt_to_eh_region (copy, region);
4363 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
4365 /* Create new names for all the definitions created by COPY and
4366 add replacement mappings for each new name. */
4367 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
4368 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
4371 return new_bb;
4375 /* Basic block BB_COPY was created by code duplication. Add phi node
4376 arguments for edges going out of BB_COPY. The blocks that were
4377 duplicated have BB_DUPLICATED set. */
4379 void
4380 add_phi_args_after_copy_bb (basic_block bb_copy)
4382 basic_block bb, dest;
4383 edge e, e_copy;
4384 edge_iterator ei;
4385 tree phi, phi_copy, phi_next, def;
4387 bb = get_bb_original (bb_copy);
4389 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
4391 if (!phi_nodes (e_copy->dest))
4392 continue;
4394 if (e_copy->dest->flags & BB_DUPLICATED)
4395 dest = get_bb_original (e_copy->dest);
4396 else
4397 dest = e_copy->dest;
4399 e = find_edge (bb, dest);
4400 if (!e)
4402 /* During loop unrolling the target of the latch edge is copied.
4403 In this case we are not looking for the edge to DEST, but for
4404 the edge to the duplicated block whose original was DEST. */
4405 FOR_EACH_EDGE (e, ei, bb->succs)
4406 if ((e->dest->flags & BB_DUPLICATED)
4407 && get_bb_original (e->dest) == dest)
4408 break;
4410 gcc_assert (e != NULL);
4413 for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
4414 phi;
4415 phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
4417 phi_next = PHI_CHAIN (phi);
4418 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4419 add_phi_arg (phi_copy, def, e_copy);
4424 /* Blocks in REGION_COPY array of length N_REGION were created by
4425 duplication of basic blocks. Add phi node arguments for edges
4426 going from these blocks. */
4428 void
4429 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region)
4431 unsigned i;
4433 for (i = 0; i < n_region; i++)
4434 region_copy[i]->flags |= BB_DUPLICATED;
4436 for (i = 0; i < n_region; i++)
4437 add_phi_args_after_copy_bb (region_copy[i]);
4439 for (i = 0; i < n_region; i++)
4440 region_copy[i]->flags &= ~BB_DUPLICATED;
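/* Illustrative sketch only (hypothetical helper, not part of this file):
   the usual pattern for duplicating a set of blocks and then fixing up PHI
   arguments with add_phi_args_after_copy.  Redirection of the entry edge,
   exit handling and profile updates are omitted; the name
   duplicate_blocks_example and its parameters are assumptions made for the
   example.  */
#if 0
static void
duplicate_blocks_example (basic_block *region, unsigned n_region, edge entry)
{
  basic_block *region_copy = XNEWVEC (basic_block, n_region);

  initialize_original_copy_tables ();

  /* copy_bbs records the original/copy pairs that
     add_phi_args_after_copy relies on.  */
  copy_bbs (region, n_region, region_copy, NULL, 0, NULL,
	    region[0]->loop_father, split_edge_bb_loc (entry));
  add_phi_args_after_copy (region_copy, n_region);

  update_ssa (TODO_update_ssa);
  free_original_copy_tables ();
  free (region_copy);
}
#endif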
4443 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
4444 important exit edge EXIT. By important we mean that no SSA name defined
4445 inside the region is live over the other exit edges of the region. All entry
4446 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
4447 to the duplicate of the region. SSA form, dominance and loop information
4448 are updated. The new basic blocks are stored to REGION_COPY in the same
4449 order as they had in REGION, provided that REGION_COPY is not NULL.
4450 The function returns false if it is unable to copy the region,
4451 true otherwise. */
4453 bool
4454 tree_duplicate_sese_region (edge entry, edge exit,
4455 basic_block *region, unsigned n_region,
4456 basic_block *region_copy)
4458 unsigned i, n_doms;
4459 bool free_region_copy = false, copying_header = false;
4460 struct loop *loop = entry->dest->loop_father;
4461 edge exit_copy;
4462 basic_block *doms;
4463 edge redirected;
4464 int total_freq = 0, entry_freq = 0;
4465 gcov_type total_count = 0, entry_count = 0;
4467 if (!can_copy_bbs_p (region, n_region))
4468 return false;
4470 /* Some sanity checking. Note that we do not check for all possible
4471 misuses of the functions. I.e., if you ask to copy something weird,
4472 it will work, but the state of the structures probably will not be
4473 correct. */
4474 for (i = 0; i < n_region; i++)
4476 /* We do not handle subloops, i.e. all the blocks must belong to the
4477 same loop. */
4478 if (region[i]->loop_father != loop)
4479 return false;
4481 if (region[i] != entry->dest
4482 && region[i] == loop->header)
4483 return false;
4486 loop->copy = loop;
4488 /* In case the function is used for loop header copying (which is the primary
4489 use), ensure that EXIT and its copy will be the new latch and entry edges. */
4490 if (loop->header == entry->dest)
4492 copying_header = true;
4493 loop->copy = loop->outer;
4495 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
4496 return false;
4498 for (i = 0; i < n_region; i++)
4499 if (region[i] != exit->src
4500 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
4501 return false;
4504 if (!region_copy)
4506 region_copy = XNEWVEC (basic_block, n_region);
4507 free_region_copy = true;
4510 gcc_assert (!need_ssa_update_p ());
4512 /* Record blocks outside the region that are dominated by something
4513 inside. */
4514 doms = XNEWVEC (basic_block, n_basic_blocks);
4515 initialize_original_copy_tables ();
4517 n_doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region, doms);
4519 if (entry->dest->count)
4521 total_count = entry->dest->count;
4522 entry_count = entry->count;
4523 /* Fix up corner cases, to avoid division by zero or creation of negative
4524 frequencies. */
4525 if (entry_count > total_count)
4526 entry_count = total_count;
4528 else
4530 total_freq = entry->dest->frequency;
4531 entry_freq = EDGE_FREQUENCY (entry);
4532 /* Fix up corner cases, to avoid division by zero or creation of negative
4533 frequencies. */
4534 if (total_freq == 0)
4535 total_freq = 1;
4536 else if (entry_freq > total_freq)
4537 entry_freq = total_freq;
4540 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
4541 split_edge_bb_loc (entry));
4542 if (total_count)
4544 scale_bbs_frequencies_gcov_type (region, n_region,
4545 total_count - entry_count,
4546 total_count);
4547 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
4548 total_count);
4550 else
4552 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
4553 total_freq);
4554 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
4557 if (copying_header)
4559 loop->header = exit->dest;
4560 loop->latch = exit->src;
4563 /* Redirect the entry and add the phi node arguments. */
4564 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
4565 gcc_assert (redirected != NULL);
4566 flush_pending_stmts (entry);
4568 /* Concerning updating of dominators: We must recount dominators
4569 for the entry block and its copy. Anything that is outside of the
4570 region but was dominated by something inside needs recounting as
4571 well. */
4572 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
4573 doms[n_doms++] = get_bb_original (entry->dest);
4574 iterate_fix_dominators (CDI_DOMINATORS, doms, n_doms);
4575 free (doms);
4577 /* Add the other PHI node arguments. */
4578 add_phi_args_after_copy (region_copy, n_region);
4580 /* Update the SSA web. */
4581 update_ssa (TODO_update_ssa);
4583 if (free_region_copy)
4584 free (region_copy);
4586 free_original_copy_tables ();
4587 return true;
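/* Illustrative sketch only (hypothetical caller): how a loop-header-copying
   style pass might use tree_duplicate_sese_region on a one-block region
   consisting of LOOP's header.  ENTRY is assumed to be the preheader edge
   (so ENTRY->dest is the header) and EXIT an exit edge whose source is the
   header; the function name and parameters are made up for the example.  */
#if 0
static void
copy_loop_header_example (struct loop *loop, edge entry, edge exit)
{
  basic_block region[1];

  region[0] = loop->header;
  if (!tree_duplicate_sese_region (entry, exit, region, 1, NULL))
    return;  /* The region could not be copied; nothing was changed.  */

  /* On success, the CFG, dominators, loop structure and SSA form have
     already been updated by tree_duplicate_sese_region.  */
}
#endif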
4591 DEF_VEC_P(basic_block);
4592 DEF_VEC_ALLOC_P(basic_block,heap);
4595 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
4596 adding blocks when the dominator traversal reaches EXIT. This
4597 function silently assumes that ENTRY strictly dominates EXIT. */
4599 static void
4600 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
4601 VEC(basic_block,heap) **bbs_p)
4603 basic_block son;
4605 for (son = first_dom_son (CDI_DOMINATORS, entry);
4606 son;
4607 son = next_dom_son (CDI_DOMINATORS, son))
4609 VEC_safe_push (basic_block, heap, *bbs_p, son);
4610 if (son != exit)
4611 gather_blocks_in_sese_region (son, exit, bbs_p);
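/* Illustrative sketch only (hypothetical caller): collecting the blocks of
   an SESE region into a heap VEC with the helper above.  ENTRY must
   strictly dominate EXIT; the helper name collect_region_example is an
   assumption for the example.  */
#if 0
static void
collect_region_example (basic_block entry, basic_block exit)
{
  VEC(basic_block,heap) *bbs = NULL;
  basic_block bb;
  unsigned i;

  calculate_dominance_info (CDI_DOMINATORS);

  /* ENTRY itself is not added by gather_blocks_in_sese_region, so push
     it manually first.  */
  VEC_safe_push (basic_block, heap, bbs, entry);
  gather_blocks_in_sese_region (entry, exit, &bbs);

  for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
    ;  /* Process BB here.  */

  VEC_free (basic_block, heap, bbs);
}
#endif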
4616 struct move_stmt_d
4618 tree block;
4619 tree from_context;
4620 tree to_context;
4621 bitmap vars_to_remove;
4622 htab_t new_label_map;
4623 bool remap_decls_p;
4626 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
4627 contained in *TP and change the DECL_CONTEXT of every local
4628 variable referenced in *TP. */
4630 static tree
4631 move_stmt_r (tree *tp, int *walk_subtrees, void *data)
4633 struct move_stmt_d *p = (struct move_stmt_d *) data;
4634 tree t = *tp;
4636 if (p->block
4637 && (EXPR_P (t) || GIMPLE_STMT_P (t)))
4638 TREE_BLOCK (t) = p->block;
4640 if (OMP_DIRECTIVE_P (t)
4641 && TREE_CODE (t) != OMP_RETURN
4642 && TREE_CODE (t) != OMP_CONTINUE)
4644 /* Do not remap variables inside OMP directives. Variables
4645 referenced in clauses and directive header belong to the
4646 parent function and should not be moved into the child
4647 function. */
4648 bool save_remap_decls_p = p->remap_decls_p;
4649 p->remap_decls_p = false;
4650 *walk_subtrees = 0;
4652 walk_tree (&OMP_BODY (t), move_stmt_r, p, NULL);
4654 p->remap_decls_p = save_remap_decls_p;
4656 else if (DECL_P (t) && DECL_CONTEXT (t) == p->from_context)
4658 if (TREE_CODE (t) == LABEL_DECL)
4660 if (p->new_label_map)
4662 struct tree_map in, *out;
4663 in.from = t;
4664 out = htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
4665 if (out)
4666 *tp = t = out->to;
4669 DECL_CONTEXT (t) = p->to_context;
4671 else if (p->remap_decls_p)
4673 DECL_CONTEXT (t) = p->to_context;
4675 if (TREE_CODE (t) == VAR_DECL)
4677 struct function *f = DECL_STRUCT_FUNCTION (p->to_context);
4678 f->unexpanded_var_list
4679 = tree_cons (0, t, f->unexpanded_var_list);
4681 /* Mark T to be removed from the original function,
4682 otherwise it will be given a DECL_RTL when the
4683 original function is expanded. */
4684 bitmap_set_bit (p->vars_to_remove, DECL_UID (t));
4688 else if (TYPE_P (t))
4689 *walk_subtrees = 0;
4691 return NULL_TREE;
4695 /* Move basic block BB from function CFUN to function DEST_FN. The
4696 block is moved out of the original linked list and placed after
4697 block AFTER in the new list. Also, the block is removed from the
4698 original array of blocks and placed in DEST_FN's array of blocks.
4699 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
4700 updated to reflect the moved edges.
4702 On exit, local variables that need to be removed from
4703 CFUN->UNEXPANDED_VAR_LIST will have been added to VARS_TO_REMOVE. */
4705 static void
4706 move_block_to_fn (struct function *dest_cfun, basic_block bb,
4707 basic_block after, bool update_edge_count_p,
4708 bitmap vars_to_remove, htab_t new_label_map, int eh_offset)
4710 struct control_flow_graph *cfg;
4711 edge_iterator ei;
4712 edge e;
4713 block_stmt_iterator si;
4714 struct move_stmt_d d;
4715 unsigned old_len, new_len;
4717 /* Link BB to the new linked list. */
4718 move_block_after (bb, after);
4720 /* Update the edge count in the corresponding flowgraphs. */
4721 if (update_edge_count_p)
4722 FOR_EACH_EDGE (e, ei, bb->succs)
4724 cfun->cfg->x_n_edges--;
4725 dest_cfun->cfg->x_n_edges++;
4728 /* Remove BB from the original basic block array. */
4729 VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
4730 cfun->cfg->x_n_basic_blocks--;
4732 /* Grow DEST_CFUN's basic block array if needed. */
4733 cfg = dest_cfun->cfg;
4734 cfg->x_n_basic_blocks++;
4735 if (bb->index > cfg->x_last_basic_block)
4736 cfg->x_last_basic_block = bb->index;
4738 old_len = VEC_length (basic_block, cfg->x_basic_block_info);
4739 if ((unsigned) cfg->x_last_basic_block >= old_len)
4741 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
4742 VEC_safe_grow_cleared (basic_block, gc, cfg->x_basic_block_info,
4743 new_len);
4746 VEC_replace (basic_block, cfg->x_basic_block_info,
4747 cfg->x_last_basic_block, bb);
4749 /* The statements in BB need to be associated with a new TREE_BLOCK.
4750 Labels need to be associated with a new label-to-block map. */
4751 memset (&d, 0, sizeof (d));
4752 d.vars_to_remove = vars_to_remove;
4754 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
4756 tree stmt = bsi_stmt (si);
4757 int region;
4759 d.from_context = cfun->decl;
4760 d.to_context = dest_cfun->decl;
4761 d.remap_decls_p = true;
4762 d.new_label_map = new_label_map;
4763 if (TREE_BLOCK (stmt))
4764 d.block = DECL_INITIAL (dest_cfun->decl);
4766 walk_tree (&stmt, move_stmt_r, &d, NULL);
4768 if (TREE_CODE (stmt) == LABEL_EXPR)
4770 tree label = LABEL_EXPR_LABEL (stmt);
4771 int uid = LABEL_DECL_UID (label);
4773 gcc_assert (uid > -1);
4775 old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
4776 if (old_len <= (unsigned) uid)
4778 new_len = 3 * uid / 2 + 1;
4779 VEC_safe_grow_cleared (basic_block, gc,
4780 cfg->x_label_to_block_map, new_len);
4783 VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
4784 VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
4786 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
4788 if (uid >= dest_cfun->last_label_uid)
4789 dest_cfun->last_label_uid = uid + 1;
4791 else if (TREE_CODE (stmt) == RESX_EXPR && eh_offset != 0)
4792 TREE_OPERAND (stmt, 0) =
4793 build_int_cst (NULL_TREE,
4794 TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0))
4795 + eh_offset);
4797 region = lookup_stmt_eh_region (stmt);
4798 if (region >= 0)
4800 add_stmt_to_eh_region_fn (dest_cfun, stmt, region + eh_offset);
4801 remove_stmt_from_eh_region (stmt);
4802 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
4803 gimple_remove_stmt_histograms (cfun, stmt);
4808 /* Examine the statements in BB (which is in SRC_CFUN); find and return
4809 the outermost EH region. Use REGION as the incoming base EH region. */
4811 static int
4812 find_outermost_region_in_block (struct function *src_cfun,
4813 basic_block bb, int region)
4815 block_stmt_iterator si;
4817 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
4819 tree stmt = bsi_stmt (si);
4820 int stmt_region;
4822 if (TREE_CODE (stmt) == RESX_EXPR)
4823 stmt_region = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
4824 else
4825 stmt_region = lookup_stmt_eh_region_fn (src_cfun, stmt);
4826 if (stmt_region > 0)
4828 if (region < 0)
4829 region = stmt_region;
4830 else if (stmt_region != region)
4832 region = eh_region_outermost (src_cfun, stmt_region, region);
4833 gcc_assert (region != -1);
4838 return region;
4841 static tree
4842 new_label_mapper (tree decl, void *data)
4844 htab_t hash = (htab_t) data;
4845 struct tree_map *m;
4846 void **slot;
4848 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
4850 m = xmalloc (sizeof (struct tree_map));
4851 m->hash = DECL_UID (decl);
4852 m->from = decl;
4853 m->to = create_artificial_label ();
4854 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
4856 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
4857 gcc_assert (*slot == NULL);
4859 *slot = m;
4861 return m->to;
4864 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
4865 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
4866 single basic block in the original CFG and the new basic block is
4867 returned. DEST_CFUN must not have a CFG yet.
4869 Note that the region need not be a pure SESE region. Blocks inside
4870 the region may contain calls to abort/exit. The only restriction
4871 is that ENTRY_BB should be the only entry point and it must
4872 dominate EXIT_BB.
4874 All local variables referenced in the region are assumed to be in
4875 the corresponding BLOCK_VARS and unexpanded variable lists
4876 associated with DEST_CFUN. */
4878 basic_block
4879 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
4880 basic_block exit_bb)
4882 VEC(basic_block,heap) *bbs;
4883 basic_block after, bb, *entry_pred, *exit_succ;
4884 struct function *saved_cfun;
4885 int *entry_flag, *exit_flag, eh_offset;
4886 unsigned i, num_entry_edges, num_exit_edges;
4887 edge e;
4888 edge_iterator ei;
4889 bitmap vars_to_remove;
4890 htab_t new_label_map;
4892 saved_cfun = cfun;
4894 /* Collect all the blocks in the region. Manually add ENTRY_BB
4895 because it won't be added by gather_blocks_in_sese_region. */
4896 calculate_dominance_info (CDI_DOMINATORS);
4898 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
4899 region. */
4900 gcc_assert (entry_bb != exit_bb
4901 && (!exit_bb
4902 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
4904 bbs = NULL;
4905 VEC_safe_push (basic_block, heap, bbs, entry_bb);
4906 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
4908 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
4909 the predecessor edges to ENTRY_BB and the successor edges to
4910 EXIT_BB so that we can re-attach them to the new basic block that
4911 will replace the region. */
4912 num_entry_edges = EDGE_COUNT (entry_bb->preds);
4913 entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
4914 entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
4915 i = 0;
4916 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
4918 entry_flag[i] = e->flags;
4919 entry_pred[i++] = e->src;
4920 remove_edge (e);
4923 if (exit_bb)
4925 num_exit_edges = EDGE_COUNT (exit_bb->succs);
4926 exit_succ = (basic_block *) xcalloc (num_exit_edges,
4927 sizeof (basic_block));
4928 exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
4929 i = 0;
4930 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
4932 exit_flag[i] = e->flags;
4933 exit_succ[i++] = e->dest;
4934 remove_edge (e);
4937 else
4939 num_exit_edges = 0;
4940 exit_succ = NULL;
4941 exit_flag = NULL;
4944 /* Switch context to the child function to initialize DEST_FN's CFG. */
4945 gcc_assert (dest_cfun->cfg == NULL);
4946 cfun = dest_cfun;
4948 init_empty_tree_cfg ();
4950 /* Initialize EH information for the new function. */
4951 eh_offset = 0;
4952 new_label_map = NULL;
4953 if (saved_cfun->eh)
4955 int region = -1;
4957 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
4958 region = find_outermost_region_in_block (saved_cfun, bb, region);
4960 init_eh_for_function ();
4961 if (region != -1)
4963 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
4964 eh_offset = duplicate_eh_regions (saved_cfun, new_label_mapper,
4965 new_label_map, region, 0);
4969 cfun = saved_cfun;
4971 /* Move blocks from BBS into DEST_CFUN. */
4972 gcc_assert (VEC_length (basic_block, bbs) >= 2);
4973 after = dest_cfun->cfg->x_entry_block_ptr;
4974 vars_to_remove = BITMAP_ALLOC (NULL);
4975 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
4977 /* No need to update edge counts on the last block. It has
4978 already been updated earlier when we detached the region from
4979 the original CFG. */
4980 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, vars_to_remove,
4981 new_label_map, eh_offset);
4982 after = bb;
4985 if (new_label_map)
4986 htab_delete (new_label_map);
4988 /* Remove the variables marked in VARS_TO_REMOVE from
4989 CFUN->UNEXPANDED_VAR_LIST. Otherwise, they will be given a
4990 DECL_RTL in the context of CFUN. */
4991 if (!bitmap_empty_p (vars_to_remove))
4993 tree *p;
4995 for (p = &cfun->unexpanded_var_list; *p; )
4997 tree var = TREE_VALUE (*p);
4998 if (bitmap_bit_p (vars_to_remove, DECL_UID (var)))
5000 *p = TREE_CHAIN (*p);
5001 continue;
5004 p = &TREE_CHAIN (*p);
5008 BITMAP_FREE (vars_to_remove);
5010 /* Rewire the entry and exit blocks. The successor to the entry
5011 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
5012 the child function. Similarly, the predecessor of DEST_FN's
5013 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
5014 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
5015 various CFG manipulation functions get to the right CFG.
5017 FIXME, this is silly. The CFG ought to become a parameter to
5018 these helpers. */
5019 cfun = dest_cfun;
5020 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
5021 if (exit_bb)
5022 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
5023 cfun = saved_cfun;
5025 /* Back in the original function, the SESE region has disappeared;
5026 create a new basic block in its place. */
5027 bb = create_empty_bb (entry_pred[0]);
5028 for (i = 0; i < num_entry_edges; i++)
5029 make_edge (entry_pred[i], bb, entry_flag[i]);
5031 for (i = 0; i < num_exit_edges; i++)
5032 make_edge (bb, exit_succ[i], exit_flag[i]);
5034 if (exit_bb)
5036 free (exit_flag);
5037 free (exit_succ);
5039 free (entry_flag);
5040 free (entry_pred);
5041 free_dominance_info (CDI_DOMINATORS);
5042 free_dominance_info (CDI_POST_DOMINATORS);
5043 VEC_free (basic_block, heap, bbs);
5045 return bb;
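/* Illustrative sketch only (hypothetical caller): outlining the region
   [ENTRY_BB, EXIT_BB] into a freshly created function CHILD_FN, roughly the
   way an outlining pass such as the OMP expander would.  Creation of
   CHILD_FN and its struct function is assumed to have happened elsewhere;
   the helper name is made up for the example.  */
#if 0
static basic_block
outline_region_example (tree child_fn, basic_block entry_bb,
			basic_block exit_bb)
{
  struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);

  /* The destination function must not have a CFG yet; the call below
     builds one and returns the single block that replaces the region
     in the current function.  */
  gcc_assert (child_cfun->cfg == NULL);
  return move_sese_region_to_fn (child_cfun, entry_bb, exit_bb);
}
#endif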
5049 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h) */
5051 void
5052 dump_function_to_file (tree fn, FILE *file, int flags)
5054 tree arg, vars, var;
5055 bool ignore_topmost_bind = false, any_var = false;
5056 basic_block bb;
5057 tree chain;
5058 struct function *saved_cfun;
5060 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
5062 arg = DECL_ARGUMENTS (fn);
5063 while (arg)
5065 print_generic_expr (file, arg, dump_flags);
5066 if (TREE_CHAIN (arg))
5067 fprintf (file, ", ");
5068 arg = TREE_CHAIN (arg);
5070 fprintf (file, ")\n");
5072 if (flags & TDF_DETAILS)
5073 dump_eh_tree (file, DECL_STRUCT_FUNCTION (fn));
5074 if (flags & TDF_RAW)
5076 dump_node (fn, TDF_SLIM | flags, file);
5077 return;
5080 /* Switch CFUN to point to FN. */
5081 saved_cfun = cfun;
5082 cfun = DECL_STRUCT_FUNCTION (fn);
5084 /* When GIMPLE is lowered, the variables are no longer available in
5085 BIND_EXPRs, so display them separately. */
5086 if (cfun && cfun->decl == fn && cfun->unexpanded_var_list)
5088 ignore_topmost_bind = true;
5090 fprintf (file, "{\n");
5091 for (vars = cfun->unexpanded_var_list; vars; vars = TREE_CHAIN (vars))
5093 var = TREE_VALUE (vars);
5095 print_generic_decl (file, var, flags);
5096 fprintf (file, "\n");
5098 any_var = true;
5102 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
5104 /* Make a CFG based dump. */
5105 check_bb_profile (ENTRY_BLOCK_PTR, file);
5106 if (!ignore_topmost_bind)
5107 fprintf (file, "{\n");
5109 if (any_var && n_basic_blocks)
5110 fprintf (file, "\n");
5112 FOR_EACH_BB (bb)
5113 dump_generic_bb (file, bb, 2, flags);
5115 fprintf (file, "}\n");
5116 check_bb_profile (EXIT_BLOCK_PTR, file);
5118 else
5120 int indent;
5122 /* Make a tree based dump. */
5123 chain = DECL_SAVED_TREE (fn);
5125 if (chain && TREE_CODE (chain) == BIND_EXPR)
5127 if (ignore_topmost_bind)
5129 chain = BIND_EXPR_BODY (chain);
5130 indent = 2;
5132 else
5133 indent = 0;
5135 else
5137 if (!ignore_topmost_bind)
5138 fprintf (file, "{\n");
5139 indent = 2;
5142 if (any_var)
5143 fprintf (file, "\n");
5145 print_generic_stmt_indented (file, chain, flags, indent);
5146 if (ignore_topmost_bind)
5147 fprintf (file, "}\n");
5150 fprintf (file, "\n\n");
5152 /* Restore CFUN. */
5153 cfun = saved_cfun;
5157 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
5159 void
5160 debug_function (tree fn, int flags)
5162 dump_function_to_file (fn, stderr, flags);
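/* For example, from a debugger one might evaluate

     debug_function (cfun->decl, TDF_DETAILS)

   to dump the current function to stderr with extra detail (TDF_DETAILS
   being one of the TDF_* dump flags from tree.h).  */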
5166 /* Pretty printing of the loops' intermediate representation. */
5167 static void print_loop (FILE *, struct loop *, int);
5168 static void print_pred_bbs (FILE *, basic_block bb);
5169 static void print_succ_bbs (FILE *, basic_block bb);
5172 /* Print on FILE the indexes for the predecessors of basic_block BB. */
5174 static void
5175 print_pred_bbs (FILE *file, basic_block bb)
5177 edge e;
5178 edge_iterator ei;
5180 FOR_EACH_EDGE (e, ei, bb->preds)
5181 fprintf (file, "bb_%d ", e->src->index);
5185 /* Print on FILE the indexes for the successors of basic_block BB. */
5187 static void
5188 print_succ_bbs (FILE *file, basic_block bb)
5190 edge e;
5191 edge_iterator ei;
5193 FOR_EACH_EDGE (e, ei, bb->succs)
5194 fprintf (file, "bb_%d ", e->dest->index);
5198 /* Pretty print LOOP on FILE, indented INDENT spaces. */
5200 static void
5201 print_loop (FILE *file, struct loop *loop, int indent)
5203 char *s_indent;
5204 basic_block bb;
5206 if (loop == NULL)
5207 return;
5209 s_indent = (char *) alloca ((size_t) indent + 1);
5210 memset ((void *) s_indent, ' ', (size_t) indent);
5211 s_indent[indent] = '\0';
5213 /* Print the loop's header. */
5214 fprintf (file, "%sloop_%d\n", s_indent, loop->num);
5216 /* Print the loop's body. */
5217 fprintf (file, "%s{\n", s_indent);
5218 FOR_EACH_BB (bb)
5219 if (bb->loop_father == loop)
5221 /* Print the basic_block's header. */
5222 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
5223 print_pred_bbs (file, bb);
5224 fprintf (file, "}, succs = {");
5225 print_succ_bbs (file, bb);
5226 fprintf (file, "})\n");
5228 /* Print the basic_block's body. */
5229 fprintf (file, "%s {\n", s_indent);
5230 tree_dump_bb (bb, file, indent + 4);
5231 fprintf (file, "%s }\n", s_indent);
5234 print_loop (file, loop->inner, indent + 2);
5235 fprintf (file, "%s}\n", s_indent);
5236 print_loop (file, loop->next, indent);
5240 /* Follow a CFG edge from the entry point of the function, and on entry
5241 of a loop, pretty print the loop structure on FILE. */
5243 void
5244 print_loop_ir (FILE *file)
5246 basic_block bb;
5248 bb = BASIC_BLOCK (NUM_FIXED_BLOCKS);
5249 if (bb && bb->loop_father)
5250 print_loop (file, bb->loop_father, 0);
5254 /* Debugging loops structure at tree level. */
5256 void
5257 debug_loop_ir (void)
5259 print_loop_ir (stderr);
5263 /* Return true if BB ends with a call, possibly followed by some
5264 instructions that must stay with the call. Return false
5265 otherwise. */
5267 static bool
5268 tree_block_ends_with_call_p (basic_block bb)
5270 block_stmt_iterator bsi = bsi_last (bb);
5271 return get_call_expr_in (bsi_stmt (bsi)) != NULL;
5275 /* Return true if BB ends with a conditional branch. Return false
5276 otherwise. */
5278 static bool
5279 tree_block_ends_with_condjump_p (basic_block bb)
5281 tree stmt = last_stmt (bb);
5282 return (stmt && TREE_CODE (stmt) == COND_EXPR);
5286 /* Return true if we need to add fake edge to exit at statement T.
5287 Helper function for tree_flow_call_edges_add. */
5289 static bool
5290 need_fake_edge_p (tree t)
5292 tree call;
5294 /* NORETURN and LONGJMP calls already have an edge to exit.
5295 CONST and PURE calls do not need one.
5296 We don't currently check for CONST and PURE here, although
5297 it would be a good idea, because those attributes are
5298 figured out from the RTL in mark_constant_function, and
5299 the counter incrementation code from -fprofile-arcs
5300 leads to different results from -fbranch-probabilities. */
5301 call = get_call_expr_in (t);
5302 if (call
5303 && !(call_expr_flags (call) & ECF_NORETURN))
5304 return true;
5306 if (TREE_CODE (t) == ASM_EXPR
5307 && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
5308 return true;
5310 return false;
5314 /* Add fake edges to the function exit for any non-constant and
5315 non-noreturn calls, and for volatile inline assembly, in the bitmap of
5316 blocks specified by BLOCKS, or in the whole CFG if BLOCKS is zero.
5317 Return the number of blocks that were split.
5319 The goal is to expose cases in which entering a basic block does
5320 not imply that all subsequent instructions must be executed. */
5322 static int
5323 tree_flow_call_edges_add (sbitmap blocks)
5325 int i;
5326 int blocks_split = 0;
5327 int last_bb = last_basic_block;
5328 bool check_last_block = false;
5330 if (n_basic_blocks == NUM_FIXED_BLOCKS)
5331 return 0;
5333 if (! blocks)
5334 check_last_block = true;
5335 else
5336 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
5338 /* In the last basic block, before epilogue generation, there will be
5339 a fallthru edge to EXIT. Special care is required if the last insn
5340 of the last basic block is a call because make_edge folds duplicate
5341 edges, which would result in the fallthru edge also being marked
5342 fake, which would result in the fallthru edge being removed by
5343 remove_fake_edges, which would result in an invalid CFG.
5345 Moreover, we can't elide the outgoing fake edge, since the block
5346 profiler needs to take this into account in order to solve the minimal
5347 spanning tree in the case that the call doesn't return.
5349 Handle this by adding a dummy instruction in a new last basic block. */
5350 if (check_last_block)
5352 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
5353 block_stmt_iterator bsi = bsi_last (bb);
5354 tree t = NULL_TREE;
5355 if (!bsi_end_p (bsi))
5356 t = bsi_stmt (bsi);
5358 if (t && need_fake_edge_p (t))
5360 edge e;
5362 e = find_edge (bb, EXIT_BLOCK_PTR);
5363 if (e)
5365 bsi_insert_on_edge (e, build_empty_stmt ());
5366 bsi_commit_edge_inserts ();
5371 /* Now add fake edges to the function exit for any non-constant
5372 calls, since there is no way we can determine whether they will
5373 return or not. */
5374 for (i = 0; i < last_bb; i++)
5376 basic_block bb = BASIC_BLOCK (i);
5377 block_stmt_iterator bsi;
5378 tree stmt, last_stmt;
5380 if (!bb)
5381 continue;
5383 if (blocks && !TEST_BIT (blocks, i))
5384 continue;
5386 bsi = bsi_last (bb);
5387 if (!bsi_end_p (bsi))
5389 last_stmt = bsi_stmt (bsi);
5392 stmt = bsi_stmt (bsi);
5393 if (need_fake_edge_p (stmt))
5395 edge e;
5396 /* The handling above of the final block before the
5397 epilogue should be enough to verify that there is
5398 no edge to the exit block in CFG already.
5399 Calling make_edge in such case would cause us to
5400 mark that edge as fake and remove it later. */
5401 #ifdef ENABLE_CHECKING
5402 if (stmt == last_stmt)
5404 e = find_edge (bb, EXIT_BLOCK_PTR);
5405 gcc_assert (e == NULL);
5407 #endif
5409 /* Note that the following may create a new basic block
5410 and renumber the existing basic blocks. */
5411 if (stmt != last_stmt)
5413 e = split_block (bb, stmt);
5414 if (e)
5415 blocks_split++;
5417 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
5419 bsi_prev (&bsi);
5421 while (!bsi_end_p (bsi));
5425 if (blocks_split)
5426 verify_flow_info ();
5428 return blocks_split;
5431 /* Purge dead abnormal call edges from basic block BB. */
5433 bool
5434 tree_purge_dead_abnormal_call_edges (basic_block bb)
5436 bool changed = tree_purge_dead_eh_edges (bb);
5438 if (current_function_has_nonlocal_label)
5440 tree stmt = last_stmt (bb);
5441 edge_iterator ei;
5442 edge e;
5444 if (!(stmt && tree_can_make_abnormal_goto (stmt)))
5445 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5447 if (e->flags & EDGE_ABNORMAL)
5449 remove_edge (e);
5450 changed = true;
5452 else
5453 ei_next (&ei);
5456 /* See tree_purge_dead_eh_edges below. */
5457 if (changed)
5458 free_dominance_info (CDI_DOMINATORS);
5461 return changed;
5464 /* Purge dead EH edges from basic block BB. */
5466 bool
5467 tree_purge_dead_eh_edges (basic_block bb)
5469 bool changed = false;
5470 edge e;
5471 edge_iterator ei;
5472 tree stmt = last_stmt (bb);
5474 if (stmt && tree_can_throw_internal (stmt))
5475 return false;
5477 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5479 if (e->flags & EDGE_EH)
5481 remove_edge (e);
5482 changed = true;
5484 else
5485 ei_next (&ei);
5488 /* Removal of dead EH edges might change dominators of not
5489 just immediate successors. E.g. when bb1 is changed so that
5490 it no longer can throw and bb1->bb3 and bb1->bb4 are dead
5491 eh edges purged by this function in:
5492        0
5493       / \
5494      v   v
5495      1-->2
5496     / \  |
5497    v   v |
5498    3-->4 |
5499     \    v
5500      --->5
5501          |
5502          -
5503 idom(bb5) must be recomputed. For now just free the dominance
5504 info. */
5505 if (changed)
5506 free_dominance_info (CDI_DOMINATORS);
5508 return changed;
5511 bool
5512 tree_purge_all_dead_eh_edges (bitmap blocks)
5514 bool changed = false;
5515 unsigned i;
5516 bitmap_iterator bi;
5518 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
5520 changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
5523 return changed;
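/* Illustrative sketch only (hypothetical caller): a pass that may have
   turned throwing statements into non-throwing ones typically records the
   affected block indices in a bitmap and purges the now-dead EH edges in
   one go.  The bitmap name below is an assumption for the example.  */
#if 0
static void
cleanup_eh_after_pass_example (bitmap blocks_with_removed_throws)
{
  if (!bitmap_empty_p (blocks_with_removed_throws))
    tree_purge_all_dead_eh_edges (blocks_with_removed_throws);
}
#endif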
5526 /* This function is called whenever a new edge is created or
5527 redirected. */
5529 static void
5530 tree_execute_on_growing_pred (edge e)
5532 basic_block bb = e->dest;
5534 if (phi_nodes (bb))
5535 reserve_phi_args_for_new_edge (bb);
5538 /* This function is called immediately before edge E is removed from
5539 the edge vector E->dest->preds. */
5541 static void
5542 tree_execute_on_shrinking_pred (edge e)
5544 if (phi_nodes (e->dest))
5545 remove_phi_args (e);
5548 /*---------------------------------------------------------------------------
5549 Helper functions for Loop versioning
5550 ---------------------------------------------------------------------------*/
5552 /* Adjust phi nodes for the 'first' basic block. 'second' is a copy of
5553 'first'. Both of them are dominated by the 'new_head' basic block. When
5554 'new_head' was created by splitting 'second's incoming edge, 'second'
5555 received phi arguments on that edge from split_edge (). Later, an
5556 additional edge 'e' was created to connect 'new_head' and 'first'. Now
5557 this routine adds to edge 'e' the phi arguments that the edge from
5558 new_head to second received as part of the edge splitting. */
5561 static void
5562 tree_lv_adjust_loop_header_phi (basic_block first, basic_block second,
5563 basic_block new_head, edge e)
5565 tree phi1, phi2;
5566 edge e2 = find_edge (new_head, second);
5568 /* Because NEW_HEAD has been created by splitting SECOND's incoming
5569 edge, we should always have an edge from NEW_HEAD to SECOND. */
5570 gcc_assert (e2 != NULL);
5572 /* Browse all 'second' basic block phi nodes and add phi args to
5573 edge 'e' for 'first' head. PHI args are always in correct order. */
5575 for (phi2 = phi_nodes (second), phi1 = phi_nodes (first);
5576 phi2 && phi1;
5577 phi2 = PHI_CHAIN (phi2), phi1 = PHI_CHAIN (phi1))
5579 tree def = PHI_ARG_DEF (phi2, e2->dest_idx);
5580 add_phi_arg (phi1, def, e);
5584 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
5585 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
5586 the destination of the ELSE part. */
5587 static void
5588 tree_lv_add_condition_to_bb (basic_block first_head, basic_block second_head,
5589 basic_block cond_bb, void *cond_e)
5591 block_stmt_iterator bsi;
5592 tree goto1 = NULL_TREE;
5593 tree goto2 = NULL_TREE;
5594 tree new_cond_expr = NULL_TREE;
5595 tree cond_expr = (tree) cond_e;
5596 edge e0;
5598 /* Build new conditional expr */
5599 goto1 = build1 (GOTO_EXPR, void_type_node, tree_block_label (first_head));
5600 goto2 = build1 (GOTO_EXPR, void_type_node, tree_block_label (second_head));
5601 new_cond_expr = build3 (COND_EXPR, void_type_node, cond_expr, goto1, goto2);
5603 /* Add new cond in cond_bb. */
5604 bsi = bsi_start (cond_bb);
5605 bsi_insert_after (&bsi, new_cond_expr, BSI_NEW_STMT);
5606 /* Adjust edges appropriately to connect new head with first head
5607 as well as second head. */
5608 e0 = single_succ_edge (cond_bb);
5609 e0->flags &= ~EDGE_FALLTHRU;
5610 e0->flags |= EDGE_FALSE_VALUE;
5613 struct cfg_hooks tree_cfg_hooks = {
5614 "tree",
5615 tree_verify_flow_info,
5616 tree_dump_bb, /* dump_bb */
5617 create_bb, /* create_basic_block */
5618 tree_redirect_edge_and_branch,/* redirect_edge_and_branch */
5619 tree_redirect_edge_and_branch_force,/* redirect_edge_and_branch_force */
5620 remove_bb, /* delete_basic_block */
5621 tree_split_block, /* split_block */
5622 tree_move_block_after, /* move_block_after */
5623 tree_can_merge_blocks_p, /* can_merge_blocks_p */
5624 tree_merge_blocks, /* merge_blocks */
5625 tree_predict_edge, /* predict_edge */
5626 tree_predicted_by_p, /* predicted_by_p */
5627 tree_can_duplicate_bb_p, /* can_duplicate_block_p */
5628 tree_duplicate_bb, /* duplicate_block */
5629 tree_split_edge, /* split_edge */
5630 tree_make_forwarder_block, /* make_forwarder_block */
5631 NULL, /* tidy_fallthru_edge */
5632 tree_block_ends_with_call_p, /* block_ends_with_call_p */
5633 tree_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
5634 tree_flow_call_edges_add, /* flow_call_edges_add */
5635 tree_execute_on_growing_pred, /* execute_on_growing_pred */
5636 tree_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
5637 tree_duplicate_loop_to_header_edge, /* duplicate loop for trees */
5638 tree_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
5639 tree_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
5640 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
5641 flush_pending_stmts /* flush_pending_stmts */
5645 /* Split all critical edges. */
5647 static unsigned int
5648 split_critical_edges (void)
5650 basic_block bb;
5651 edge e;
5652 edge_iterator ei;
5654 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
5655 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
5656 mappings around the calls to split_edge. */
5657 start_recording_case_labels ();
5658 FOR_ALL_BB (bb)
5660 FOR_EACH_EDGE (e, ei, bb->succs)
5661 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
5663 split_edge (e);
5666 end_recording_case_labels ();
5667 return 0;
5670 struct tree_opt_pass pass_split_crit_edges =
5672 "crited", /* name */
5673 NULL, /* gate */
5674 split_critical_edges, /* execute */
5675 NULL, /* sub */
5676 NULL, /* next */
5677 0, /* static_pass_number */
5678 TV_TREE_SPLIT_EDGES, /* tv_id */
5679 PROP_cfg, /* properties required */
5680 PROP_no_crit_edges, /* properties_provided */
5681 0, /* properties_destroyed */
5682 0, /* todo_flags_start */
5683 TODO_dump_func, /* todo_flags_finish */
5684 0 /* letter */
5688 /* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
5689 a temporary, making sure to register it for renaming if necessary,
5690 and finally return the temporary. Put the statements that compute
5691 EXP before the current statement in BSI. */
5693 tree
5694 gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
5696 tree t, new_stmt, orig_stmt;
5698 if (is_gimple_val (exp))
5699 return exp;
5701 t = make_rename_temp (type, NULL);
5702 new_stmt = build2_gimple (GIMPLE_MODIFY_STMT, t, exp);
5704 orig_stmt = bsi_stmt (*bsi);
5705 SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
5706 TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
5708 bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
5709 if (gimple_in_ssa_p (cfun))
5710 mark_symbols_for_renaming (new_stmt);
5712 return t;
5715 /* Build a ternary operation and gimplify it. Emit code before BSI.
5716 Return the gimple_val holding the result. */
5718 tree
5719 gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
5720 tree type, tree a, tree b, tree c)
5722 tree ret;
5724 ret = fold_build3 (code, type, a, b, c);
5725 STRIP_NOPS (ret);
5727 return gimplify_val (bsi, type, ret);
5730 /* Build a binary operation and gimplify it. Emit code before BSI.
5731 Return the gimple_val holding the result. */
5733 tree
5734 gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
5735 tree type, tree a, tree b)
5737 tree ret;
5739 ret = fold_build2 (code, type, a, b);
5740 STRIP_NOPS (ret);
5742 return gimplify_val (bsi, type, ret);
5745 /* Build a unary operation and gimplify it. Emit code before BSI.
5746 Return the gimple_val holding the result. */
5748 tree
5749 gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
5750 tree a)
5752 tree ret;
5754 ret = fold_build1 (code, type, a);
5755 STRIP_NOPS (ret);
5757 return gimplify_val (bsi, type, ret);
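/* Illustrative sketch only (hypothetical helper): emit the computation of
   A + B before the statement at *BSI and obtain a GIMPLE value holding the
   result, using the helpers above.  TYPE is assumed to be an arithmetic
   type; the helper name is made up for the example.  */
#if 0
static tree
emit_sum_example (block_stmt_iterator *bsi, tree type, tree a, tree b)
{
  /* gimplify_build2 folds the expression and then gimplify_val inserts
     any temporary assignment that is needed before *BSI.  */
  return gimplify_build2 (bsi, PLUS_EXPR, type, a, b);
}
#endif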
5762 /* Emit return warnings. */
5764 static unsigned int
5765 execute_warn_function_return (void)
5767 #ifdef USE_MAPPED_LOCATION
5768 source_location location;
5769 #else
5770 location_t *locus;
5771 #endif
5772 tree last;
5773 edge e;
5774 edge_iterator ei;
5776 /* If we have a path to EXIT, then we do return. */
5777 if (TREE_THIS_VOLATILE (cfun->decl)
5778 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
5780 #ifdef USE_MAPPED_LOCATION
5781 location = UNKNOWN_LOCATION;
5782 #else
5783 locus = NULL;
5784 #endif
5785 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5787 last = last_stmt (e->src);
5788 if (TREE_CODE (last) == RETURN_EXPR
5789 #ifdef USE_MAPPED_LOCATION
5790 && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
5791 #else
5792 && (locus = EXPR_LOCUS (last)) != NULL)
5793 #endif
5794 break;
5796 #ifdef USE_MAPPED_LOCATION
5797 if (location == UNKNOWN_LOCATION)
5798 location = cfun->function_end_locus;
5799 warning (0, "%H%<noreturn%> function does return", &location);
5800 #else
5801 if (!locus)
5802 locus = &cfun->function_end_locus;
5803 warning (0, "%H%<noreturn%> function does return", locus);
5804 #endif
5807 /* If we see "return;" in some basic block, then we do reach the end
5808 without returning a value. */
5809 else if (warn_return_type
5810 && !TREE_NO_WARNING (cfun->decl)
5811 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
5812 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
5814 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5816 tree last = last_stmt (e->src);
5817 if (TREE_CODE (last) == RETURN_EXPR
5818 && TREE_OPERAND (last, 0) == NULL
5819 && !TREE_NO_WARNING (last))
5821 #ifdef USE_MAPPED_LOCATION
5822 location = EXPR_LOCATION (last);
5823 if (location == UNKNOWN_LOCATION)
5824 location = cfun->function_end_locus;
5825 warning (0, "%Hcontrol reaches end of non-void function", &location);
5826 #else
5827 locus = EXPR_LOCUS (last);
5828 if (!locus)
5829 locus = &cfun->function_end_locus;
5830 warning (0, "%Hcontrol reaches end of non-void function", locus);
5831 #endif
5832 TREE_NO_WARNING (cfun->decl) = 1;
5833 break;
5837 return 0;
5841 /* Given a basic block B which ends with a conditional and has
5842 precisely two successors, determine which of the edges is taken if
5843 the conditional is true and which is taken if the conditional is
5844 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
5846 void
5847 extract_true_false_edges_from_block (basic_block b,
5848 edge *true_edge,
5849 edge *false_edge)
5851 edge e = EDGE_SUCC (b, 0);
5853 if (e->flags & EDGE_TRUE_VALUE)
5855 *true_edge = e;
5856 *false_edge = EDGE_SUCC (b, 1);
5858 else
5860 *false_edge = e;
5861 *true_edge = EDGE_SUCC (b, 1);
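/* Illustrative sketch only (hypothetical helper): the typical caller
   pattern for a block BB that is known to end in a COND_EXPR with exactly
   two successor edges.  */
#if 0
static edge
true_edge_of_cond_example (basic_block bb)
{
  edge true_edge, false_edge;

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  return true_edge;
}
#endif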
5865 struct tree_opt_pass pass_warn_function_return =
5867 NULL, /* name */
5868 NULL, /* gate */
5869 execute_warn_function_return, /* execute */
5870 NULL, /* sub */
5871 NULL, /* next */
5872 0, /* static_pass_number */
5873 0, /* tv_id */
5874 PROP_cfg, /* properties_required */
5875 0, /* properties_provided */
5876 0, /* properties_destroyed */
5877 0, /* todo_flags_start */
5878 0, /* todo_flags_finish */
5879 0 /* letter */
5882 /* Emit noreturn warnings. */
5884 static unsigned int
5885 execute_warn_function_noreturn (void)
5887 if (warn_missing_noreturn
5888 && !TREE_THIS_VOLATILE (cfun->decl)
5889 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
5890 && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl))
5891 warning (OPT_Wmissing_noreturn, "%Jfunction might be possible candidate "
5892 "for attribute %<noreturn%>",
5893 cfun->decl);
5894 return 0;
5897 struct tree_opt_pass pass_warn_function_noreturn =
5899 NULL, /* name */
5900 NULL, /* gate */
5901 execute_warn_function_noreturn, /* execute */
5902 NULL, /* sub */
5903 NULL, /* next */
5904 0, /* static_pass_number */
5905 0, /* tv_id */
5906 PROP_cfg, /* properties_required */
5907 0, /* properties_provided */
5908 0, /* properties_destroyed */
5909 0, /* todo_flags_start */
5910 0, /* todo_flags_finish */
5911 0 /* letter */