gcc/tree-cfg.c
/* Control flow functions for trees.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006
   Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "ggc.h"
#include "langhooks.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "toplev.h"
#include "except.h"
#include "cfgloop.h"
#include "cfglayout.h"
#include "hashtab.h"
#include "tree-ssa-propagate.h"
/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their TREE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of SWITCH_EXPRs.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

struct edge_to_cases_elt
{
  /* The edge itself.  Necessary for hashing and equality tests.  */
  edge e;

  /* The case labels associated with this edge.  We link these up via
     their TREE_CHAIN field, then we wipe out the TREE_CHAIN fields
     when we destroy the hash table.  This prevents problems when copying
     SWITCH_EXPRs.  */
  tree case_labels;
};

static htab_t edge_to_cases;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Nonzero if we found a computed goto while building basic blocks.  */
static bool found_computed_goto;

/* Basic blocks and flowgraphs.  */
static basic_block create_bb (void *, void *, basic_block);
static void make_blocks (tree);
static void factor_computed_gotos (void);

/* Edges.  */
static void make_edges (void);
static void make_cond_expr_edges (basic_block);
static void make_switch_expr_edges (basic_block);
static void make_goto_expr_edges (basic_block);
static edge tree_redirect_edge_and_branch (edge, basic_block);
static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
static unsigned int split_critical_edges (void);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (tree, tree);
static int tree_verify_flow_info (void);
static void tree_make_forwarder_block (edge);
static void tree_cfg2vcg (FILE *);
static inline void change_bb_for_stmt (tree t, basic_block bb);

/* Flowgraph optimization and cleanup.  */
static void tree_merge_blocks (basic_block, basic_block);
static bool tree_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (basic_block, tree);
static tree find_case_label_for_value (tree, tree);
void
init_empty_tree_cfg (void)
{
  /* Initialize the basic block array.  */
  init_flow ();
  profile_status = PROFILE_ABSENT;
  n_basic_blocks = NUM_FIXED_BLOCKS;
  last_basic_block = NUM_FIXED_BLOCKS;
  basic_block_info = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow (basic_block, gc, basic_block_info, initial_cfg_capacity);
  memset (VEC_address (basic_block, basic_block_info), 0,
          sizeof (basic_block) * initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  label_to_block_map = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow (basic_block, gc, label_to_block_map, initial_cfg_capacity);
  memset (VEC_address (basic_block, label_to_block_map),
          0, sizeof (basic_block) * initial_cfg_capacity);

  SET_BASIC_BLOCK (ENTRY_BLOCK, ENTRY_BLOCK_PTR);
  SET_BASIC_BLOCK (EXIT_BLOCK, EXIT_BLOCK_PTR);
  ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
}
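/* Illustrative note (added for exposition, not in the original source):
   after init_empty_tree_cfg the flowgraph holds only the two fixed blocks,
   chained as

       ENTRY_BLOCK_PTR  <->  EXIT_BLOCK_PTR

   with n_basic_blocks == NUM_FIXED_BLOCKS; the real blocks are created
   later by make_blocks/create_bb.  */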
151 /*---------------------------------------------------------------------------
152 Create basic blocks
153 ---------------------------------------------------------------------------*/
155 /* Entry point to the CFG builder for trees. TP points to the list of
156 statements to be added to the flowgraph. */
158 static void
159 build_tree_cfg (tree *tp)
161 /* Register specific tree functions. */
162 tree_register_cfg_hooks ();
164 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
166 init_empty_tree_cfg ();
168 found_computed_goto = 0;
169 make_blocks (*tp);
171 /* Computed gotos are hell to deal with, especially if there are
172 lots of them with a large number of destinations. So we factor
173 them to a common computed goto location before we build the
174 edge list. After we convert back to normal form, we will un-factor
175 the computed gotos since factoring introduces an unwanted jump. */
176 if (found_computed_goto)
177 factor_computed_gotos ();
179 /* Make sure there is always at least one block, even if it's empty. */
180 if (n_basic_blocks == NUM_FIXED_BLOCKS)
181 create_empty_bb (ENTRY_BLOCK_PTR);
183 /* Adjust the size of the array. */
184 if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
186 size_t old_size = VEC_length (basic_block, basic_block_info);
187 basic_block *p;
188 VEC_safe_grow (basic_block, gc, basic_block_info, n_basic_blocks);
189 p = VEC_address (basic_block, basic_block_info);
190 memset (&p[old_size], 0,
191 sizeof (basic_block) * (n_basic_blocks - old_size));
194 /* To speed up statement iterator walks, we first purge dead labels. */
195 cleanup_dead_labels ();
197 /* Group case nodes to reduce the number of edges.
198 We do this after cleaning up dead labels because otherwise we miss
199 a lot of obvious case merging opportunities. */
200 group_case_labels ();
202 /* Create the edges of the flowgraph. */
203 make_edges ();
205 /* Debugging dumps. */
207 /* Write the flowgraph to a VCG file. */
209 int local_dump_flags;
210 FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
211 if (vcg_file)
213 tree_cfg2vcg (vcg_file);
214 dump_end (TDI_vcg, vcg_file);
218 #ifdef ENABLE_CHECKING
219 verify_stmts ();
220 #endif
222 /* Dump a textual representation of the flowgraph. */
223 if (dump_file)
224 dump_tree_cfg (dump_file, dump_flags);
227 static unsigned int
228 execute_build_cfg (void)
230 build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
231 return 0;
234 struct tree_opt_pass pass_build_cfg =
236 "cfg", /* name */
237 NULL, /* gate */
238 execute_build_cfg, /* execute */
239 NULL, /* sub */
240 NULL, /* next */
241 0, /* static_pass_number */
242 TV_TREE_CFG, /* tv_id */
243 PROP_gimple_leh, /* properties_required */
244 PROP_cfg, /* properties_provided */
245 0, /* properties_destroyed */
246 0, /* todo_flags_start */
247 TODO_verify_stmts, /* todo_flags_finish */
248 0 /* letter */
251 /* Search the CFG for any computed gotos. If found, factor them to a
252 common computed goto site. Also record the location of that site so
253 that we can un-factor the gotos after we have converted back to
254 normal form. */
256 static void
257 factor_computed_gotos (void)
259 basic_block bb;
260 tree factored_label_decl = NULL;
261 tree var = NULL;
262 tree factored_computed_goto_label = NULL;
263 tree factored_computed_goto = NULL;
265 /* We know there are one or more computed gotos in this function.
266 Examine the last statement in each basic block to see if the block
267 ends with a computed goto. */
269 FOR_EACH_BB (bb)
271 block_stmt_iterator bsi = bsi_last (bb);
272 tree last;
274 if (bsi_end_p (bsi))
275 continue;
276 last = bsi_stmt (bsi);
278 /* Ignore the computed goto we create when we factor the original
279 computed gotos. */
280 if (last == factored_computed_goto)
281 continue;
283 /* If the last statement is a computed goto, factor it. */
284 if (computed_goto_p (last))
286 tree assignment;
288 /* The first time we find a computed goto we need to create
289 the factored goto block and the variable each original
290 computed goto will use for their goto destination. */
291 if (! factored_computed_goto)
293 basic_block new_bb = create_empty_bb (bb);
294 block_stmt_iterator new_bsi = bsi_start (new_bb);
296 /* Create the destination of the factored goto. Each original
297 computed goto will put its desired destination into this
298 variable and jump to the label we create immediately
299 below. */
300 var = create_tmp_var (ptr_type_node, "gotovar");
302 /* Build a label for the new block which will contain the
303 factored computed goto. */
304 factored_label_decl = create_artificial_label ();
305 factored_computed_goto_label
306 = build1 (LABEL_EXPR, void_type_node, factored_label_decl);
307 bsi_insert_after (&new_bsi, factored_computed_goto_label,
308 BSI_NEW_STMT);
310 /* Build our new computed goto. */
311 factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
312 bsi_insert_after (&new_bsi, factored_computed_goto,
313 BSI_NEW_STMT);
316 /* Copy the original computed goto's destination into VAR. */
317 assignment = build2_gimple (GIMPLE_MODIFY_STMT,
318 var, GOTO_DESTINATION (last));
319 bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);
321 /* And re-vector the computed goto to the new destination. */
322 GOTO_DESTINATION (last) = factored_label_decl;
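/* Illustrative sketch (added for exposition, not in the original source):
   after factoring, a function containing several computed gotos such as

       goto *p1;    ...    goto *p2;

   has each site rewritten to

       gotovar = p1;  goto <factored_label>;

   while a single new block holds

       <factored_label>:  goto *gotovar;

   so only that one block carries the large fan-out of abnormal edges.  */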
328 /* Build a flowgraph for the statement_list STMT_LIST. */
330 static void
331 make_blocks (tree stmt_list)
333 tree_stmt_iterator i = tsi_start (stmt_list);
334 tree stmt = NULL;
335 bool start_new_block = true;
336 bool first_stmt_of_list = true;
337 basic_block bb = ENTRY_BLOCK_PTR;
339 while (!tsi_end_p (i))
341 tree prev_stmt;
343 prev_stmt = stmt;
344 stmt = tsi_stmt (i);
346 /* If the statement starts a new basic block or if we have determined
347 in a previous pass that we need to create a new block for STMT, do
348 so now. */
349 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
351 if (!first_stmt_of_list)
352 stmt_list = tsi_split_statement_list_before (&i);
353 bb = create_basic_block (stmt_list, NULL, bb);
354 start_new_block = false;
357 /* Now add STMT to BB and create the subgraphs for special statement
358 codes. */
359 set_bb_for_stmt (stmt, bb);
361 if (computed_goto_p (stmt))
362 found_computed_goto = true;
364 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
365 next iteration. */
366 if (stmt_ends_bb_p (stmt))
367 start_new_block = true;
369 tsi_next (&i);
370 first_stmt_of_list = false;
375 /* Create and return a new empty basic block after bb AFTER. */
377 static basic_block
378 create_bb (void *h, void *e, basic_block after)
380 basic_block bb;
382 gcc_assert (!e);
384 /* Create and initialize a new basic block. Since alloc_block uses
385 ggc_alloc_cleared to allocate a basic block, we do not have to
386 clear the newly allocated basic block here. */
387 bb = alloc_block ();
389 bb->index = last_basic_block;
390 bb->flags = BB_NEW;
391 bb->stmt_list = h ? (tree) h : alloc_stmt_list ();
393 /* Add the new block to the linked list of blocks. */
394 link_block (bb, after);
396 /* Grow the basic block array if needed. */
397 if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
399 size_t old_size = VEC_length (basic_block, basic_block_info);
400 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
401 basic_block *p;
402 VEC_safe_grow (basic_block, gc, basic_block_info, new_size);
403 p = VEC_address (basic_block, basic_block_info);
404 memset (&p[old_size], 0, sizeof (basic_block) * (new_size - old_size));
407 /* Add the newly created block to the array. */
408 SET_BASIC_BLOCK (last_basic_block, bb);
410 n_basic_blocks++;
411 last_basic_block++;
413 return bb;
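/* Illustrative note (added for exposition, not in the original source):
   the growth policy above enlarges the basic block array by roughly a
   quarter each time it fills up; e.g. with last_basic_block == 20 the new
   size is 20 + (20 + 3) / 4 == 25.  */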
417 /*---------------------------------------------------------------------------
418 Edge creation
419 ---------------------------------------------------------------------------*/
421 /* Fold COND_EXPR_COND of each COND_EXPR. */
423 void
424 fold_cond_expr_cond (void)
426 basic_block bb;
428 FOR_EACH_BB (bb)
430 tree stmt = last_stmt (bb);
432 if (stmt
433 && TREE_CODE (stmt) == COND_EXPR)
435 tree cond = fold (COND_EXPR_COND (stmt));
436 if (integer_zerop (cond))
437 COND_EXPR_COND (stmt) = boolean_false_node;
438 else if (integer_onep (cond))
439 COND_EXPR_COND (stmt) = boolean_true_node;
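/* Illustrative example (added for exposition, not in the original source):
   a condition such as "if (0 != 0)" folds to a constant, so the COND_EXPR
   condition is canonicalized to boolean_false_node and the subsequent CFG
   cleanup can treat the true edge as dead.  */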
444 /* Join all the blocks in the flowgraph. */
446 static void
447 make_edges (void)
449 basic_block bb;
450 struct omp_region *cur_region = NULL;
452 /* Create an edge from entry to the first block with executable
453 statements in it. */
454 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);
456 /* Traverse the basic block array placing edges. */
457 FOR_EACH_BB (bb)
459 tree last = last_stmt (bb);
460 bool fallthru;
462 if (last)
464 enum tree_code code = TREE_CODE (last);
465 switch (code)
467 case GOTO_EXPR:
468 make_goto_expr_edges (bb);
469 fallthru = false;
470 break;
471 case RETURN_EXPR:
472 make_edge (bb, EXIT_BLOCK_PTR, 0);
473 fallthru = false;
474 break;
475 case COND_EXPR:
476 make_cond_expr_edges (bb);
477 fallthru = false;
478 break;
479 case SWITCH_EXPR:
480 make_switch_expr_edges (bb);
481 fallthru = false;
482 break;
483 case RESX_EXPR:
484 make_eh_edges (last);
485 fallthru = false;
486 break;
488 case CALL_EXPR:
489 /* If this function receives a nonlocal goto, then we need to
490 make edges from this call site to all the nonlocal goto
491 handlers. */
492 if (tree_can_make_abnormal_goto (last))
493 make_abnormal_goto_edges (bb, true);
495 /* If this statement has reachable exception handlers, then
496 create abnormal edges to them. */
497 make_eh_edges (last);
499 /* Some calls are known not to return. */
500 fallthru = !(call_expr_flags (last) & ECF_NORETURN);
501 break;
503 case MODIFY_EXPR:
504 gcc_unreachable ();
506 case GIMPLE_MODIFY_STMT:
507 if (is_ctrl_altering_stmt (last))
509 /* A GIMPLE_MODIFY_STMT may have a CALL_EXPR on its RHS and
510 the CALL_EXPR may have an abnormal edge. Search the RHS
511 for this case and create any required edges. */
512 if (tree_can_make_abnormal_goto (last))
513 make_abnormal_goto_edges (bb, true);
515 make_eh_edges (last);
517 fallthru = true;
518 break;
520 case OMP_PARALLEL:
521 case OMP_FOR:
522 case OMP_SINGLE:
523 case OMP_MASTER:
524 case OMP_ORDERED:
525 case OMP_CRITICAL:
526 case OMP_SECTION:
527 cur_region = new_omp_region (bb, code, cur_region);
528 fallthru = true;
529 break;
531 case OMP_SECTIONS:
532 cur_region = new_omp_region (bb, code, cur_region);
533 fallthru = false;
534 break;
536 case OMP_RETURN:
537 /* In the case of an OMP_SECTION, the edge will go somewhere
538 other than the next block. This will be created later. */
539 cur_region->exit = bb;
540 fallthru = cur_region->type != OMP_SECTION;
541 cur_region = cur_region->outer;
542 break;
544 case OMP_CONTINUE:
545 cur_region->cont = bb;
546 switch (cur_region->type)
548 case OMP_FOR:
549 /* ??? Technically there should be a some sort of loopback
550 edge here, but it goes to a block that doesn't exist yet,
551 and without it, updating the ssa form would be a real
552 bear. Fortunately, we don't yet do ssa before expanding
553 these nodes. */
554 break;
556 case OMP_SECTIONS:
557 /* Wire up the edges into and out of the nested sections. */
558 /* ??? Similarly wrt loopback. */
560 struct omp_region *i;
561 for (i = cur_region->inner; i ; i = i->next)
563 gcc_assert (i->type == OMP_SECTION);
564 make_edge (cur_region->entry, i->entry, 0);
565 make_edge (i->exit, bb, EDGE_FALLTHRU);
568 break;
570 default:
571 gcc_unreachable ();
573 fallthru = true;
574 break;
576 default:
577 gcc_assert (!stmt_ends_bb_p (last));
578 fallthru = true;
581 else
582 fallthru = true;
584 if (fallthru)
585 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
588 if (root_omp_region)
589 free_omp_regions ();
591 /* Fold COND_EXPR_COND of each COND_EXPR. */
592 fold_cond_expr_cond ();
594 /* Clean up the graph and warn for unreachable code. */
595 cleanup_tree_cfg ();
599 /* Create the edges for a COND_EXPR starting at block BB.
600 At this point, both clauses must contain only simple gotos. */
602 static void
603 make_cond_expr_edges (basic_block bb)
605 tree entry = last_stmt (bb);
606 basic_block then_bb, else_bb;
607 tree then_label, else_label;
608 edge e;
610 gcc_assert (entry);
611 gcc_assert (TREE_CODE (entry) == COND_EXPR);
613 /* Entry basic blocks for each component. */
614 then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
615 else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
616 then_bb = label_to_block (then_label);
617 else_bb = label_to_block (else_label);
619 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
620 #ifdef USE_MAPPED_LOCATION
621 e->goto_locus = EXPR_LOCATION (COND_EXPR_THEN (entry));
622 #else
623 e->goto_locus = EXPR_LOCUS (COND_EXPR_THEN (entry));
624 #endif
625 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
626 if (e)
628 #ifdef USE_MAPPED_LOCATION
629 e->goto_locus = EXPR_LOCATION (COND_EXPR_ELSE (entry));
630 #else
631 e->goto_locus = EXPR_LOCUS (COND_EXPR_ELSE (entry));
632 #endif
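/* Illustrative sketch (added for exposition, not in the original source):
   at this stage a lowered conditional has the form

       if (a < b) goto <L1>; else goto <L2>;

   so the EDGE_TRUE_VALUE edge goes to the block labeled <L1> and the
   EDGE_FALSE_VALUE edge to the block labeled <L2>.  */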
636 /* Hashing routine for EDGE_TO_CASES. */
638 static hashval_t
639 edge_to_cases_hash (const void *p)
641 edge e = ((struct edge_to_cases_elt *)p)->e;
643 /* Hash on the edge itself (which is a pointer). */
644 return htab_hash_pointer (e);
647 /* Equality routine for EDGE_TO_CASES, edges are unique, so testing
648 for equality is just a pointer comparison. */
650 static int
651 edge_to_cases_eq (const void *p1, const void *p2)
653 edge e1 = ((struct edge_to_cases_elt *)p1)->e;
654 edge e2 = ((struct edge_to_cases_elt *)p2)->e;
656 return e1 == e2;
659 /* Called for each element in the hash table (P) as we delete the
660 edge to cases hash table.
662 Clear all the TREE_CHAINs to prevent problems with copying of
663 SWITCH_EXPRs and structure sharing rules, then free the hash table
664 element. */
666 static void
667 edge_to_cases_cleanup (void *p)
669 struct edge_to_cases_elt *elt = (struct edge_to_cases_elt *) p;
670 tree t, next;
672 for (t = elt->case_labels; t; t = next)
674 next = TREE_CHAIN (t);
675 TREE_CHAIN (t) = NULL;
677 free (p);
680 /* Start recording information mapping edges to case labels. */
682 void
683 start_recording_case_labels (void)
685 gcc_assert (edge_to_cases == NULL);
687 edge_to_cases = htab_create (37,
688 edge_to_cases_hash,
689 edge_to_cases_eq,
690 edge_to_cases_cleanup);
693 /* Return nonzero if we are recording information for case labels. */
695 static bool
696 recording_case_labels_p (void)
698 return (edge_to_cases != NULL);
701 /* Stop recording information mapping edges to case labels and
702 remove any information we have recorded. */
703 void
704 end_recording_case_labels (void)
706 htab_delete (edge_to_cases);
707 edge_to_cases = NULL;
710 /* Record that CASE_LABEL (a CASE_LABEL_EXPR) references edge E. */
712 static void
713 record_switch_edge (edge e, tree case_label)
715 struct edge_to_cases_elt *elt;
716 void **slot;
718 /* Build a hash table element so we can see if E is already
719 in the table. */
720 elt = XNEW (struct edge_to_cases_elt);
721 elt->e = e;
722 elt->case_labels = case_label;
724 slot = htab_find_slot (edge_to_cases, elt, INSERT);
726 if (*slot == NULL)
728 /* E was not in the hash table. Install E into the hash table. */
729 *slot = (void *)elt;
731 else
733 /* E was already in the hash table. Free ELT as we do not need it
734 anymore. */
735 free (elt);
737 /* Get the entry stored in the hash table. */
738 elt = (struct edge_to_cases_elt *) *slot;
740 /* Add it to the chain of CASE_LABEL_EXPRs referencing E. */
741 TREE_CHAIN (case_label) = elt->case_labels;
742 elt->case_labels = case_label;
746 /* If we are inside a {start,end}_recording_cases block, then return
747 a chain of CASE_LABEL_EXPRs from T which reference E.
749 Otherwise return NULL. */
751 static tree
752 get_cases_for_edge (edge e, tree t)
754 struct edge_to_cases_elt elt, *elt_p;
755 void **slot;
756 size_t i, n;
757 tree vec;
759 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
760 chains available. Return NULL so the caller can detect this case. */
761 if (!recording_case_labels_p ())
762 return NULL;
764 restart:
765 elt.e = e;
766 elt.case_labels = NULL;
767 slot = htab_find_slot (edge_to_cases, &elt, NO_INSERT);
769 if (slot)
771 elt_p = (struct edge_to_cases_elt *)*slot;
772 return elt_p->case_labels;
775 /* If we did not find E in the hash table, then this must be the first
776 time we have been queried for information about E & T. Add all the
777 elements from T to the hash table then perform the query again. */
779 vec = SWITCH_LABELS (t);
780 n = TREE_VEC_LENGTH (vec);
781 for (i = 0; i < n; i++)
783 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
784 basic_block label_bb = label_to_block (lab);
785 record_switch_edge (find_edge (e->src, label_bb), TREE_VEC_ELT (vec, i));
787 goto restart;
790 /* Create the edges for a SWITCH_EXPR starting at block BB.
791 At this point, the switch body has been lowered and the
792 SWITCH_LABELS filled in, so this is in effect a multi-way branch. */
794 static void
795 make_switch_expr_edges (basic_block bb)
797 tree entry = last_stmt (bb);
798 size_t i, n;
799 tree vec;
801 vec = SWITCH_LABELS (entry);
802 n = TREE_VEC_LENGTH (vec);
804 for (i = 0; i < n; ++i)
806 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
807 basic_block label_bb = label_to_block (lab);
808 make_edge (bb, label_bb, 0);
813 /* Return the basic block holding label DEST. */
815 basic_block
816 label_to_block_fn (struct function *ifun, tree dest)
818 int uid = LABEL_DECL_UID (dest);
820 /* We would die hard when faced by an undefined label. Emit a label to
821 the very first basic block. This will hopefully make even the dataflow
822 and undefined variable warnings quite right. */
823 if ((errorcount || sorrycount) && uid < 0)
825 block_stmt_iterator bsi =
826 bsi_start (BASIC_BLOCK (NUM_FIXED_BLOCKS));
827 tree stmt;
829 stmt = build1 (LABEL_EXPR, void_type_node, dest);
830 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
831 uid = LABEL_DECL_UID (dest);
833 if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
834 <= (unsigned int) uid)
835 return NULL;
836 return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
839 /* Create edges for an abnormal goto statement at block BB. If FOR_CALL
840 is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */
842 void
843 make_abnormal_goto_edges (basic_block bb, bool for_call)
845 basic_block target_bb;
846 block_stmt_iterator bsi;
848 FOR_EACH_BB (target_bb)
849 for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
851 tree target = bsi_stmt (bsi);
853 if (TREE_CODE (target) != LABEL_EXPR)
854 break;
856 target = LABEL_EXPR_LABEL (target);
858 /* Make an edge to every label block that has been marked as a
859 potential target for a computed goto or a non-local goto. */
860 if ((FORCED_LABEL (target) && !for_call)
861 || (DECL_NONLOCAL (target) && for_call))
863 make_edge (bb, target_bb, EDGE_ABNORMAL);
864 break;
869 /* Create edges for a goto statement at block BB. */
871 static void
872 make_goto_expr_edges (basic_block bb)
874 block_stmt_iterator last = bsi_last (bb);
875 tree goto_t = bsi_stmt (last);
877 /* A simple GOTO creates normal edges. */
878 if (simple_goto_p (goto_t))
880 tree dest = GOTO_DESTINATION (goto_t);
881 edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
882 #ifdef USE_MAPPED_LOCATION
883 e->goto_locus = EXPR_LOCATION (goto_t);
884 #else
885 e->goto_locus = EXPR_LOCUS (goto_t);
886 #endif
887 bsi_remove (&last, true);
888 return;
891 /* A computed GOTO creates abnormal edges. */
892 make_abnormal_goto_edges (bb, false);
896 /*---------------------------------------------------------------------------
897 Flowgraph analysis
898 ---------------------------------------------------------------------------*/
900 /* Cleanup useless labels in basic blocks. This is something we wish
901 to do early because it allows us to group case labels before creating
902 the edges for the CFG, and it speeds up block statement iterators in
903 all passes later on.
   We only run this pass once; running it more than once is probably not
   profitable.  */
907 /* A map from basic block index to the leading label of that block. */
908 static tree *label_for_bb;
910 /* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
911 static void
912 update_eh_label (struct eh_region *region)
914 tree old_label = get_eh_region_tree_label (region);
915 if (old_label)
917 tree new_label;
918 basic_block bb = label_to_block (old_label);
920 /* ??? After optimizing, there may be EH regions with labels
921 that have already been removed from the function body, so
922 there is no basic block for them. */
923 if (! bb)
924 return;
926 new_label = label_for_bb[bb->index];
927 set_eh_region_tree_label (region, new_label);
931 /* Given LABEL return the first label in the same basic block. */
932 static tree
933 main_block_label (tree label)
935 basic_block bb = label_to_block (label);
937 /* label_to_block possibly inserted undefined label into the chain. */
938 if (!label_for_bb[bb->index])
939 label_for_bb[bb->index] = label;
940 return label_for_bb[bb->index];
943 /* Cleanup redundant labels. This is a three-step process:
944 1) Find the leading label for each block.
945 2) Redirect all references to labels to the leading labels.
946 3) Cleanup all useless labels. */
948 void
949 cleanup_dead_labels (void)
951 basic_block bb;
952 label_for_bb = XCNEWVEC (tree, last_basic_block);
954 /* Find a suitable label for each block. We use the first user-defined
955 label if there is one, or otherwise just the first label we see. */
956 FOR_EACH_BB (bb)
958 block_stmt_iterator i;
960 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
962 tree label, stmt = bsi_stmt (i);
964 if (TREE_CODE (stmt) != LABEL_EXPR)
965 break;
967 label = LABEL_EXPR_LABEL (stmt);
969 /* If we have not yet seen a label for the current block,
970 remember this one and see if there are more labels. */
971 if (! label_for_bb[bb->index])
973 label_for_bb[bb->index] = label;
974 continue;
977 /* If we did see a label for the current block already, but it
978 is an artificially created label, replace it if the current
979 label is a user defined label. */
980 if (! DECL_ARTIFICIAL (label)
981 && DECL_ARTIFICIAL (label_for_bb[bb->index]))
983 label_for_bb[bb->index] = label;
984 break;
989 /* Now redirect all jumps/branches to the selected label.
990 First do so for each block ending in a control statement. */
991 FOR_EACH_BB (bb)
993 tree stmt = last_stmt (bb);
994 if (!stmt)
995 continue;
997 switch (TREE_CODE (stmt))
999 case COND_EXPR:
1001 tree true_branch, false_branch;
1003 true_branch = COND_EXPR_THEN (stmt);
1004 false_branch = COND_EXPR_ELSE (stmt);
1006 GOTO_DESTINATION (true_branch)
1007 = main_block_label (GOTO_DESTINATION (true_branch));
1008 GOTO_DESTINATION (false_branch)
1009 = main_block_label (GOTO_DESTINATION (false_branch));
1011 break;
1014 case SWITCH_EXPR:
1016 size_t i;
1017 tree vec = SWITCH_LABELS (stmt);
1018 size_t n = TREE_VEC_LENGTH (vec);
1020 /* Replace all destination labels. */
1021 for (i = 0; i < n; ++i)
1023 tree elt = TREE_VEC_ELT (vec, i);
1024 tree label = main_block_label (CASE_LABEL (elt));
1025 CASE_LABEL (elt) = label;
1027 break;
1030 /* We have to handle GOTO_EXPRs until they're removed, and we don't
1031 remove them until after we've created the CFG edges. */
1032 case GOTO_EXPR:
1033 if (! computed_goto_p (stmt))
1035 GOTO_DESTINATION (stmt)
1036 = main_block_label (GOTO_DESTINATION (stmt));
1037 break;
1040 default:
1041 break;
1045 for_each_eh_region (update_eh_label);
1047 /* Finally, purge dead labels. All user-defined labels and labels that
1048 can be the target of non-local gotos and labels which have their
1049 address taken are preserved. */
1050 FOR_EACH_BB (bb)
1052 block_stmt_iterator i;
1053 tree label_for_this_bb = label_for_bb[bb->index];
1055 if (! label_for_this_bb)
1056 continue;
1058 for (i = bsi_start (bb); !bsi_end_p (i); )
1060 tree label, stmt = bsi_stmt (i);
1062 if (TREE_CODE (stmt) != LABEL_EXPR)
1063 break;
1065 label = LABEL_EXPR_LABEL (stmt);
1067 if (label == label_for_this_bb
1068 || ! DECL_ARTIFICIAL (label)
1069 || DECL_NONLOCAL (label)
1070 || FORCED_LABEL (label))
1071 bsi_next (&i);
1072 else
1073 bsi_remove (&i, true);
1077 free (label_for_bb);
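/* Illustrative example (added for exposition, not in the original source):
   if a block begins with

       <D123>:;  user_lab:;  <D124>:;

   where user_lab is the only user-defined label, user_lab becomes the
   leading label, gotos, switch cases and EH regions that refer to <D123>
   or <D124> are redirected to it, and the artificial labels are deleted.  */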
1080 /* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1081 and scan the sorted vector of cases. Combine the ones jumping to the
1082 same label.
1083 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
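/* For instance (illustrative, not in the original source), when cases 1,
   2 and 3 all branch to the same label,

       case 1: case 2: case 3: ...     becomes     case 1 ... 3: ...

   leaving one case vector entry for the whole consecutive range.  */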
1085 void
1086 group_case_labels (void)
1088 basic_block bb;
1090 FOR_EACH_BB (bb)
1092 tree stmt = last_stmt (bb);
1093 if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
1095 tree labels = SWITCH_LABELS (stmt);
1096 int old_size = TREE_VEC_LENGTH (labels);
1097 int i, j, new_size = old_size;
1098 tree default_case = TREE_VEC_ELT (labels, old_size - 1);
1099 tree default_label;
1101 /* The default label is always the last case in a switch
1102 statement after gimplification. */
1103 default_label = CASE_LABEL (default_case);
1105 /* Look for possible opportunities to merge cases.
1106 Ignore the last element of the label vector because it
1107 must be the default case. */
1108 i = 0;
1109 while (i < old_size - 1)
1111 tree base_case, base_label, base_high;
1112 base_case = TREE_VEC_ELT (labels, i);
1114 gcc_assert (base_case);
1115 base_label = CASE_LABEL (base_case);
1117 /* Discard cases that have the same destination as the
1118 default case. */
1119 if (base_label == default_label)
1121 TREE_VEC_ELT (labels, i) = NULL_TREE;
1122 i++;
1123 new_size--;
1124 continue;
1127 base_high = CASE_HIGH (base_case) ?
1128 CASE_HIGH (base_case) : CASE_LOW (base_case);
1129 i++;
1130 /* Try to merge case labels. Break out when we reach the end
1131 of the label vector or when we cannot merge the next case
1132 label with the current one. */
1133 while (i < old_size - 1)
1135 tree merge_case = TREE_VEC_ELT (labels, i);
1136 tree merge_label = CASE_LABEL (merge_case);
1137 tree t = int_const_binop (PLUS_EXPR, base_high,
1138 integer_one_node, 1);
1140 /* Merge the cases if they jump to the same place,
1141 and their ranges are consecutive. */
1142 if (merge_label == base_label
1143 && tree_int_cst_equal (CASE_LOW (merge_case), t))
1145 base_high = CASE_HIGH (merge_case) ?
1146 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1147 CASE_HIGH (base_case) = base_high;
1148 TREE_VEC_ELT (labels, i) = NULL_TREE;
1149 new_size--;
1150 i++;
1152 else
1153 break;
1157 /* Compress the case labels in the label vector, and adjust the
1158 length of the vector. */
1159 for (i = 0, j = 0; i < new_size; i++)
1161 while (! TREE_VEC_ELT (labels, j))
1162 j++;
1163 TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
1165 TREE_VEC_LENGTH (labels) = new_size;
1170 /* Checks whether we can merge block B into block A. */
1172 static bool
1173 tree_can_merge_blocks_p (basic_block a, basic_block b)
1175 tree stmt;
1176 block_stmt_iterator bsi;
1177 tree phi;
1179 if (!single_succ_p (a))
1180 return false;
1182 if (single_succ_edge (a)->flags & EDGE_ABNORMAL)
1183 return false;
1185 if (single_succ (a) != b)
1186 return false;
1188 if (!single_pred_p (b))
1189 return false;
1191 if (b == EXIT_BLOCK_PTR)
1192 return false;
1194 /* If A ends by a statement causing exceptions or something similar, we
1195 cannot merge the blocks. */
1196 stmt = last_stmt (a);
1197 if (stmt && stmt_ends_bb_p (stmt))
1198 return false;
1200 /* Do not allow a block with only a non-local label to be merged. */
1201 if (stmt && TREE_CODE (stmt) == LABEL_EXPR
1202 && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
1203 return false;
1205 /* It must be possible to eliminate all phi nodes in B. If ssa form
1206 is not up-to-date, we cannot eliminate any phis; however, if only
1207 some symbols as whole are marked for renaming, this is not a problem,
1208 as phi nodes for those symbols are irrelevant in updating anyway. */
1209 phi = phi_nodes (b);
1210 if (phi)
1212 if (name_mappings_registered_p ())
1213 return false;
1215 for (; phi; phi = PHI_CHAIN (phi))
1216 if (!is_gimple_reg (PHI_RESULT (phi))
1217 && !may_propagate_copy (PHI_RESULT (phi), PHI_ARG_DEF (phi, 0)))
1218 return false;
1221 /* Do not remove user labels. */
1222 for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
1224 stmt = bsi_stmt (bsi);
1225 if (TREE_CODE (stmt) != LABEL_EXPR)
1226 break;
1227 if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
1228 return false;
1231 /* Protect the loop latches. */
1232 if (current_loops
1233 && b->loop_father->latch == b)
1234 return false;
1236 return true;
1239 /* Replaces all uses of NAME by VAL. */
1241 void
1242 replace_uses_by (tree name, tree val)
1244 imm_use_iterator imm_iter;
1245 use_operand_p use;
1246 tree stmt;
1247 edge e;
1249 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1251 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1253 replace_exp (use, val);
1255 if (TREE_CODE (stmt) == PHI_NODE)
1257 e = PHI_ARG_EDGE (stmt, PHI_ARG_INDEX_FROM_USE (use));
1258 if (e->flags & EDGE_ABNORMAL)
1260 /* This can only occur for virtual operands, since
1261 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1262 would prevent replacement. */
1263 gcc_assert (!is_gimple_reg (name));
1264 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1268 if (TREE_CODE (stmt) != PHI_NODE)
1270 tree rhs;
1272 fold_stmt_inplace (stmt);
1273 rhs = get_rhs (stmt);
1274 if (TREE_CODE (rhs) == ADDR_EXPR)
1275 recompute_tree_invariant_for_addr_expr (rhs);
1277 maybe_clean_or_replace_eh_stmt (stmt, stmt);
1278 mark_new_vars_to_rename (stmt);
1282 gcc_assert (num_imm_uses (name) == 0);
1284 /* Also update the trees stored in loop structures. */
1285 if (current_loops)
1287 struct loop *loop;
1288 loop_iterator li;
1290 FOR_EACH_LOOP (li, loop, 0)
1292 substitute_in_loop_info (loop, name, val);
1297 /* Merge block B into block A. */
1299 static void
1300 tree_merge_blocks (basic_block a, basic_block b)
1302 block_stmt_iterator bsi;
1303 tree_stmt_iterator last;
1304 tree phi;
1306 if (dump_file)
1307 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1309 /* Remove all single-valued PHI nodes from block B of the form
1310 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1311 bsi = bsi_last (a);
1312 for (phi = phi_nodes (b); phi; phi = phi_nodes (b))
1314 tree def = PHI_RESULT (phi), use = PHI_ARG_DEF (phi, 0);
1315 tree copy;
1316 bool may_replace_uses = may_propagate_copy (def, use);
1318 /* In case we have loops to care about, do not propagate arguments of
1319 loop closed ssa phi nodes. */
1320 if (current_loops
1321 && is_gimple_reg (def)
1322 && TREE_CODE (use) == SSA_NAME
1323 && a->loop_father != b->loop_father)
1324 may_replace_uses = false;
1326 if (!may_replace_uses)
1328 gcc_assert (is_gimple_reg (def));
1330 /* Note that just emitting the copies is fine -- there is no problem
1331 with ordering of phi nodes. This is because A is the single
1332 predecessor of B, therefore results of the phi nodes cannot
1333 appear as arguments of the phi nodes. */
1334 copy = build2_gimple (GIMPLE_MODIFY_STMT, def, use);
1335 bsi_insert_after (&bsi, copy, BSI_NEW_STMT);
1336 SET_PHI_RESULT (phi, NULL_TREE);
1337 SSA_NAME_DEF_STMT (def) = copy;
1339 else
1340 replace_uses_by (def, use);
1342 remove_phi_node (phi, NULL);
1345 /* Ensure that B follows A. */
1346 move_block_after (b, a);
1348 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1349 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1351 /* Remove labels from B and set bb_for_stmt to A for other statements. */
1352 for (bsi = bsi_start (b); !bsi_end_p (bsi);)
1354 if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
1356 tree label = bsi_stmt (bsi);
1358 bsi_remove (&bsi, false);
1359 /* Now that we can thread computed gotos, we might have
1360 a situation where we have a forced label in block B
1361 However, the label at the start of block B might still be
1362 used in other ways (think about the runtime checking for
1363 Fortran assigned gotos). So we can not just delete the
1364 label. Instead we move the label to the start of block A. */
1365 if (FORCED_LABEL (LABEL_EXPR_LABEL (label)))
1367 block_stmt_iterator dest_bsi = bsi_start (a);
1368 bsi_insert_before (&dest_bsi, label, BSI_NEW_STMT);
1371 else
1373 change_bb_for_stmt (bsi_stmt (bsi), a);
1374 bsi_next (&bsi);
1378 /* Merge the chains. */
1379 last = tsi_last (a->stmt_list);
1380 tsi_link_after (&last, b->stmt_list, TSI_NEW_STMT);
1381 b->stmt_list = NULL;
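/* Illustrative sketch (added for exposition, not in the original source):
   when B starts with a single-argument PHI node such as

       x_2 = PHI <x_1(A)>

   the merge either propagates x_1 into every use of x_2, or, when
   propagation is not allowed, emits the copy "x_2 = x_1" at the end of A
   before the PHI node is removed.  */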
/* Return the one of two successors of BB that is not reached by a
   complex edge, if there is one.  Else, return BB.  We use
   this in optimizations that use post-dominators for their heuristics,
   to catch the cases in C++ where function calls are involved.  */
1390 basic_block
1391 single_noncomplex_succ (basic_block bb)
1393 edge e0, e1;
1394 if (EDGE_COUNT (bb->succs) != 2)
1395 return bb;
1397 e0 = EDGE_SUCC (bb, 0);
1398 e1 = EDGE_SUCC (bb, 1);
1399 if (e0->flags & EDGE_COMPLEX)
1400 return e1->dest;
1401 if (e1->flags & EDGE_COMPLEX)
1402 return e0->dest;
1404 return bb;
1408 /* Walk the function tree removing unnecessary statements.
1410 * Empty statement nodes are removed
1412 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1414 * Unnecessary COND_EXPRs are removed
1416 * Some unnecessary BIND_EXPRs are removed
1418 Clearly more work could be done. The trick is doing the analysis
1419 and removal fast enough to be a net improvement in compile times.
1421 Note that when we remove a control structure such as a COND_EXPR
1422 BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1423 to ensure we eliminate all the useless code. */
1425 struct rus_data
1427 tree *last_goto;
1428 bool repeat;
1429 bool may_throw;
1430 bool may_branch;
1431 bool has_label;
1434 static void remove_useless_stmts_1 (tree *, struct rus_data *);
1436 static bool
1437 remove_useless_stmts_warn_notreached (tree stmt)
1439 if (EXPR_HAS_LOCATION (stmt))
1441 location_t loc = EXPR_LOCATION (stmt);
1442 if (LOCATION_LINE (loc) > 0)
1444 warning (0, "%Hwill never be executed", &loc);
1445 return true;
1449 switch (TREE_CODE (stmt))
1451 case STATEMENT_LIST:
1453 tree_stmt_iterator i;
1454 for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
1455 if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
1456 return true;
1458 break;
1460 case COND_EXPR:
1461 if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
1462 return true;
1463 if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
1464 return true;
1465 if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
1466 return true;
1467 break;
1469 case TRY_FINALLY_EXPR:
1470 case TRY_CATCH_EXPR:
1471 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
1472 return true;
1473 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
1474 return true;
1475 break;
1477 case CATCH_EXPR:
1478 return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
1479 case EH_FILTER_EXPR:
1480 return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
1481 case BIND_EXPR:
1482 return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));
1484 default:
1485 /* Not a live container. */
1486 break;
1489 return false;
1492 static void
1493 remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
1495 tree then_clause, else_clause, cond;
1496 bool save_has_label, then_has_label, else_has_label;
1498 save_has_label = data->has_label;
1499 data->has_label = false;
1500 data->last_goto = NULL;
1502 remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);
1504 then_has_label = data->has_label;
1505 data->has_label = false;
1506 data->last_goto = NULL;
1508 remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);
1510 else_has_label = data->has_label;
1511 data->has_label = save_has_label | then_has_label | else_has_label;
1513 then_clause = COND_EXPR_THEN (*stmt_p);
1514 else_clause = COND_EXPR_ELSE (*stmt_p);
1515 cond = fold (COND_EXPR_COND (*stmt_p));
1517 /* If neither arm does anything at all, we can remove the whole IF. */
1518 if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
1520 *stmt_p = build_empty_stmt ();
1521 data->repeat = true;
1524 /* If there are no reachable statements in an arm, then we can
1525 zap the entire conditional. */
1526 else if (integer_nonzerop (cond) && !else_has_label)
1528 if (warn_notreached)
1529 remove_useless_stmts_warn_notreached (else_clause);
1530 *stmt_p = then_clause;
1531 data->repeat = true;
1533 else if (integer_zerop (cond) && !then_has_label)
1535 if (warn_notreached)
1536 remove_useless_stmts_warn_notreached (then_clause);
1537 *stmt_p = else_clause;
1538 data->repeat = true;
1541 /* Check a couple of simple things on then/else with single stmts. */
1542 else
1544 tree then_stmt = expr_only (then_clause);
1545 tree else_stmt = expr_only (else_clause);
1547 /* Notice branches to a common destination. */
1548 if (then_stmt && else_stmt
1549 && TREE_CODE (then_stmt) == GOTO_EXPR
1550 && TREE_CODE (else_stmt) == GOTO_EXPR
1551 && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
1553 *stmt_p = then_stmt;
1554 data->repeat = true;
1557 /* If the THEN/ELSE clause merely assigns a value to a variable or
1558 parameter which is already known to contain that value, then
1559 remove the useless THEN/ELSE clause. */
1560 else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1562 if (else_stmt
1563 && TREE_CODE (else_stmt) == GIMPLE_MODIFY_STMT
1564 && GIMPLE_STMT_OPERAND (else_stmt, 0) == cond
1565 && integer_zerop (GIMPLE_STMT_OPERAND (else_stmt, 1)))
1566 COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
1568 else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
1569 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1570 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1571 && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
1573 tree stmt = (TREE_CODE (cond) == EQ_EXPR
1574 ? then_stmt : else_stmt);
1575 tree *location = (TREE_CODE (cond) == EQ_EXPR
1576 ? &COND_EXPR_THEN (*stmt_p)
1577 : &COND_EXPR_ELSE (*stmt_p));
1579 if (stmt
1580 && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
1581 && GIMPLE_STMT_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
1582 && GIMPLE_STMT_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
1583 *location = alloc_stmt_list ();
1587 /* Protect GOTOs in the arm of COND_EXPRs from being removed. They
1588 would be re-introduced during lowering. */
1589 data->last_goto = NULL;
1593 static void
1594 remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
1596 bool save_may_branch, save_may_throw;
1597 bool this_may_branch, this_may_throw;
1599 /* Collect may_branch and may_throw information for the body only. */
1600 save_may_branch = data->may_branch;
1601 save_may_throw = data->may_throw;
1602 data->may_branch = false;
1603 data->may_throw = false;
1604 data->last_goto = NULL;
1606 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1608 this_may_branch = data->may_branch;
1609 this_may_throw = data->may_throw;
1610 data->may_branch |= save_may_branch;
1611 data->may_throw |= save_may_throw;
1612 data->last_goto = NULL;
1614 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1616 /* If the body is empty, then we can emit the FINALLY block without
1617 the enclosing TRY_FINALLY_EXPR. */
1618 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
1620 *stmt_p = TREE_OPERAND (*stmt_p, 1);
1621 data->repeat = true;
1624 /* If the handler is empty, then we can emit the TRY block without
1625 the enclosing TRY_FINALLY_EXPR. */
1626 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1628 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1629 data->repeat = true;
1632 /* If the body neither throws, nor branches, then we can safely
1633 string the TRY and FINALLY blocks together. */
1634 else if (!this_may_branch && !this_may_throw)
1636 tree stmt = *stmt_p;
1637 *stmt_p = TREE_OPERAND (stmt, 0);
1638 append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
1639 data->repeat = true;
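/* Illustrative example (added for exposition, not in the original source):
   a TRY_FINALLY_EXPR whose body can neither throw nor branch, e.g. a body
   of "x = 1" with a cleanup of "do_cleanup ()", is flattened into the
   plain sequence "x = 1; do_cleanup ();"; an empty body or an empty
   handler likewise drops the wrapper entirely.  */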
1644 static void
1645 remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
1647 bool save_may_throw, this_may_throw;
1648 tree_stmt_iterator i;
1649 tree stmt;
1651 /* Collect may_throw information for the body only. */
1652 save_may_throw = data->may_throw;
1653 data->may_throw = false;
1654 data->last_goto = NULL;
1656 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1658 this_may_throw = data->may_throw;
1659 data->may_throw = save_may_throw;
1661 /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
1662 if (!this_may_throw)
1664 if (warn_notreached)
1665 remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
1666 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1667 data->repeat = true;
1668 return;
1671 /* Process the catch clause specially. We may be able to tell that
1672 no exceptions propagate past this point. */
1674 this_may_throw = true;
1675 i = tsi_start (TREE_OPERAND (*stmt_p, 1));
1676 stmt = tsi_stmt (i);
1677 data->last_goto = NULL;
1679 switch (TREE_CODE (stmt))
1681 case CATCH_EXPR:
1682 for (; !tsi_end_p (i); tsi_next (&i))
1684 stmt = tsi_stmt (i);
1685 /* If we catch all exceptions, then the body does not
1686 propagate exceptions past this point. */
1687 if (CATCH_TYPES (stmt) == NULL)
1688 this_may_throw = false;
1689 data->last_goto = NULL;
1690 remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
1692 break;
1694 case EH_FILTER_EXPR:
1695 if (EH_FILTER_MUST_NOT_THROW (stmt))
1696 this_may_throw = false;
1697 else if (EH_FILTER_TYPES (stmt) == NULL)
1698 this_may_throw = false;
1699 remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
1700 break;
1702 default:
1703 /* Otherwise this is a cleanup. */
1704 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1706 /* If the cleanup is empty, then we can emit the TRY block without
1707 the enclosing TRY_CATCH_EXPR. */
1708 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1710 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1711 data->repeat = true;
1713 break;
1715 data->may_throw |= this_may_throw;
1719 static void
1720 remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
1722 tree block;
1724 /* First remove anything underneath the BIND_EXPR. */
1725 remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);
1727 /* If the BIND_EXPR has no variables, then we can pull everything
1728 up one level and remove the BIND_EXPR, unless this is the toplevel
1729 BIND_EXPR for the current function or an inlined function.
1731 When this situation occurs we will want to apply this
1732 optimization again. */
1733 block = BIND_EXPR_BLOCK (*stmt_p);
1734 if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
1735 && *stmt_p != DECL_SAVED_TREE (current_function_decl)
1736 && (! block
1737 || ! BLOCK_ABSTRACT_ORIGIN (block)
1738 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1739 != FUNCTION_DECL)))
1741 *stmt_p = BIND_EXPR_BODY (*stmt_p);
1742 data->repeat = true;
1747 static void
1748 remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
1750 tree dest = GOTO_DESTINATION (*stmt_p);
1752 data->may_branch = true;
1753 data->last_goto = NULL;
1755 /* Record the last goto expr, so that we can delete it if unnecessary. */
1756 if (TREE_CODE (dest) == LABEL_DECL)
1757 data->last_goto = stmt_p;
1761 static void
1762 remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
1764 tree label = LABEL_EXPR_LABEL (*stmt_p);
1766 data->has_label = true;
1768 /* We do want to jump across non-local label receiver code. */
1769 if (DECL_NONLOCAL (label))
1770 data->last_goto = NULL;
1772 else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
1774 *data->last_goto = build_empty_stmt ();
1775 data->repeat = true;
1778 /* ??? Add something here to delete unused labels. */
1782 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
1783 decl. This allows us to eliminate redundant or useless
1784 calls to "const" functions.
   The gimplifier already does the same operation, but we may notice
   functions being const or pure only after their calls have been
   gimplified, so we need to update the flag.  */
1790 static void
1791 update_call_expr_flags (tree call)
1793 tree decl = get_callee_fndecl (call);
1794 if (!decl)
1795 return;
1796 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1797 TREE_SIDE_EFFECTS (call) = 0;
1798 if (TREE_NOTHROW (decl))
1799 TREE_NOTHROW (call) = 1;
1803 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1805 void
1806 notice_special_calls (tree t)
1808 int flags = call_expr_flags (t);
1810 if (flags & ECF_MAY_BE_ALLOCA)
1811 current_function_calls_alloca = true;
1812 if (flags & ECF_RETURNS_TWICE)
1813 current_function_calls_setjmp = true;
1817 /* Clear flags set by notice_special_calls. Used by dead code removal
1818 to update the flags. */
1820 void
1821 clear_special_calls (void)
1823 current_function_calls_alloca = false;
1824 current_function_calls_setjmp = false;
1828 static void
1829 remove_useless_stmts_1 (tree *tp, struct rus_data *data)
1831 tree t = *tp, op;
1833 switch (TREE_CODE (t))
1835 case COND_EXPR:
1836 remove_useless_stmts_cond (tp, data);
1837 break;
1839 case TRY_FINALLY_EXPR:
1840 remove_useless_stmts_tf (tp, data);
1841 break;
1843 case TRY_CATCH_EXPR:
1844 remove_useless_stmts_tc (tp, data);
1845 break;
1847 case BIND_EXPR:
1848 remove_useless_stmts_bind (tp, data);
1849 break;
1851 case GOTO_EXPR:
1852 remove_useless_stmts_goto (tp, data);
1853 break;
1855 case LABEL_EXPR:
1856 remove_useless_stmts_label (tp, data);
1857 break;
1859 case RETURN_EXPR:
1860 fold_stmt (tp);
1861 data->last_goto = NULL;
1862 data->may_branch = true;
1863 break;
1865 case CALL_EXPR:
1866 fold_stmt (tp);
1867 data->last_goto = NULL;
1868 notice_special_calls (t);
1869 update_call_expr_flags (t);
1870 if (tree_could_throw_p (t))
1871 data->may_throw = true;
1872 break;
1874 case MODIFY_EXPR:
1875 gcc_unreachable ();
1877 case GIMPLE_MODIFY_STMT:
1878 data->last_goto = NULL;
1879 fold_stmt (tp);
1880 op = get_call_expr_in (t);
1881 if (op)
1883 update_call_expr_flags (op);
1884 notice_special_calls (op);
1886 if (tree_could_throw_p (t))
1887 data->may_throw = true;
1888 break;
1890 case STATEMENT_LIST:
1892 tree_stmt_iterator i = tsi_start (t);
1893 while (!tsi_end_p (i))
1895 t = tsi_stmt (i);
1896 if (IS_EMPTY_STMT (t))
1898 tsi_delink (&i);
1899 continue;
1902 remove_useless_stmts_1 (tsi_stmt_ptr (i), data);
1904 t = tsi_stmt (i);
1905 if (TREE_CODE (t) == STATEMENT_LIST)
1907 tsi_link_before (&i, t, TSI_SAME_STMT);
1908 tsi_delink (&i);
1910 else
1911 tsi_next (&i);
1914 break;
1915 case ASM_EXPR:
1916 fold_stmt (tp);
1917 data->last_goto = NULL;
1918 break;
1920 default:
1921 data->last_goto = NULL;
1922 break;
1926 static unsigned int
1927 remove_useless_stmts (void)
1929 struct rus_data data;
1931 clear_special_calls ();
  do
    {
      memset (&data, 0, sizeof (data));
      remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
    }
  while (data.repeat);
1939 return 0;
1943 struct tree_opt_pass pass_remove_useless_stmts =
1945 "useless", /* name */
1946 NULL, /* gate */
1947 remove_useless_stmts, /* execute */
1948 NULL, /* sub */
1949 NULL, /* next */
1950 0, /* static_pass_number */
1951 0, /* tv_id */
1952 PROP_gimple_any, /* properties_required */
1953 0, /* properties_provided */
1954 0, /* properties_destroyed */
1955 0, /* todo_flags_start */
1956 TODO_dump_func, /* todo_flags_finish */
1957 0 /* letter */
1960 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1962 static void
1963 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1965 tree phi;
1967 /* Since this block is no longer reachable, we can just delete all
1968 of its PHI nodes. */
1969 phi = phi_nodes (bb);
1970 while (phi)
1972 tree next = PHI_CHAIN (phi);
1973 remove_phi_node (phi, NULL_TREE);
1974 phi = next;
1977 /* Remove edges to BB's successors. */
1978 while (EDGE_COUNT (bb->succs) > 0)
1979 remove_edge (EDGE_SUCC (bb, 0));
1983 /* Remove statements of basic block BB. */
1985 static void
1986 remove_bb (basic_block bb)
1988 block_stmt_iterator i;
1989 #ifdef USE_MAPPED_LOCATION
1990 source_location loc = UNKNOWN_LOCATION;
1991 #else
1992 source_locus loc = 0;
1993 #endif
1995 if (dump_file)
1997 fprintf (dump_file, "Removing basic block %d\n", bb->index);
1998 if (dump_flags & TDF_DETAILS)
2000 dump_bb (bb, dump_file, 0);
2001 fprintf (dump_file, "\n");
2005 if (current_loops)
2007 struct loop *loop = bb->loop_father;
2009 /* If a loop gets removed, clean up the information associated
2010 with it. */
2011 if (loop->latch == bb
2012 || loop->header == bb)
2013 free_numbers_of_iterations_estimates_loop (loop);
2016 /* Remove all the instructions in the block. */
2017 for (i = bsi_start (bb); !bsi_end_p (i);)
2019 tree stmt = bsi_stmt (i);
2020 if (TREE_CODE (stmt) == LABEL_EXPR
2021 && (FORCED_LABEL (LABEL_EXPR_LABEL (stmt))
2022 || DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt))))
2024 basic_block new_bb;
2025 block_stmt_iterator new_bsi;
2027 /* A non-reachable non-local label may still be referenced.
2028 But it no longer needs to carry the extra semantics of
2029 non-locality. */
2030 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
2032 DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)) = 0;
2033 FORCED_LABEL (LABEL_EXPR_LABEL (stmt)) = 1;
2036 new_bb = bb->prev_bb;
2037 new_bsi = bsi_start (new_bb);
2038 bsi_remove (&i, false);
2039 bsi_insert_before (&new_bsi, stmt, BSI_NEW_STMT);
2041 else
2043 /* Release SSA definitions if we are in SSA. Note that we
2044 may be called when not in SSA. For example,
2045 final_cleanup calls this function via
2046 cleanup_tree_cfg. */
2047 if (gimple_in_ssa_p (cfun))
2048 release_defs (stmt);
2050 bsi_remove (&i, true);
2053 /* Don't warn for removed gotos. Gotos are often removed due to
2054 jump threading, thus resulting in bogus warnings. Not great,
2055 since this way we lose warnings for gotos in the original
2056 program that are indeed unreachable. */
2057 if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
2059 #ifdef USE_MAPPED_LOCATION
2060 if (EXPR_HAS_LOCATION (stmt))
2061 loc = EXPR_LOCATION (stmt);
2062 #else
2063 source_locus t;
2064 t = EXPR_LOCUS (stmt);
2065 if (t && LOCATION_LINE (*t) > 0)
2066 loc = t;
2067 #endif
  /* If requested, give a warning that the first statement in the
     block is unreachable.  The loop above records a location only the
     first time it sees one, so LOC refers to the first statement in
     the block that has a location.  */
2075 #ifdef USE_MAPPED_LOCATION
2076 if (loc > BUILTINS_LOCATION)
2077 warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
2078 #else
2079 if (loc)
2080 warning (OPT_Wunreachable_code, "%Hwill never be executed", loc);
2081 #endif
2083 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2087 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2088 predicate VAL, return the edge that will be taken out of the block.
2089 If VAL does not match a unique edge, NULL is returned. */
2091 edge
2092 find_taken_edge (basic_block bb, tree val)
2094 tree stmt;
2096 stmt = last_stmt (bb);
2098 gcc_assert (stmt);
2099 gcc_assert (is_ctrl_stmt (stmt));
2100 gcc_assert (val);
2102 if (! is_gimple_min_invariant (val))
2103 return NULL;
2105 if (TREE_CODE (stmt) == COND_EXPR)
2106 return find_taken_edge_cond_expr (bb, val);
2108 if (TREE_CODE (stmt) == SWITCH_EXPR)
2109 return find_taken_edge_switch_expr (bb, val);
2111 if (computed_goto_p (stmt))
2112 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2114 gcc_unreachable ();
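/* Editorial sketch, not part of the original file: a typical caller has just
   folded the controlling expression of BB to a constant and asks which
   successor remains reachable.  Only find_taken_edge and the accessors used
   below come from GCC; the wrapping is hypothetical:

     tree val = fold (COND_EXPR_COND (last_stmt (bb)));
     edge taken = find_taken_edge (bb, val);

   If TAKEN is non-NULL, every other outgoing edge of BB is unreachable for
   that value and may be removed by CFG cleanup.  */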
2117 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2118 statement, determine which of the outgoing edges will be taken out of the
2119 block. Return NULL if any edge may be taken. */
2121 static edge
2122 find_taken_edge_computed_goto (basic_block bb, tree val)
2124 basic_block dest;
2125 edge e = NULL;
2127 dest = label_to_block (val);
2128 if (dest)
2130 e = find_edge (bb, dest);
2131 gcc_assert (e != NULL);
2134 return e;
2137 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2138 statement, determine which of the two edges will be taken out of the
2139 block. Return NULL if either edge may be taken. */
2141 static edge
2142 find_taken_edge_cond_expr (basic_block bb, tree val)
2144 edge true_edge, false_edge;
2146 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2148 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2149 return (zero_p (val) ? false_edge : true_edge);
2152 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2153 statement, determine which edge will be taken out of the block. Return
2154 NULL if any edge may be taken. */
2156 static edge
2157 find_taken_edge_switch_expr (basic_block bb, tree val)
2159 tree switch_expr, taken_case;
2160 basic_block dest_bb;
2161 edge e;
2163 switch_expr = last_stmt (bb);
2164 taken_case = find_case_label_for_value (switch_expr, val);
2165 dest_bb = label_to_block (CASE_LABEL (taken_case));
2167 e = find_edge (bb, dest_bb);
2168 gcc_assert (e);
2169 return e;
2173 /* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
2174 We can make optimal use here of the fact that the case labels are
2175 sorted: We can do a binary search for a case matching VAL. */
2177 static tree
2178 find_case_label_for_value (tree switch_expr, tree val)
2180 tree vec = SWITCH_LABELS (switch_expr);
2181 size_t low, high, n = TREE_VEC_LENGTH (vec);
2182 tree default_case = TREE_VEC_ELT (vec, n - 1);
2184 for (low = -1, high = n - 1; high - low > 1; )
2186 size_t i = (high + low) / 2;
2187 tree t = TREE_VEC_ELT (vec, i);
2188 int cmp;
2190 /* Cache the result of comparing CASE_LOW and val. */
2191 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2193 if (cmp > 0)
2194 high = i;
2195 else
2196 low = i;
2198 if (CASE_HIGH (t) == NULL)
2200 /* A single-valued case label. */
2201 if (cmp == 0)
2202 return t;
2204 else
2206 /* A case range. We can only handle integer ranges. */
2207 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2208 return t;
2212 return default_case;
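/* Editorial example, not in the original source: for the sorted case vector
   { case 1:  case 5:  case 10 ... 20:  default: } the loop above probes the
   CASE_LOW values in binary-search order.  With VAL == 15 it probes "case 5"
   and then "case 10 ... 20", whose CASE_HIGH check succeeds, so that label is
   returned; with VAL == 7 neither probe matches and DEFAULT_CASE, the last
   element of the vector, is returned.  */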
2218 /*---------------------------------------------------------------------------
2219 Debugging functions
2220 ---------------------------------------------------------------------------*/
2222 /* Dump tree-specific information of block BB to file OUTF. */
2224 void
2225 tree_dump_bb (basic_block bb, FILE *outf, int indent)
2227 dump_generic_bb (outf, bb, indent, TDF_VOPS);
2231 /* Dump a basic block on stderr. */
2233 void
2234 debug_tree_bb (basic_block bb)
2236 dump_bb (bb, stderr, 0);
2240 /* Dump basic block with index N on stderr. */
2242 basic_block
2243 debug_tree_bb_n (int n)
2245 debug_tree_bb (BASIC_BLOCK (n));
2246 return BASIC_BLOCK (n);
2250 /* Dump the CFG on stderr.
2252 FLAGS are the same as those used by the tree dumping functions
2253 (see TDF_* in tree-pass.h). */
2255 void
2256 debug_tree_cfg (int flags)
2258 dump_tree_cfg (stderr, flags);
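/* Editorial note, not in the original source: the debug_* entry points in
   this section are intended to be called by hand from the debugger, e.g.

     (gdb) call debug_tree_bb_n (42)
     (gdb) call debug_tree_cfg (TDF_DETAILS | TDF_STATS)

   which is why they are kept extern even when nothing in the compiler
   calls them directly.  */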
2262 /* Dump the program showing basic block boundaries on the given FILE.
2264 FLAGS are the same as those used by the tree dumping functions (see TDF_* in
2265 tree-pass.h). */
2267 void
2268 dump_tree_cfg (FILE *file, int flags)
2270 if (flags & TDF_DETAILS)
2272 const char *funcname
2273 = lang_hooks.decl_printable_name (current_function_decl, 2);
2275 fputc ('\n', file);
2276 fprintf (file, ";; Function %s\n\n", funcname);
2277 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2278 n_basic_blocks, n_edges, last_basic_block);
2280 brief_dump_cfg (file);
2281 fprintf (file, "\n");
2284 if (flags & TDF_STATS)
2285 dump_cfg_stats (file);
2287 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2291 /* Dump CFG statistics on FILE. */
2293 void
2294 dump_cfg_stats (FILE *file)
2296 static long max_num_merged_labels = 0;
2297 unsigned long size, total = 0;
2298 long num_edges;
2299 basic_block bb;
2300 const char * const fmt_str = "%-30s%-13s%12s\n";
2301 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2302 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2303 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2304 const char *funcname
2305 = lang_hooks.decl_printable_name (current_function_decl, 2);
2308 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2310 fprintf (file, "---------------------------------------------------------\n");
2311 fprintf (file, fmt_str, "", " Number of ", "Memory");
2312 fprintf (file, fmt_str, "", " instances ", "used ");
2313 fprintf (file, "---------------------------------------------------------\n");
2315 size = n_basic_blocks * sizeof (struct basic_block_def);
2316 total += size;
2317 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2318 SCALE (size), LABEL (size));
2320 num_edges = 0;
2321 FOR_EACH_BB (bb)
2322 num_edges += EDGE_COUNT (bb->succs);
2323 size = num_edges * sizeof (struct edge_def);
2324 total += size;
2325 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2327 fprintf (file, "---------------------------------------------------------\n");
2328 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2329 LABEL (total));
2330 fprintf (file, "---------------------------------------------------------\n");
2331 fprintf (file, "\n");
2333 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2334 max_num_merged_labels = cfg_stats.num_merged_labels;
2336 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2337 cfg_stats.num_merged_labels, max_num_merged_labels);
2339 fprintf (file, "\n");
2343 /* Dump CFG statistics on stderr. Keep extern so that it's always
2344 linked in the final executable. */
2346 void
2347 debug_cfg_stats (void)
2349 dump_cfg_stats (stderr);
2353 /* Dump the flowgraph to a .vcg FILE. */
2355 static void
2356 tree_cfg2vcg (FILE *file)
2358 edge e;
2359 edge_iterator ei;
2360 basic_block bb;
2361 const char *funcname
2362 = lang_hooks.decl_printable_name (current_function_decl, 2);
2364 /* Write the file header. */
2365 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2366 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2367 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2369 /* Write blocks and edges. */
2370 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2372 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2373 e->dest->index);
2375 if (e->flags & EDGE_FAKE)
2376 fprintf (file, " linestyle: dotted priority: 10");
2377 else
2378 fprintf (file, " linestyle: solid priority: 100");
2380 fprintf (file, " }\n");
2382 fputc ('\n', file);
2384 FOR_EACH_BB (bb)
2386 enum tree_code head_code, end_code;
2387 const char *head_name, *end_name;
2388 int head_line = 0;
2389 int end_line = 0;
2390 tree first = first_stmt (bb);
2391 tree last = last_stmt (bb);
2393 if (first)
2395 head_code = TREE_CODE (first);
2396 head_name = tree_code_name[head_code];
2397 head_line = get_lineno (first);
2399 else
2400 head_name = "no-statement";
2402 if (last)
2404 end_code = TREE_CODE (last);
2405 end_name = tree_code_name[end_code];
2406 end_line = get_lineno (last);
2408 else
2409 end_name = "no-statement";
2411 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2412 bb->index, bb->index, head_name, head_line, end_name,
2413 end_line);
2415 FOR_EACH_EDGE (e, ei, bb->succs)
2417 if (e->dest == EXIT_BLOCK_PTR)
2418 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2419 else
2420 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2422 if (e->flags & EDGE_FAKE)
2423 fprintf (file, " priority: 10 linestyle: dotted");
2424 else
2425 fprintf (file, " priority: 100 linestyle: solid");
2427 fprintf (file, " }\n");
2430 if (bb->next_bb != EXIT_BLOCK_PTR)
2431 fputc ('\n', file);
2434 fputs ("}\n\n", file);
2439 /*---------------------------------------------------------------------------
2440 Miscellaneous helpers
2441 ---------------------------------------------------------------------------*/
2443 /* Return true if T represents a stmt that always transfers control. */
2445 bool
2446 is_ctrl_stmt (tree t)
2448 return (TREE_CODE (t) == COND_EXPR
2449 || TREE_CODE (t) == SWITCH_EXPR
2450 || TREE_CODE (t) == GOTO_EXPR
2451 || TREE_CODE (t) == RETURN_EXPR
2452 || TREE_CODE (t) == RESX_EXPR);
2456 /* Return true if T is a statement that may alter the flow of control
2457 (e.g., a call to a non-returning function). */
2459 bool
2460 is_ctrl_altering_stmt (tree t)
2462 tree call;
2464 gcc_assert (t);
2465 call = get_call_expr_in (t);
2466 if (call)
2468 /* A non-pure/const CALL_EXPR alters flow control if the current
2469 function has nonlocal labels. */
2470 if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
2471 return true;
2473 /* A CALL_EXPR also alters control flow if it does not return. */
2474 if (call_expr_flags (call) & ECF_NORETURN)
2475 return true;
2478 /* OpenMP directives alter control flow. */
2479 if (OMP_DIRECTIVE_P (t))
2480 return true;
2482 /* If a statement can throw, it alters control flow. */
2483 return tree_can_throw_internal (t);
2487 /* Return true if T is a computed goto. */
2489 bool
2490 computed_goto_p (tree t)
2492 return (TREE_CODE (t) == GOTO_EXPR
2493 && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
2497 /* Return true if T is a simple local goto. */
2499 bool
2500 simple_goto_p (tree t)
2502 return (TREE_CODE (t) == GOTO_EXPR
2503 && TREE_CODE (GOTO_DESTINATION (t)) == LABEL_DECL);
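/* Editorial note, not in the original source: for "goto lab;" the
   GOTO_DESTINATION is the LABEL_DECL "lab", so simple_goto_p holds; for
   "goto *p;" the destination is the pointer operand "p", so computed_goto_p
   holds instead.  Exactly one of the two predicates is true for any
   GOTO_EXPR.  */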
2507 /* Return true if T can make an abnormal transfer of control flow.
2508 Transfers of control flow associated with EH are excluded. */
2510 bool
2511 tree_can_make_abnormal_goto (tree t)
2513 if (computed_goto_p (t))
2514 return true;
2515 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
2516 t = GIMPLE_STMT_OPERAND (t, 1);
2517 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2518 t = TREE_OPERAND (t, 0);
2519 if (TREE_CODE (t) == CALL_EXPR)
2520 return TREE_SIDE_EFFECTS (t) && current_function_has_nonlocal_label;
2521 return false;
2525 /* Return true if T should start a new basic block. PREV_T is the
2526 statement preceding T. It is used when T is a label or a case label.
2527 Labels should only start a new basic block if their previous statement
2528 wasn't a label. Otherwise, a sequence of labels would generate
2529 unnecessary basic blocks that contain only a single label. */
2531 static inline bool
2532 stmt_starts_bb_p (tree t, tree prev_t)
2534 if (t == NULL_TREE)
2535 return false;
2537 /* LABEL_EXPRs start a new basic block only if the preceding
2538 statement wasn't a label of the same type. This prevents the
2539 creation of consecutive blocks that have nothing but a single
2540 label. */
2541 if (TREE_CODE (t) == LABEL_EXPR)
2543 /* Nonlocal and computed GOTO targets always start a new block. */
2544 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
2545 || FORCED_LABEL (LABEL_EXPR_LABEL (t)))
2546 return true;
2548 if (prev_t && TREE_CODE (prev_t) == LABEL_EXPR)
2550 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
2551 return true;
2553 cfg_stats.num_merged_labels++;
2554 return false;
2556 else
2557 return true;
2560 return false;
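/* Editorial example, not in the original source: in the sequence

       L1:
       L2:
         x = 1;

   only L1 starts a new basic block; L2 is folded into the same block and
   counted in cfg_stats.num_merged_labels.  A nonlocal or forced label in
   L2's position would still start a block of its own.  */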
2564 /* Return true if T should end a basic block. */
2566 bool
2567 stmt_ends_bb_p (tree t)
2569 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2573 /* Add gotos that used to be represented implicitly in the CFG. */
2575 void
2576 disband_implicit_edges (void)
2578 basic_block bb;
2579 block_stmt_iterator last;
2580 edge e;
2581 edge_iterator ei;
2582 tree stmt, label;
2584 FOR_EACH_BB (bb)
2586 last = bsi_last (bb);
2587 stmt = last_stmt (bb);
2589 if (stmt && TREE_CODE (stmt) == COND_EXPR)
2591 /* Remove superfluous gotos from COND_EXPR branches. Moved
2592 from cfg_remove_useless_stmts here since it violates the
2593 invariants for the tree/CFG correspondence and thus fits better
2594 here where we do it anyway. */
2595 e = find_edge (bb, bb->next_bb);
2596 if (e)
2598 if (e->flags & EDGE_TRUE_VALUE)
2599 COND_EXPR_THEN (stmt) = build_empty_stmt ();
2600 else if (e->flags & EDGE_FALSE_VALUE)
2601 COND_EXPR_ELSE (stmt) = build_empty_stmt ();
2602 else
2603 gcc_unreachable ();
2604 e->flags |= EDGE_FALLTHRU;
2607 continue;
2610 if (stmt && TREE_CODE (stmt) == RETURN_EXPR)
2612 /* Remove the RETURN_EXPR if we may fall through to the exit
2613 instead. */
2614 gcc_assert (single_succ_p (bb));
2615 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
2617 if (bb->next_bb == EXIT_BLOCK_PTR
2618 && !TREE_OPERAND (stmt, 0))
2620 bsi_remove (&last, true);
2621 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
2623 continue;
2626 /* There can be no fallthru edge if the last statement is a control
2627 one. */
2628 if (stmt && is_ctrl_stmt (stmt))
2629 continue;
2631 /* Find a fallthru edge and emit the goto if necessary. */
2632 FOR_EACH_EDGE (e, ei, bb->succs)
2633 if (e->flags & EDGE_FALLTHRU)
2634 break;
2636 if (!e || e->dest == bb->next_bb)
2637 continue;
2639 gcc_assert (e->dest != EXIT_BLOCK_PTR);
2640 label = tree_block_label (e->dest);
2642 stmt = build1 (GOTO_EXPR, void_type_node, label);
2643 #ifdef USE_MAPPED_LOCATION
2644 SET_EXPR_LOCATION (stmt, e->goto_locus);
2645 #else
2646 SET_EXPR_LOCUS (stmt, e->goto_locus);
2647 #endif
2648 bsi_insert_after (&last, stmt, BSI_NEW_STMT);
2649 e->flags &= ~EDGE_FALLTHRU;
2653 /* Remove block annotations and other data structures. */
2655 void
2656 delete_tree_cfg_annotations (void)
2658 label_to_block_map = NULL;
2662 /* Return the first statement in basic block BB. */
2664 tree
2665 first_stmt (basic_block bb)
2667 block_stmt_iterator i = bsi_start (bb);
2668 return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
2672 /* Return the last statement in basic block BB. */
2674 tree
2675 last_stmt (basic_block bb)
2677 block_stmt_iterator b = bsi_last (bb);
2678 return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
2682 /* Return a pointer to the last statement in block BB. */
2684 tree *
2685 last_stmt_ptr (basic_block bb)
2687 block_stmt_iterator last = bsi_last (bb);
2688 return !bsi_end_p (last) ? bsi_stmt_ptr (last) : NULL;
2692 /* Return the last statement of an otherwise empty block. Return NULL
2693 if the block is totally empty, or if it contains more than one
2694 statement. */
2696 tree
2697 last_and_only_stmt (basic_block bb)
2699 block_stmt_iterator i = bsi_last (bb);
2700 tree last, prev;
2702 if (bsi_end_p (i))
2703 return NULL_TREE;
2705 last = bsi_stmt (i);
2706 bsi_prev (&i);
2707 if (bsi_end_p (i))
2708 return last;
2710 /* Empty statements should no longer appear in the instruction stream.
2711 Everything that might have appeared before should be deleted by
2712 remove_useless_stmts, and the optimizers should just bsi_remove
2713 instead of smashing with build_empty_stmt.
2715 Thus the only thing that should appear here in a block containing
2716 one executable statement is a label. */
2717 prev = bsi_stmt (i);
2718 if (TREE_CODE (prev) == LABEL_EXPR)
2719 return last;
2720 else
2721 return NULL_TREE;
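/* Editorial example, not in the original source: for a block consisting of

       lab:
         return x_1;

   last_and_only_stmt returns the RETURN_EXPR, while a block with two
   executable statements, or an empty block, yields NULL_TREE.  */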
2725 /* Mark BB as the basic block holding statement T. */
2727 void
2728 set_bb_for_stmt (tree t, basic_block bb)
2730 if (TREE_CODE (t) == PHI_NODE)
2731 PHI_BB (t) = bb;
2732 else if (TREE_CODE (t) == STATEMENT_LIST)
2734 tree_stmt_iterator i;
2735 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2736 set_bb_for_stmt (tsi_stmt (i), bb);
2738 else
2740 stmt_ann_t ann = get_stmt_ann (t);
2741 ann->bb = bb;
2743 /* If the statement is a label, add the label to the block-to-labels map
2744 so that we can speed up edge creation for GOTO_EXPRs. */
2745 if (TREE_CODE (t) == LABEL_EXPR)
2747 int uid;
2749 t = LABEL_EXPR_LABEL (t);
2750 uid = LABEL_DECL_UID (t);
2751 if (uid == -1)
2753 unsigned old_len = VEC_length (basic_block, label_to_block_map);
2754 LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
2755 if (old_len <= (unsigned) uid)
2757 basic_block *addr;
2758 unsigned new_len = 3 * uid / 2;
2760 VEC_safe_grow (basic_block, gc, label_to_block_map,
2761 new_len);
2762 addr = VEC_address (basic_block, label_to_block_map);
2763 memset (&addr[old_len],
2764 0, sizeof (basic_block) * (new_len - old_len));
2767 else
2768 /* We're moving an existing label. Make sure that we've
2769 removed it from the old block. */
2770 gcc_assert (!bb
2771 || !VEC_index (basic_block, label_to_block_map, uid));
2772 VEC_replace (basic_block, label_to_block_map, uid, bb);
2777 /* Faster version of set_bb_for_stmt that assumes the statement is being
2778 moved from one basic block to another.
2779 BB splitting can run into a quadratic case, so performance is quite
2780 important; since the tables are known to be big enough, change_bb_for_stmt
2781 can be inlined as a leaf function. */
2782 static inline void
2783 change_bb_for_stmt (tree t, basic_block bb)
2785 get_stmt_ann (t)->bb = bb;
2786 if (TREE_CODE (t) == LABEL_EXPR)
2787 VEC_replace (basic_block, label_to_block_map,
2788 LABEL_DECL_UID (LABEL_EXPR_LABEL (t)), bb);
2791 /* Finds iterator for STMT. */
2793 extern block_stmt_iterator
2794 bsi_for_stmt (tree stmt)
2796 block_stmt_iterator bsi;
2798 for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
2799 if (bsi_stmt (bsi) == stmt)
2800 return bsi;
2802 gcc_unreachable ();
2805 /* Mark statement T as modified, and update it. */
2806 static inline void
2807 update_modified_stmts (tree t)
2809 if (TREE_CODE (t) == STATEMENT_LIST)
2811 tree_stmt_iterator i;
2812 tree stmt;
2813 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2815 stmt = tsi_stmt (i);
2816 update_stmt_if_modified (stmt);
2819 else
2820 update_stmt_if_modified (t);
2823 /* Insert statement (or statement list) T before the statement
2824 pointed-to by iterator I. M specifies how to update iterator I
2825 after insertion (see enum bsi_iterator_update). */
2827 void
2828 bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2830 set_bb_for_stmt (t, i->bb);
2831 update_modified_stmts (t);
2832 tsi_link_before (&i->tsi, t, m);
2836 /* Insert statement (or statement list) T after the statement
2837 pointed-to by iterator I. M specifies how to update iterator I
2838 after insertion (see enum bsi_iterator_update). */
2840 void
2841 bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2843 set_bb_for_stmt (t, i->bb);
2844 update_modified_stmts (t);
2845 tsi_link_after (&i->tsi, t, m);
2849 /* Remove the statement pointed to by iterator I. The iterator is updated
2850 to the next statement.
2852 When REMOVE_EH_INFO is true we remove the statement pointed to by
2853 iterator I from the EH tables. Otherwise we do not modify the EH
2854 tables.
2856 Generally, REMOVE_EH_INFO should be true when the statement is going to
2857 be removed from the IL and not reinserted elsewhere. */
2859 void
2860 bsi_remove (block_stmt_iterator *i, bool remove_eh_info)
2862 tree t = bsi_stmt (*i);
2863 set_bb_for_stmt (t, NULL);
2864 delink_stmt_imm_use (t);
2865 tsi_delink (&i->tsi);
2866 mark_stmt_modified (t);
2867 if (remove_eh_info)
2868 remove_stmt_from_eh_region (t);
2872 /* Move the statement at FROM so it comes right after the statement at TO. */
2874 void
2875 bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
2877 tree stmt = bsi_stmt (*from);
2878 bsi_remove (from, false);
2879 bsi_insert_after (to, stmt, BSI_SAME_STMT);
2883 /* Move the statement at FROM so it comes right before the statement at TO. */
2885 void
2886 bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
2888 tree stmt = bsi_stmt (*from);
2889 bsi_remove (from, false);
2890 bsi_insert_before (to, stmt, BSI_SAME_STMT);
2894 /* Move the statement at FROM to the end of basic block BB. */
2896 void
2897 bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
2899 block_stmt_iterator last = bsi_last (bb);
2901 /* Have to check bsi_end_p because it could be an empty block. */
2902 if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
2903 bsi_move_before (from, &last);
2904 else
2905 bsi_move_after (from, &last);
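/* Editorial sketch, not part of the original file: appending a freshly built
   statement STMT to BB with this API, keeping any control statement last
   (the same guard bsi_move_to_bb_end uses above).  STMT is assumed to
   already be valid GIMPLE:

     block_stmt_iterator last = bsi_last (bb);
     if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
       bsi_insert_before (&last, stmt, BSI_SAME_STMT);
     else
       bsi_insert_after (&last, stmt, BSI_SAME_STMT);
*/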
2909 /* Replace the contents of the statement pointed to by iterator BSI
2910 with STMT. If UPDATE_EH_INFO is true, the exception handling
2911 information of the original statement is moved to the new statement. */
2913 void
2914 bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool update_eh_info)
2916 int eh_region;
2917 tree orig_stmt = bsi_stmt (*bsi);
2919 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
2920 set_bb_for_stmt (stmt, bsi->bb);
2922 /* Preserve EH region information from the original statement, if
2923 requested by the caller. */
2924 if (update_eh_info)
2926 eh_region = lookup_stmt_eh_region (orig_stmt);
2927 if (eh_region >= 0)
2929 remove_stmt_from_eh_region (orig_stmt);
2930 add_stmt_to_eh_region (stmt, eh_region);
2934 delink_stmt_imm_use (orig_stmt);
2935 *bsi_stmt_ptr (*bsi) = stmt;
2936 mark_stmt_modified (stmt);
2937 update_modified_stmts (stmt);
2941 /* Insert the statement pointed-to by BSI into edge E. Every attempt
2942 is made to place the statement in an existing basic block, but
2943 sometimes that isn't possible. When it isn't possible, the edge is
2944 split and the statement is added to the new block.
2946 In all cases, the returned *BSI points to the correct location. The
2947 return value is true if insertion should be done after the location,
2948 or false if it should be done before the location. If a new basic block
2949 has to be created, it is stored in *NEW_BB. */
2951 static bool
2952 tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
2953 basic_block *new_bb)
2955 basic_block dest, src;
2956 tree tmp;
2958 dest = e->dest;
2959 restart:
2961 /* If the destination has one predecessor which has no PHI nodes,
2962 insert there. Except for the exit block.
2964 The requirement for no PHI nodes could be relaxed. Basically we
2965 would have to examine the PHIs to prove that none of them used
2966 the value set by the statement we want to insert on E. That
2967 hardly seems worth the effort. */
2968 if (single_pred_p (dest)
2969 && ! phi_nodes (dest)
2970 && dest != EXIT_BLOCK_PTR)
2972 *bsi = bsi_start (dest);
2973 if (bsi_end_p (*bsi))
2974 return true;
2976 /* Make sure we insert after any leading labels. */
2977 tmp = bsi_stmt (*bsi);
2978 while (TREE_CODE (tmp) == LABEL_EXPR)
2980 bsi_next (bsi);
2981 if (bsi_end_p (*bsi))
2982 break;
2983 tmp = bsi_stmt (*bsi);
2986 if (bsi_end_p (*bsi))
2988 *bsi = bsi_last (dest);
2989 return true;
2991 else
2992 return false;
2995 /* If the source has one successor, the edge is not abnormal and
2996 the last statement does not end a basic block, insert there.
2997 Except for the entry block. */
2998 src = e->src;
2999 if ((e->flags & EDGE_ABNORMAL) == 0
3000 && single_succ_p (src)
3001 && src != ENTRY_BLOCK_PTR)
3003 *bsi = bsi_last (src);
3004 if (bsi_end_p (*bsi))
3005 return true;
3007 tmp = bsi_stmt (*bsi);
3008 if (!stmt_ends_bb_p (tmp))
3009 return true;
3011 /* Insert code just before returning the value. We may need to decompose
3012 the return in case it contains a non-trivial operand. */
3013 if (TREE_CODE (tmp) == RETURN_EXPR)
3015 tree op = TREE_OPERAND (tmp, 0);
3016 if (op && !is_gimple_val (op))
3018 gcc_assert (TREE_CODE (op) == GIMPLE_MODIFY_STMT);
3019 bsi_insert_before (bsi, op, BSI_NEW_STMT);
3020 TREE_OPERAND (tmp, 0) = GIMPLE_STMT_OPERAND (op, 0);
3022 bsi_prev (bsi);
3023 return true;
3027 /* Otherwise, create a new basic block, and split this edge. */
3028 dest = split_edge (e);
3029 if (new_bb)
3030 *new_bb = dest;
3031 e = single_pred_edge (dest);
3032 goto restart;
3036 /* This routine will commit all pending edge insertions, creating any new
3037 basic blocks which are necessary. */
3039 void
3040 bsi_commit_edge_inserts (void)
3042 basic_block bb;
3043 edge e;
3044 edge_iterator ei;
3046 bsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR), NULL);
3048 FOR_EACH_BB (bb)
3049 FOR_EACH_EDGE (e, ei, bb->succs)
3050 bsi_commit_one_edge_insert (e, NULL);
3054 /* Commit insertions pending at edge E. If a new block is created, set NEW_BB
3055 to this block, otherwise set it to NULL. */
3057 void
3058 bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
3060 if (new_bb)
3061 *new_bb = NULL;
3062 if (PENDING_STMT (e))
3064 block_stmt_iterator bsi;
3065 tree stmt = PENDING_STMT (e);
3067 PENDING_STMT (e) = NULL_TREE;
3069 if (tree_find_edge_insert_loc (e, &bsi, new_bb))
3070 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3071 else
3072 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3077 /* Add STMT to the pending list of edge E. No actual insertion is
3078 made until a call to bsi_commit_edge_inserts () is made. */
3080 void
3081 bsi_insert_on_edge (edge e, tree stmt)
3083 append_to_statement_list (stmt, &PENDING_STMT (e));
3086 /* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts. If a new
3087 block has to be created, it is returned. */
3089 basic_block
3090 bsi_insert_on_edge_immediate (edge e, tree stmt)
3092 block_stmt_iterator bsi;
3093 basic_block new_bb = NULL;
3095 gcc_assert (!PENDING_STMT (e));
3097 if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
3098 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3099 else
3100 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3102 return new_bb;
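/* Editorial note, not in the original source: the usual pattern for the
   deferred variant is to queue statements on any number of edges and flush
   them all at once:

     bsi_insert_on_edge (e, stmt);
     ...
     bsi_commit_edge_inserts ();

   The immediate variant above is preferred when the caller must know right
   away whether a new basic block was created, e.g. to update dominator or
   loop information by hand.  */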
3105 /*---------------------------------------------------------------------------
3106 Tree specific functions for CFG manipulation
3107 ---------------------------------------------------------------------------*/
3109 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
3111 static void
3112 reinstall_phi_args (edge new_edge, edge old_edge)
3114 tree var, phi;
3116 if (!PENDING_STMT (old_edge))
3117 return;
3119 for (var = PENDING_STMT (old_edge), phi = phi_nodes (new_edge->dest);
3120 var && phi;
3121 var = TREE_CHAIN (var), phi = PHI_CHAIN (phi))
3123 tree result = TREE_PURPOSE (var);
3124 tree arg = TREE_VALUE (var);
3126 gcc_assert (result == PHI_RESULT (phi));
3128 add_phi_arg (phi, arg, new_edge);
3131 PENDING_STMT (old_edge) = NULL;
3134 /* Returns the basic block after which the new basic block created
3135 by splitting edge EDGE_IN should be placed. Tries to keep the new block
3136 near its "logical" location. This is of most help to humans looking
3137 at debugging dumps. */
3139 static basic_block
3140 split_edge_bb_loc (edge edge_in)
3142 basic_block dest = edge_in->dest;
3144 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
3145 return edge_in->src;
3146 else
3147 return dest->prev_bb;
3150 /* Split a (typically critical) edge EDGE_IN. Return the new block.
3151 Abort on abnormal edges. */
3153 static basic_block
3154 tree_split_edge (edge edge_in)
3156 basic_block new_bb, after_bb, dest;
3157 edge new_edge, e;
3159 /* Abnormal edges cannot be split. */
3160 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
3162 dest = edge_in->dest;
3164 after_bb = split_edge_bb_loc (edge_in);
3166 new_bb = create_empty_bb (after_bb);
3167 new_bb->frequency = EDGE_FREQUENCY (edge_in);
3168 new_bb->count = edge_in->count;
3169 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
3170 new_edge->probability = REG_BR_PROB_BASE;
3171 new_edge->count = edge_in->count;
3173 e = redirect_edge_and_branch (edge_in, new_bb);
3174 gcc_assert (e);
3175 reinstall_phi_args (new_edge, e);
3177 return new_bb;
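/* Editorial note, not in the original source: for a critical edge E from a
   multi-successor block A to a multi-predecessor block B, the result is a
   new empty block N placed near A or B, with the redirected edge A->N
   keeping E's branch flags and N->B being a plain fallthru edge.  The PHI
   arguments that B held for the original edge are re-installed on N->B by
   reinstall_phi_args above.  */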
3181 /* Return true when BB has label LABEL in it. */
3183 static bool
3184 has_label_p (basic_block bb, tree label)
3186 block_stmt_iterator bsi;
3188 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3190 tree stmt = bsi_stmt (bsi);
3192 if (TREE_CODE (stmt) != LABEL_EXPR)
3193 return false;
3194 if (LABEL_EXPR_LABEL (stmt) == label)
3195 return true;
3197 return false;
3201 /* Callback for walk_tree, check that all elements with address taken are
3202 properly noticed as such. DATA is nonnull if and only if *TP was
3203 walked from within a PHI node. */
3205 static tree
3206 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3208 tree t = *tp, x;
3209 bool in_phi = (data != NULL);
3211 if (TYPE_P (t))
3212 *walk_subtrees = 0;
3214 /* Check operand N for being valid GIMPLE and give error MSG if not. */
3215 #define CHECK_OP(N, MSG) \
3216 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
3217 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
3219 switch (TREE_CODE (t))
3221 case SSA_NAME:
3222 if (SSA_NAME_IN_FREE_LIST (t))
3224 error ("SSA name in freelist but still referenced");
3225 return *tp;
3227 break;
3229 case ASSERT_EXPR:
3230 x = fold (ASSERT_EXPR_COND (t));
3231 if (x == boolean_false_node)
3233 error ("ASSERT_EXPR with an always-false condition");
3234 return *tp;
3236 break;
3238 case MODIFY_EXPR:
3239 gcc_unreachable ();
3241 case GIMPLE_MODIFY_STMT:
3242 x = GIMPLE_STMT_OPERAND (t, 0);
3243 if (TREE_CODE (x) == BIT_FIELD_REF
3244 && is_gimple_reg (TREE_OPERAND (x, 0)))
3246 error ("GIMPLE register modified with BIT_FIELD_REF");
3247 return t;
3249 break;
3251 case ADDR_EXPR:
3253 bool old_invariant;
3254 bool old_constant;
3255 bool old_side_effects;
3256 bool new_invariant;
3257 bool new_constant;
3258 bool new_side_effects;
3260 /* ??? tree-ssa-alias.c may have overlooked dead PHI nodes, missing
3261 dead PHIs that take the address of something. But if the PHI
3262 result is dead, the fact that it takes the address of anything
3263 is irrelevant. Because we can not tell from here if a PHI result
3264 is dead, we just skip this check for PHIs altogether. This means
3265 we may be missing "valid" checks, but what can you do?
3266 This was PR19217. */
3267 if (in_phi)
3268 break;
3270 old_invariant = TREE_INVARIANT (t);
3271 old_constant = TREE_CONSTANT (t);
3272 old_side_effects = TREE_SIDE_EFFECTS (t);
3274 recompute_tree_invariant_for_addr_expr (t);
3275 new_invariant = TREE_INVARIANT (t);
3276 new_side_effects = TREE_SIDE_EFFECTS (t);
3277 new_constant = TREE_CONSTANT (t);
3279 if (old_invariant != new_invariant)
3281 error ("invariant not recomputed when ADDR_EXPR changed");
3282 return t;
3285 if (old_constant != new_constant)
3287 error ("constant not recomputed when ADDR_EXPR changed");
3288 return t;
3290 if (old_side_effects != new_side_effects)
3292 error ("side effects not recomputed when ADDR_EXPR changed");
3293 return t;
3296 /* Skip any references (they will be checked when we recurse down the
3297 tree) and ensure that any variable used as a prefix is marked
3298 addressable. */
3299 for (x = TREE_OPERAND (t, 0);
3300 handled_component_p (x);
3301 x = TREE_OPERAND (x, 0))
3304 if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
3305 return NULL;
3306 if (!TREE_ADDRESSABLE (x))
3308 error ("address taken, but ADDRESSABLE bit not set");
3309 return x;
3311 break;
3314 case COND_EXPR:
3315 x = COND_EXPR_COND (t);
3316 if (TREE_CODE (TREE_TYPE (x)) != BOOLEAN_TYPE)
3318 error ("non-boolean used in condition");
3319 return x;
3321 if (!is_gimple_condexpr (x))
3323 error ("invalid conditional operand");
3324 return x;
3326 break;
3328 case NOP_EXPR:
3329 case CONVERT_EXPR:
3330 case FIX_TRUNC_EXPR:
3331 case FLOAT_EXPR:
3332 case NEGATE_EXPR:
3333 case ABS_EXPR:
3334 case BIT_NOT_EXPR:
3335 case NON_LVALUE_EXPR:
3336 case TRUTH_NOT_EXPR:
3337 CHECK_OP (0, "invalid operand to unary operator");
3338 break;
3340 case REALPART_EXPR:
3341 case IMAGPART_EXPR:
3342 case COMPONENT_REF:
3343 case ARRAY_REF:
3344 case ARRAY_RANGE_REF:
3345 case BIT_FIELD_REF:
3346 case VIEW_CONVERT_EXPR:
3347 /* We have a nest of references. Verify that each of the operands
3348 that determine where to reference is either a constant or a variable,
3349 verify that the base is valid, and then show we've already checked
3350 the subtrees. */
3351 while (handled_component_p (t))
3353 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3354 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3355 else if (TREE_CODE (t) == ARRAY_REF
3356 || TREE_CODE (t) == ARRAY_RANGE_REF)
3358 CHECK_OP (1, "invalid array index");
3359 if (TREE_OPERAND (t, 2))
3360 CHECK_OP (2, "invalid array lower bound");
3361 if (TREE_OPERAND (t, 3))
3362 CHECK_OP (3, "invalid array stride");
3364 else if (TREE_CODE (t) == BIT_FIELD_REF)
3366 CHECK_OP (1, "invalid operand to BIT_FIELD_REF");
3367 CHECK_OP (2, "invalid operand to BIT_FIELD_REF");
3370 t = TREE_OPERAND (t, 0);
3373 if (!CONSTANT_CLASS_P (t) && !is_gimple_lvalue (t))
3375 error ("invalid reference prefix");
3376 return t;
3378 *walk_subtrees = 0;
3379 break;
3381 case LT_EXPR:
3382 case LE_EXPR:
3383 case GT_EXPR:
3384 case GE_EXPR:
3385 case EQ_EXPR:
3386 case NE_EXPR:
3387 case UNORDERED_EXPR:
3388 case ORDERED_EXPR:
3389 case UNLT_EXPR:
3390 case UNLE_EXPR:
3391 case UNGT_EXPR:
3392 case UNGE_EXPR:
3393 case UNEQ_EXPR:
3394 case LTGT_EXPR:
3395 case PLUS_EXPR:
3396 case MINUS_EXPR:
3397 case MULT_EXPR:
3398 case TRUNC_DIV_EXPR:
3399 case CEIL_DIV_EXPR:
3400 case FLOOR_DIV_EXPR:
3401 case ROUND_DIV_EXPR:
3402 case TRUNC_MOD_EXPR:
3403 case CEIL_MOD_EXPR:
3404 case FLOOR_MOD_EXPR:
3405 case ROUND_MOD_EXPR:
3406 case RDIV_EXPR:
3407 case EXACT_DIV_EXPR:
3408 case MIN_EXPR:
3409 case MAX_EXPR:
3410 case LSHIFT_EXPR:
3411 case RSHIFT_EXPR:
3412 case LROTATE_EXPR:
3413 case RROTATE_EXPR:
3414 case BIT_IOR_EXPR:
3415 case BIT_XOR_EXPR:
3416 case BIT_AND_EXPR:
3417 CHECK_OP (0, "invalid operand to binary operator");
3418 CHECK_OP (1, "invalid operand to binary operator");
3419 break;
3421 case CONSTRUCTOR:
3422 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3423 *walk_subtrees = 0;
3424 break;
3426 default:
3427 break;
3429 return NULL;
3431 #undef CHECK_OP
3435 /* Verify STMT, return true if STMT is not in GIMPLE form.
3436 TODO: Implement type checking. */
3438 static bool
3439 verify_stmt (tree stmt, bool last_in_block)
3441 tree addr;
3443 if (OMP_DIRECTIVE_P (stmt))
3445 /* OpenMP directives are validated by the FE and never operated
3446 on by the optimizers. Furthermore, OMP_FOR may contain
3447 non-gimple expressions when the main index variable has had
3448 its address taken. This does not affect the loop itself
3449 because the header of an OMP_FOR is merely used to determine
3450 how to set up the parallel iteration. */
3451 return false;
3454 if (!is_gimple_stmt (stmt))
3456 error ("is not a valid GIMPLE statement");
3457 goto fail;
3460 addr = walk_tree (&stmt, verify_expr, NULL, NULL);
3461 if (addr)
3463 debug_generic_stmt (addr);
3464 return true;
3467 /* If the statement is marked as part of an EH region, then it is
3468 expected that the statement could throw. Verify that when we
3469 have optimizations that simplify statements such that we prove
3470 that they cannot throw, that we update other data structures
3471 to match. */
3472 if (lookup_stmt_eh_region (stmt) >= 0)
3474 if (!tree_could_throw_p (stmt))
3476 error ("statement marked for throw, but doesn%'t");
3477 goto fail;
3479 if (!last_in_block && tree_can_throw_internal (stmt))
3481 error ("statement marked for throw in middle of block");
3482 goto fail;
3486 return false;
3488 fail:
3489 debug_generic_stmt (stmt);
3490 return true;
3494 /* Return true when T can be shared. */
3496 static bool
3497 tree_node_can_be_shared (tree t)
3499 if (IS_TYPE_OR_DECL_P (t)
3500 || is_gimple_min_invariant (t)
3501 || TREE_CODE (t) == SSA_NAME
3502 || t == error_mark_node
3503 || TREE_CODE (t) == IDENTIFIER_NODE)
3504 return true;
3506 if (TREE_CODE (t) == CASE_LABEL_EXPR)
3507 return true;
3509 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3510 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
3511 || TREE_CODE (t) == COMPONENT_REF
3512 || TREE_CODE (t) == REALPART_EXPR
3513 || TREE_CODE (t) == IMAGPART_EXPR)
3514 t = TREE_OPERAND (t, 0);
3516 if (DECL_P (t))
3517 return true;
3519 return false;
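/* Editorial example, not in the original source: an SSA_NAME, an INTEGER_CST,
   a _DECL or a type node may appear in any number of statements, so sharing
   them is harmless.  A reference such as a[i_1], an ARRAY_REF whose index is
   not a gimple invariant, is not shareable and must be unshared per use;
   verify_node_sharing below reports a violation of this rule.  */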
3523 /* Called via walk_tree. Verify tree sharing. */
3525 static tree
3526 verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
3528 htab_t htab = (htab_t) data;
3529 void **slot;
3531 if (tree_node_can_be_shared (*tp))
3533 *walk_subtrees = false;
3534 return NULL;
3537 slot = htab_find_slot (htab, *tp, INSERT);
3538 if (*slot)
3539 return (tree) *slot;
3540 *slot = *tp;
3542 return NULL;
3546 /* Helper function for verify_gimple_tuples. */
3548 static tree
3549 verify_gimple_tuples_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
3550 void *data ATTRIBUTE_UNUSED)
3552 switch (TREE_CODE (*tp))
3554 case MODIFY_EXPR:
3555 error ("unexpected non-tuple");
3556 debug_tree (*tp);
3557 gcc_unreachable ();
3558 return NULL_TREE;
3560 default:
3561 return NULL_TREE;
3565 /* Verify that there are no trees that should have been converted to
3566 gimple tuples. Return true if T contains a node that should have
3567 been converted to a gimple tuple, but hasn't. */
3569 static bool
3570 verify_gimple_tuples (tree t)
3572 return walk_tree (&t, verify_gimple_tuples_1, NULL, NULL) != NULL;
3575 /* Verify the GIMPLE statement chain. */
3577 void
3578 verify_stmts (void)
3580 basic_block bb;
3581 block_stmt_iterator bsi;
3582 bool err = false;
3583 htab_t htab;
3584 tree addr;
3586 timevar_push (TV_TREE_STMT_VERIFY);
3587 htab = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
3589 FOR_EACH_BB (bb)
3591 tree phi;
3592 int i;
3594 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
3596 int phi_num_args = PHI_NUM_ARGS (phi);
3598 if (bb_for_stmt (phi) != bb)
3600 error ("bb_for_stmt (phi) is set to a wrong basic block");
3601 err |= true;
3604 for (i = 0; i < phi_num_args; i++)
3606 tree t = PHI_ARG_DEF (phi, i);
3607 tree addr;
3609 /* Addressable variables do have SSA_NAMEs but they
3610 are not considered gimple values. */
3611 if (TREE_CODE (t) != SSA_NAME
3612 && TREE_CODE (t) != FUNCTION_DECL
3613 && !is_gimple_val (t))
3615 error ("PHI def is not a GIMPLE value");
3616 debug_generic_stmt (phi);
3617 debug_generic_stmt (t);
3618 err |= true;
3621 addr = walk_tree (&t, verify_expr, (void *) 1, NULL);
3622 if (addr)
3624 debug_generic_stmt (addr);
3625 err |= true;
3628 addr = walk_tree (&t, verify_node_sharing, htab, NULL);
3629 if (addr)
3631 error ("incorrect sharing of tree nodes");
3632 debug_generic_stmt (phi);
3633 debug_generic_stmt (addr);
3634 err |= true;
3639 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
3641 tree stmt = bsi_stmt (bsi);
3643 err |= verify_gimple_tuples (stmt);
3645 if (bb_for_stmt (stmt) != bb)
3647 error ("bb_for_stmt (stmt) is set to a wrong basic block");
3648 err |= true;
3651 bsi_next (&bsi);
3652 err |= verify_stmt (stmt, bsi_end_p (bsi));
3653 addr = walk_tree (&stmt, verify_node_sharing, htab, NULL);
3654 if (addr)
3656 error ("incorrect sharing of tree nodes");
3657 debug_generic_stmt (stmt);
3658 debug_generic_stmt (addr);
3659 err |= true;
3664 if (err)
3665 internal_error ("verify_stmts failed");
3667 htab_delete (htab);
3668 timevar_pop (TV_TREE_STMT_VERIFY);
3672 /* Verifies that the flow information is OK. */
3674 static int
3675 tree_verify_flow_info (void)
3677 int err = 0;
3678 basic_block bb;
3679 block_stmt_iterator bsi;
3680 tree stmt;
3681 edge e;
3682 edge_iterator ei;
3684 if (ENTRY_BLOCK_PTR->stmt_list)
3686 error ("ENTRY_BLOCK has a statement list associated with it");
3687 err = 1;
3690 if (EXIT_BLOCK_PTR->stmt_list)
3692 error ("EXIT_BLOCK has a statement list associated with it");
3693 err = 1;
3696 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
3697 if (e->flags & EDGE_FALLTHRU)
3699 error ("fallthru to exit from bb %d", e->src->index);
3700 err = 1;
3703 FOR_EACH_BB (bb)
3705 bool found_ctrl_stmt = false;
3707 stmt = NULL_TREE;
3709 /* Skip labels at the start of the basic block. */
3710 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3712 tree prev_stmt = stmt;
3714 stmt = bsi_stmt (bsi);
3716 if (TREE_CODE (stmt) != LABEL_EXPR)
3717 break;
3719 if (prev_stmt && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
3721 error ("nonlocal label ");
3722 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3723 fprintf (stderr, " is not first in a sequence of labels in bb %d",
3724 bb->index);
3725 err = 1;
3728 if (label_to_block (LABEL_EXPR_LABEL (stmt)) != bb)
3730 error ("label ");
3731 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3732 fprintf (stderr, " to block does not match in bb %d",
3733 bb->index);
3734 err = 1;
3737 if (decl_function_context (LABEL_EXPR_LABEL (stmt))
3738 != current_function_decl)
3740 error ("label ");
3741 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3742 fprintf (stderr, " has incorrect context in bb %d",
3743 bb->index);
3744 err = 1;
3748 /* Verify that the body of basic block BB is free of control flow. */
3749 for (; !bsi_end_p (bsi); bsi_next (&bsi))
3751 tree stmt = bsi_stmt (bsi);
3753 if (found_ctrl_stmt)
3755 error ("control flow in the middle of basic block %d",
3756 bb->index);
3757 err = 1;
3760 if (stmt_ends_bb_p (stmt))
3761 found_ctrl_stmt = true;
3763 if (TREE_CODE (stmt) == LABEL_EXPR)
3765 error ("label ");
3766 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3767 fprintf (stderr, " in the middle of basic block %d", bb->index);
3768 err = 1;
3772 bsi = bsi_last (bb);
3773 if (bsi_end_p (bsi))
3774 continue;
3776 stmt = bsi_stmt (bsi);
3778 err |= verify_eh_edges (stmt);
3780 if (is_ctrl_stmt (stmt))
3782 FOR_EACH_EDGE (e, ei, bb->succs)
3783 if (e->flags & EDGE_FALLTHRU)
3785 error ("fallthru edge after a control statement in bb %d",
3786 bb->index);
3787 err = 1;
3791 if (TREE_CODE (stmt) != COND_EXPR)
3793 /* Verify that no outgoing edge has the EDGE_TRUE/FALSE_VALUE flags
3794 set after anything other than a COND_EXPR. */
3795 FOR_EACH_EDGE (e, ei, bb->succs)
3796 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
3798 error ("true/false edge after a non-COND_EXPR in bb %d",
3799 bb->index);
3800 err = 1;
3804 switch (TREE_CODE (stmt))
3806 case COND_EXPR:
3808 edge true_edge;
3809 edge false_edge;
3810 if (TREE_CODE (COND_EXPR_THEN (stmt)) != GOTO_EXPR
3811 || TREE_CODE (COND_EXPR_ELSE (stmt)) != GOTO_EXPR)
3813 error ("structured COND_EXPR at the end of bb %d", bb->index);
3814 err = 1;
3817 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
3819 if (!true_edge || !false_edge
3820 || !(true_edge->flags & EDGE_TRUE_VALUE)
3821 || !(false_edge->flags & EDGE_FALSE_VALUE)
3822 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3823 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3824 || EDGE_COUNT (bb->succs) >= 3)
3826 error ("wrong outgoing edge flags at end of bb %d",
3827 bb->index);
3828 err = 1;
3831 if (!has_label_p (true_edge->dest,
3832 GOTO_DESTINATION (COND_EXPR_THEN (stmt))))
3834 error ("%<then%> label does not match edge at end of bb %d",
3835 bb->index);
3836 err = 1;
3839 if (!has_label_p (false_edge->dest,
3840 GOTO_DESTINATION (COND_EXPR_ELSE (stmt))))
3842 error ("%<else%> label does not match edge at end of bb %d",
3843 bb->index);
3844 err = 1;
3847 break;
3849 case GOTO_EXPR:
3850 if (simple_goto_p (stmt))
3852 error ("explicit goto at end of bb %d", bb->index);
3853 err = 1;
3855 else
3857 /* FIXME. We should double check that the labels in the
3858 destination blocks have their address taken. */
3859 FOR_EACH_EDGE (e, ei, bb->succs)
3860 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
3861 | EDGE_FALSE_VALUE))
3862 || !(e->flags & EDGE_ABNORMAL))
3864 error ("wrong outgoing edge flags at end of bb %d",
3865 bb->index);
3866 err = 1;
3869 break;
3871 case RETURN_EXPR:
3872 if (!single_succ_p (bb)
3873 || (single_succ_edge (bb)->flags
3874 & (EDGE_FALLTHRU | EDGE_ABNORMAL
3875 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3877 error ("wrong outgoing edge flags at end of bb %d", bb->index);
3878 err = 1;
3880 if (single_succ (bb) != EXIT_BLOCK_PTR)
3882 error ("return edge does not point to exit in bb %d",
3883 bb->index);
3884 err = 1;
3886 break;
3888 case SWITCH_EXPR:
3890 tree prev;
3891 edge e;
3892 size_t i, n;
3893 tree vec;
3895 vec = SWITCH_LABELS (stmt);
3896 n = TREE_VEC_LENGTH (vec);
3898 /* Mark all the destination basic blocks. */
3899 for (i = 0; i < n; ++i)
3901 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3902 basic_block label_bb = label_to_block (lab);
3904 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
3905 label_bb->aux = (void *)1;
3908 /* Verify that the case labels are sorted. */
3909 prev = TREE_VEC_ELT (vec, 0);
3910 for (i = 1; i < n - 1; ++i)
3912 tree c = TREE_VEC_ELT (vec, i);
3913 if (! CASE_LOW (c))
3915 error ("found default case not at end of case vector");
3916 err = 1;
3917 continue;
3919 if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
3921 error ("case labels not sorted: ");
3922 print_generic_expr (stderr, prev, 0);
3923 fprintf (stderr," is greater than ");
3924 print_generic_expr (stderr, c, 0);
3925 fprintf (stderr," but comes before it.\n");
3926 err = 1;
3928 prev = c;
3930 if (CASE_LOW (TREE_VEC_ELT (vec, n - 1)))
3932 error ("no default case found at end of case vector");
3933 err = 1;
3936 FOR_EACH_EDGE (e, ei, bb->succs)
3938 if (!e->dest->aux)
3940 error ("extra outgoing edge %d->%d",
3941 bb->index, e->dest->index);
3942 err = 1;
3944 e->dest->aux = (void *)2;
3945 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
3946 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3948 error ("wrong outgoing edge flags at end of bb %d",
3949 bb->index);
3950 err = 1;
3954 /* Check that we have all of them. */
3955 for (i = 0; i < n; ++i)
3957 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3958 basic_block label_bb = label_to_block (lab);
3960 if (label_bb->aux != (void *)2)
3962 error ("missing edge %i->%i",
3963 bb->index, label_bb->index);
3964 err = 1;
3968 FOR_EACH_EDGE (e, ei, bb->succs)
3969 e->dest->aux = (void *)0;
3972 default: ;
3976 if (dom_computed[CDI_DOMINATORS] >= DOM_NO_FAST_QUERY)
3977 verify_dominators (CDI_DOMINATORS);
3979 return err;
3983 /* Updates phi nodes after creating a forwarder block joined
3984 by edge FALLTHRU. */
3986 static void
3987 tree_make_forwarder_block (edge fallthru)
3989 edge e;
3990 edge_iterator ei;
3991 basic_block dummy, bb;
3992 tree phi, new_phi, var;
3994 dummy = fallthru->src;
3995 bb = fallthru->dest;
3997 if (single_pred_p (bb))
3998 return;
4000 /* If we redirected a branch we must create new phi nodes at the
4001 start of BB. */
4002 for (phi = phi_nodes (dummy); phi; phi = PHI_CHAIN (phi))
4004 var = PHI_RESULT (phi);
4005 new_phi = create_phi_node (var, bb);
4006 SSA_NAME_DEF_STMT (var) = new_phi;
4007 SET_PHI_RESULT (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
4008 add_phi_arg (new_phi, PHI_RESULT (phi), fallthru);
4011 /* Ensure that the PHI node chain is in the same order. */
4012 set_phi_nodes (bb, phi_reverse (phi_nodes (bb)));
4014 /* Add the arguments we have stored on edges. */
4015 FOR_EACH_EDGE (e, ei, bb->preds)
4017 if (e == fallthru)
4018 continue;
4020 flush_pending_stmts (e);
4025 /* Return a non-special label in the head of basic block BB.
4026 Create one if it doesn't exist. */
4028 tree
4029 tree_block_label (basic_block bb)
4031 block_stmt_iterator i, s = bsi_start (bb);
4032 bool first = true;
4033 tree label, stmt;
4035 for (i = s; !bsi_end_p (i); first = false, bsi_next (&i))
4037 stmt = bsi_stmt (i);
4038 if (TREE_CODE (stmt) != LABEL_EXPR)
4039 break;
4040 label = LABEL_EXPR_LABEL (stmt);
4041 if (!DECL_NONLOCAL (label))
4043 if (!first)
4044 bsi_move_before (&i, &s);
4045 return label;
4049 label = create_artificial_label ();
4050 stmt = build1 (LABEL_EXPR, void_type_node, label);
4051 bsi_insert_before (&s, stmt, BSI_NEW_STMT);
4052 return label;
4056 /* Attempt to perform edge redirection by replacing a possibly complex
4057 jump instruction by a goto or by removing the jump completely.
4058 This can apply only if all edges now point to the same block. The
4059 parameters and return values are equivalent to
4060 redirect_edge_and_branch. */
4062 static edge
4063 tree_try_redirect_by_replacing_jump (edge e, basic_block target)
4065 basic_block src = e->src;
4066 block_stmt_iterator b;
4067 tree stmt;
4069 /* We can replace or remove a complex jump only when we have exactly
4070 two edges. */
4071 if (EDGE_COUNT (src->succs) != 2
4072 /* Verify that all targets will be TARGET. Specifically, the
4073 edge that is not E must also go to TARGET. */
4074 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4075 return NULL;
4077 b = bsi_last (src);
4078 if (bsi_end_p (b))
4079 return NULL;
4080 stmt = bsi_stmt (b);
4082 if (TREE_CODE (stmt) == COND_EXPR
4083 || TREE_CODE (stmt) == SWITCH_EXPR)
4085 bsi_remove (&b, true);
4086 e = ssa_redirect_edge (e, target);
4087 e->flags = EDGE_FALLTHRU;
4088 return e;
4091 return NULL;
4095 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4096 edge representing the redirected branch. */
4098 static edge
4099 tree_redirect_edge_and_branch (edge e, basic_block dest)
4101 basic_block bb = e->src;
4102 block_stmt_iterator bsi;
4103 edge ret;
4104 tree label, stmt;
4106 if (e->flags & EDGE_ABNORMAL)
4107 return NULL;
4109 if (e->src != ENTRY_BLOCK_PTR
4110 && (ret = tree_try_redirect_by_replacing_jump (e, dest)))
4111 return ret;
4113 if (e->dest == dest)
4114 return NULL;
4116 label = tree_block_label (dest);
4118 bsi = bsi_last (bb);
4119 stmt = bsi_end_p (bsi) ? NULL : bsi_stmt (bsi);
4121 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
4123 case COND_EXPR:
4124 stmt = (e->flags & EDGE_TRUE_VALUE
4125 ? COND_EXPR_THEN (stmt)
4126 : COND_EXPR_ELSE (stmt));
4127 GOTO_DESTINATION (stmt) = label;
4128 break;
4130 case GOTO_EXPR:
4131 /* No non-abnormal edges should lead from a non-simple goto, and
4132 simple ones should be represented implicitly. */
4133 gcc_unreachable ();
4135 case SWITCH_EXPR:
4137 tree cases = get_cases_for_edge (e, stmt);
4139 /* If we have a list of cases associated with E, then use it
4140 as it's a lot faster than walking the entire case vector. */
4141 if (cases)
4143 edge e2 = find_edge (e->src, dest);
4144 tree last, first;
4146 first = cases;
4147 while (cases)
4149 last = cases;
4150 CASE_LABEL (cases) = label;
4151 cases = TREE_CHAIN (cases);
4154 /* If there was already an edge in the CFG, then we need
4155 to move all the cases associated with E to E2. */
4156 if (e2)
4158 tree cases2 = get_cases_for_edge (e2, stmt);
4160 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4161 TREE_CHAIN (cases2) = first;
4164 else
4166 tree vec = SWITCH_LABELS (stmt);
4167 size_t i, n = TREE_VEC_LENGTH (vec);
4169 for (i = 0; i < n; i++)
4171 tree elt = TREE_VEC_ELT (vec, i);
4173 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4174 CASE_LABEL (elt) = label;
4178 break;
4181 case RETURN_EXPR:
4182 bsi_remove (&bsi, true);
4183 e->flags |= EDGE_FALLTHRU;
4184 break;
4186 default:
4187 /* Otherwise it must be a fallthru edge, and we don't need to
4188 do anything besides redirecting it. */
4189 gcc_assert (e->flags & EDGE_FALLTHRU);
4190 break;
4193 /* Update/insert PHI nodes as necessary. */
4195 /* Now update the edges in the CFG. */
4196 e = ssa_redirect_edge (e, dest);
4198 return e;
4202 /* Simple wrapper, as we can always redirect fallthru edges. */
4204 static basic_block
4205 tree_redirect_edge_and_branch_force (edge e, basic_block dest)
4207 e = tree_redirect_edge_and_branch (e, dest);
4208 gcc_assert (e);
4210 return NULL;
4214 /* Splits basic block BB after statement STMT (but at least after the
4215 labels). If STMT is NULL, BB is split just after the labels. */
4217 static basic_block
4218 tree_split_block (basic_block bb, void *stmt)
4220 block_stmt_iterator bsi;
4221 tree_stmt_iterator tsi_tgt;
4222 tree act;
4223 basic_block new_bb;
4224 edge e;
4225 edge_iterator ei;
4227 new_bb = create_empty_bb (bb);
4229 /* Redirect the outgoing edges. */
4230 new_bb->succs = bb->succs;
4231 bb->succs = NULL;
4232 FOR_EACH_EDGE (e, ei, new_bb->succs)
4233 e->src = new_bb;
4235 if (stmt && TREE_CODE ((tree) stmt) == LABEL_EXPR)
4236 stmt = NULL;
4238 /* Move everything from BSI to the new basic block. */
4239 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4241 act = bsi_stmt (bsi);
4242 if (TREE_CODE (act) == LABEL_EXPR)
4243 continue;
4245 if (!stmt)
4246 break;
4248 if (stmt == act)
4250 bsi_next (&bsi);
4251 break;
4255 if (bsi_end_p (bsi))
4256 return new_bb;
4258 /* Split the statement list - avoid re-creating new containers as this
4259 brings ugly quadratic memory consumption in the inliner.
4260 (We are still quadratic since we need to update stmt BB pointers,
4261 sadly.) */
4262 new_bb->stmt_list = tsi_split_statement_list_before (&bsi.tsi);
4263 for (tsi_tgt = tsi_start (new_bb->stmt_list);
4264 !tsi_end_p (tsi_tgt); tsi_next (&tsi_tgt))
4265 change_bb_for_stmt (tsi_stmt (tsi_tgt), new_bb);
4267 return new_bb;
4271 /* Moves basic block BB after block AFTER. */
4273 static bool
4274 tree_move_block_after (basic_block bb, basic_block after)
4276 if (bb->prev_bb == after)
4277 return true;
4279 unlink_block (bb);
4280 link_block (bb, after);
4282 return true;
4286 /* Return true if basic_block can be duplicated. */
4288 static bool
4289 tree_can_duplicate_bb_p (basic_block bb ATTRIBUTE_UNUSED)
4291 return true;
4295 /* Create a duplicate of the basic block BB. NOTE: This does not
4296 preserve SSA form. */
4298 static basic_block
4299 tree_duplicate_bb (basic_block bb)
4301 basic_block new_bb;
4302 block_stmt_iterator bsi, bsi_tgt;
4303 tree phi;
4305 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
4307 /* Copy the PHI nodes. We ignore PHI node arguments here because
4308 the incoming edges have not been set up yet. */
4309 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4311 tree copy = create_phi_node (PHI_RESULT (phi), new_bb);
4312 create_new_def_for (PHI_RESULT (copy), copy, PHI_RESULT_PTR (copy));
4315 /* Keep the chain of PHI nodes in the same order so that they can be
4316 updated by ssa_redirect_edge. */
4317 set_phi_nodes (new_bb, phi_reverse (phi_nodes (new_bb)));
4319 bsi_tgt = bsi_start (new_bb);
4320 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4322 def_operand_p def_p;
4323 ssa_op_iter op_iter;
4324 tree stmt, copy;
4325 int region;
4327 stmt = bsi_stmt (bsi);
4328 if (TREE_CODE (stmt) == LABEL_EXPR)
4329 continue;
4331 /* Create a new copy of STMT and duplicate STMT's virtual
4332 operands. */
4333 copy = unshare_expr (stmt);
4334 bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
4335 copy_virtual_operands (copy, stmt);
4336 region = lookup_stmt_eh_region (stmt);
4337 if (region >= 0)
4338 add_stmt_to_eh_region (copy, region);
4340 /* Create new names for all the definitions created by COPY and
4341 add replacement mappings for each new name. */
4342 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
4343 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
4346 return new_bb;
4350 /* Basic block BB_COPY was created by code duplication. Add phi node
4351 arguments for edges going out of BB_COPY. The blocks that were
4352 duplicated have BB_DUPLICATED set. */
4354 void
4355 add_phi_args_after_copy_bb (basic_block bb_copy)
4357 basic_block bb, dest;
4358 edge e, e_copy;
4359 edge_iterator ei;
4360 tree phi, phi_copy, phi_next, def;
4362 bb = get_bb_original (bb_copy);
4364 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
4366 if (!phi_nodes (e_copy->dest))
4367 continue;
4369 if (e_copy->dest->flags & BB_DUPLICATED)
4370 dest = get_bb_original (e_copy->dest);
4371 else
4372 dest = e_copy->dest;
4374 e = find_edge (bb, dest);
4375 if (!e)
4377 /* During loop unrolling the target of the latch edge is copied.
4378 In this case we are not looking for the edge to DEST, but for the
4379 edge to the duplicated block whose original was DEST. */
4380 FOR_EACH_EDGE (e, ei, bb->succs)
4381 if ((e->dest->flags & BB_DUPLICATED)
4382 && get_bb_original (e->dest) == dest)
4383 break;
4385 gcc_assert (e != NULL);
4388 for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
4389 phi;
4390 phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
4392 phi_next = PHI_CHAIN (phi);
4393 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4394 add_phi_arg (phi_copy, def, e_copy);
4399 /* Blocks in REGION_COPY array of length N_REGION were created by
4400 duplication of basic blocks. Add phi node arguments for edges
4401 going from these blocks. */
4403 void
4404 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region)
4406 unsigned i;
4408 for (i = 0; i < n_region; i++)
4409 region_copy[i]->flags |= BB_DUPLICATED;
4411 for (i = 0; i < n_region; i++)
4412 add_phi_args_after_copy_bb (region_copy[i]);
4414 for (i = 0; i < n_region; i++)
4415 region_copy[i]->flags &= ~BB_DUPLICATED;
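/* Illustrative sketch, not part of this file: the typical calling
   sequence around add_phi_args_after_copy, condensed from
   tree_duplicate_sese_region below.  REGION, N_REGION, ENTRY, EXIT and
   LOOP stand for a caller's own data and are assumptions of the sketch.  */

static void
copy_region_and_fix_phis_sketch (basic_block *region, unsigned n_region,
                                 edge entry, edge exit, struct loop *loop)
{
  basic_block *region_copy = XNEWVEC (basic_block, n_region);
  edge exit_copy;

  initialize_original_copy_tables ();

  /* Duplicate the blocks.  PHI arguments on the edges leaving the
     copies are not filled in by the duplication itself.  */
  copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
            split_edge_bb_loc (entry));

  /* Fill in the missing PHI arguments and rewrite the SSA web.  */
  add_phi_args_after_copy (region_copy, n_region);
  update_ssa (TODO_update_ssa);

  free_original_copy_tables ();
  free (region_copy);
}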
4418 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
4419 important exit edge EXIT. By important we mean that no SSA name defined
4420 inside the region is live over the other exit edges of the region. All entry
4421 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
4422 to the duplicate of the region. SSA form, dominance and loop information
4423 is updated. The new basic blocks are stored to REGION_COPY in the same
4424 order as they had in REGION, provided that REGION_COPY is not NULL.
4425 The function returns false if it is unable to copy the region,
4426 true otherwise. */
4428 bool
4429 tree_duplicate_sese_region (edge entry, edge exit,
4430 basic_block *region, unsigned n_region,
4431 basic_block *region_copy)
4433 unsigned i, n_doms;
4434 bool free_region_copy = false, copying_header = false;
4435 struct loop *loop = entry->dest->loop_father;
4436 edge exit_copy;
4437 basic_block *doms;
4438 edge redirected;
4439 int total_freq = 0, entry_freq = 0;
4440 gcov_type total_count = 0, entry_count = 0;
4442 if (!can_copy_bbs_p (region, n_region))
4443 return false;
4445 /* Some sanity checking. Note that we do not check for all possible
4446 misuses of this function; if you ask to copy something weird,
4447 it will work, but the state of the structures probably will not be
4448 correct. */
4449 for (i = 0; i < n_region; i++)
4451 /* We do not handle subloops, i.e. all the blocks must belong to the
4452 same loop. */
4453 if (region[i]->loop_father != loop)
4454 return false;
4456 if (region[i] != entry->dest
4457 && region[i] == loop->header)
4458 return false;
4461 loop->copy = loop;
4463 /* In case the function is used for loop header copying (which is the primary
4464 use), ensure that EXIT and its copy will be the new latch and entry edges. */
4465 if (loop->header == entry->dest)
4467 copying_header = true;
4468 loop->copy = loop->outer;
4470 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
4471 return false;
4473 for (i = 0; i < n_region; i++)
4474 if (region[i] != exit->src
4475 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
4476 return false;
4479 if (!region_copy)
4481 region_copy = XNEWVEC (basic_block, n_region);
4482 free_region_copy = true;
4485 gcc_assert (!need_ssa_update_p ());
4487 /* Record blocks outside the region that are dominated by something
4488 inside. */
4489 doms = XNEWVEC (basic_block, n_basic_blocks);
4490 initialize_original_copy_tables ();
4492 n_doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region, doms);
4494 if (entry->dest->count)
4496 total_count = entry->dest->count;
4497 entry_count = entry->count;
4498 /* Fix up corner cases, to avoid division by zero or creation of negative
4499 frequencies. */
4500 if (entry_count > total_count)
4501 entry_count = total_count;
4503 else
4505 total_freq = entry->dest->frequency;
4506 entry_freq = EDGE_FREQUENCY (entry);
4507 /* Fix up corner cases, to avoid division by zero or creation of negative
4508 frequencies. */
4509 if (total_freq == 0)
4510 total_freq = 1;
4511 else if (entry_freq > total_freq)
4512 entry_freq = total_freq;
4515 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
4516 split_edge_bb_loc (entry));
4517 if (total_count)
4519 scale_bbs_frequencies_gcov_type (region, n_region,
4520 total_count - entry_count,
4521 total_count);
4522 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
4523 total_count);
4525 else
4527 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
4528 total_freq);
4529 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
4532 if (copying_header)
4534 loop->header = exit->dest;
4535 loop->latch = exit->src;
4538 /* Redirect the entry and add the phi node arguments. */
4539 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
4540 gcc_assert (redirected != NULL);
4541 flush_pending_stmts (entry);
4543 /* Concerning updating of dominators: We must recount dominators
4544 for entry block and its copy. Anything that is outside of the
4545 region, but was dominated by something inside needs recounting as
4546 well. */
4547 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
4548 doms[n_doms++] = get_bb_original (entry->dest);
4549 iterate_fix_dominators (CDI_DOMINATORS, doms, n_doms);
4550 free (doms);
4552 /* Add the other PHI node arguments. */
4553 add_phi_args_after_copy (region_copy, n_region);
4555 /* Update the SSA web. */
4556 update_ssa (TODO_update_ssa);
4558 if (free_region_copy)
4559 free (region_copy);
4561 free_original_copy_tables ();
4562 return true;
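/* Illustrative sketch, not part of this file: how a loop-header-copying
   pass might invoke tree_duplicate_sese_region.  The single-block region
   and the choice of ENTRY/EXIT edges are assumptions of the sketch, as is
   the use of the cfgloop.h helpers loop_preheader_edge and
   flow_bb_inside_loop_p; see the function comment above for the actual
   contract.  */

static bool
copy_loop_header_sketch (struct loop *loop)
{
  basic_block header = loop->header;
  basic_block region[1];
  edge entry, exit;

  /* Duplicate just the header, entered through the preheader edge and
     left through the successor edge that stays inside the loop (the
     header is assumed to end in a two-way condition).  */
  region[0] = header;
  entry = loop_preheader_edge (loop);
  if (flow_bb_inside_loop_p (loop, EDGE_SUCC (header, 0)->dest))
    exit = EDGE_SUCC (header, 0);
  else
    exit = EDGE_SUCC (header, 1);

  /* ENTRY is redirected to the copy; EXIT and its copy become the new
     latch and entry edges of LOOP.  */
  return tree_duplicate_sese_region (entry, exit, region, 1, NULL);
}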
4566 DEF_VEC_P(basic_block);
4567 DEF_VEC_ALLOC_P(basic_block,heap);
4570 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
4571 descending when the dominator traversal reaches EXIT (EXIT itself is added).
4572 This function silently assumes that ENTRY strictly dominates EXIT. */
4574 static void
4575 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
4576 VEC(basic_block,heap) **bbs_p)
4578 basic_block son;
4580 for (son = first_dom_son (CDI_DOMINATORS, entry);
4581 son;
4582 son = next_dom_son (CDI_DOMINATORS, son))
4584 VEC_safe_push (basic_block, heap, *bbs_p, son);
4585 if (son != exit)
4586 gather_blocks_in_sese_region (son, exit, bbs_p);
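/* Illustrative sketch, not part of this file: collecting the blocks of
   a region delimited by ENTRY and EXIT and walking them.  As required
   above, ENTRY must strictly dominate EXIT.  */

static void
walk_sese_region_sketch (basic_block entry, basic_block exit)
{
  VEC(basic_block,heap) *bbs = NULL;
  basic_block bb;
  unsigned i;

  calculate_dominance_info (CDI_DOMINATORS);

  /* ENTRY itself is not added by the dominator walk, so push it first.  */
  VEC_safe_push (basic_block, heap, bbs, entry);
  gather_blocks_in_sese_region (entry, exit, &bbs);

  for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
    {
      /* ... process BB ... */
    }

  VEC_free (basic_block, heap, bbs);
}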
4591 struct move_stmt_d
4593 tree block;
4594 tree from_context;
4595 tree to_context;
4596 bitmap vars_to_remove;
4597 htab_t new_label_map;
4598 bool remap_decls_p;
4601 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
4602 contained in *TP and change the DECL_CONTEXT of every local
4603 variable referenced in *TP. */
4605 static tree
4606 move_stmt_r (tree *tp, int *walk_subtrees, void *data)
4608 struct move_stmt_d *p = (struct move_stmt_d *) data;
4609 tree t = *tp;
4611 if (p->block
4612 && (EXPR_P (t) || GIMPLE_STMT_P (t)))
4613 TREE_BLOCK (t) = p->block;
4615 if (OMP_DIRECTIVE_P (t)
4616 && TREE_CODE (t) != OMP_RETURN
4617 && TREE_CODE (t) != OMP_CONTINUE)
4619 /* Do not remap variables inside OMP directives. Variables
4620 referenced in clauses and directive header belong to the
4621 parent function and should not be moved into the child
4622 function. */
4623 bool save_remap_decls_p = p->remap_decls_p;
4624 p->remap_decls_p = false;
4625 *walk_subtrees = 0;
4627 walk_tree (&OMP_BODY (t), move_stmt_r, p, NULL);
4629 p->remap_decls_p = save_remap_decls_p;
4631 else if (DECL_P (t) && DECL_CONTEXT (t) == p->from_context)
4633 if (TREE_CODE (t) == LABEL_DECL)
4635 if (p->new_label_map)
4637 struct tree_map in, *out;
4638 in.from = t;
4639 out = htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
4640 if (out)
4641 *tp = t = out->to;
4644 DECL_CONTEXT (t) = p->to_context;
4646 else if (p->remap_decls_p)
4648 DECL_CONTEXT (t) = p->to_context;
4650 if (TREE_CODE (t) == VAR_DECL)
4652 struct function *f = DECL_STRUCT_FUNCTION (p->to_context);
4653 f->unexpanded_var_list
4654 = tree_cons (0, t, f->unexpanded_var_list);
4656 /* Mark T to be removed from the original function,
4657 otherwise it will be given a DECL_RTL when the
4658 original function is expanded. */
4659 bitmap_set_bit (p->vars_to_remove, DECL_UID (t));
4663 else if (TYPE_P (t))
4664 *walk_subtrees = 0;
4666 return NULL_TREE;
4670 /* Move basic block BB from function CFUN to function DEST_FN. The
4671 block is moved out of the original linked list and placed after
4672 block AFTER in the new list. Also, the block is removed from the
4673 original array of blocks and placed in DEST_FN's array of blocks.
4674 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
4675 updated to reflect the moved edges.
4677 On exit, local variables that need to be removed from
4678 CFUN->UNEXPANDED_VAR_LIST will have been added to VARS_TO_REMOVE. */
4680 static void
4681 move_block_to_fn (struct function *dest_cfun, basic_block bb,
4682 basic_block after, bool update_edge_count_p,
4683 bitmap vars_to_remove, htab_t new_label_map, int eh_offset)
4685 struct control_flow_graph *cfg;
4686 edge_iterator ei;
4687 edge e;
4688 block_stmt_iterator si;
4689 struct move_stmt_d d;
4690 unsigned old_len, new_len;
4691 basic_block *addr;
4693 /* Link BB to the new linked list. */
4694 move_block_after (bb, after);
4696 /* Update the edge count in the corresponding flowgraphs. */
4697 if (update_edge_count_p)
4698 FOR_EACH_EDGE (e, ei, bb->succs)
4700 cfun->cfg->x_n_edges--;
4701 dest_cfun->cfg->x_n_edges++;
4704 /* Remove BB from the original basic block array. */
4705 VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
4706 cfun->cfg->x_n_basic_blocks--;
4708 /* Grow DEST_CFUN's basic block array if needed. */
4709 cfg = dest_cfun->cfg;
4710 cfg->x_n_basic_blocks++;
4711 if (bb->index > cfg->x_last_basic_block)
4712 cfg->x_last_basic_block = bb->index;
4714 old_len = VEC_length (basic_block, cfg->x_basic_block_info);
4715 if ((unsigned) cfg->x_last_basic_block >= old_len)
4717 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
4718 VEC_safe_grow (basic_block, gc, cfg->x_basic_block_info, new_len);
4719 addr = VEC_address (basic_block, cfg->x_basic_block_info);
4720 memset (&addr[old_len], 0, sizeof (basic_block) * (new_len - old_len));
4723 VEC_replace (basic_block, cfg->x_basic_block_info,
4724 cfg->x_last_basic_block, bb);
4726 /* The statements in BB need to be associated with a new TREE_BLOCK.
4727 Labels need to be associated with a new label-to-block map. */
4728 memset (&d, 0, sizeof (d));
4729 d.vars_to_remove = vars_to_remove;
4731 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
4733 tree stmt = bsi_stmt (si);
4734 int region;
4736 d.from_context = cfun->decl;
4737 d.to_context = dest_cfun->decl;
4738 d.remap_decls_p = true;
4739 d.new_label_map = new_label_map;
4740 if (TREE_BLOCK (stmt))
4741 d.block = DECL_INITIAL (dest_cfun->decl);
4743 walk_tree (&stmt, move_stmt_r, &d, NULL);
4745 if (TREE_CODE (stmt) == LABEL_EXPR)
4747 tree label = LABEL_EXPR_LABEL (stmt);
4748 int uid = LABEL_DECL_UID (label);
4750 gcc_assert (uid > -1);
4752 old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
4753 if (old_len <= (unsigned) uid)
4755 new_len = 3 * uid / 2;
4756 VEC_safe_grow (basic_block, gc, cfg->x_label_to_block_map,
4757 new_len);
4758 addr = VEC_address (basic_block, cfg->x_label_to_block_map);
4759 memset (&addr[old_len], 0,
4760 sizeof (basic_block) * (new_len - old_len));
4763 VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
4764 VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
4766 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
4768 if (uid >= dest_cfun->last_label_uid)
4769 dest_cfun->last_label_uid = uid + 1;
4771 else if (TREE_CODE (stmt) == RESX_EXPR && eh_offset != 0)
4772 TREE_OPERAND (stmt, 0) =
4773 build_int_cst (NULL_TREE,
4774 TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0))
4775 + eh_offset);
4777 region = lookup_stmt_eh_region (stmt);
4778 if (region >= 0)
4780 add_stmt_to_eh_region_fn (dest_cfun, stmt, region + eh_offset);
4781 remove_stmt_from_eh_region (stmt);
4786 /* Examine the statements in BB (which is in SRC_CFUN); find and return
4787 the outermost EH region. Use REGION as the incoming base EH region. */
4789 static int
4790 find_outermost_region_in_block (struct function *src_cfun,
4791 basic_block bb, int region)
4793 block_stmt_iterator si;
4795 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
4797 tree stmt = bsi_stmt (si);
4798 int stmt_region;
4800 if (TREE_CODE (stmt) == RESX_EXPR)
4801 stmt_region = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
4802 else
4803 stmt_region = lookup_stmt_eh_region_fn (src_cfun, stmt);
4804 if (stmt_region > 0)
4806 if (region < 0)
4807 region = stmt_region;
4808 else if (stmt_region != region)
4810 region = eh_region_outermost (src_cfun, stmt_region, region);
4811 gcc_assert (region != -1);
4816 return region;
4819 static tree
4820 new_label_mapper (tree decl, void *data)
4822 htab_t hash = (htab_t) data;
4823 struct tree_map *m;
4824 void **slot;
4826 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
4828 m = xmalloc (sizeof (struct tree_map));
4829 m->hash = DECL_UID (decl);
4830 m->from = decl;
4831 m->to = create_artificial_label ();
4832 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
4834 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
4835 gcc_assert (*slot == NULL);
4837 *slot = m;
4839 return m->to;
4842 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
4843 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
4844 single basic block in the original CFG and the new basic block is
4845 returned. DEST_CFUN must not have a CFG yet.
4847 Note that the region need not be a pure SESE region. Blocks inside
4848 the region may contain calls to abort/exit. The only restriction
4849 is that ENTRY_BB should be the only entry point and it must
4850 dominate EXIT_BB.
4852 All local variables referenced in the region are assumed to be in
4853 the corresponding BLOCK_VARS and unexpanded variable lists
4854 associated with DEST_CFUN. */
4856 basic_block
4857 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
4858 basic_block exit_bb)
4860 VEC(basic_block,heap) *bbs;
4861 basic_block after, bb, *entry_pred, *exit_succ;
4862 struct function *saved_cfun;
4863 int *entry_flag, *exit_flag, eh_offset;
4864 unsigned i, num_entry_edges, num_exit_edges;
4865 edge e;
4866 edge_iterator ei;
4867 bitmap vars_to_remove;
4868 htab_t new_label_map;
4870 saved_cfun = cfun;
4872 /* Collect all the blocks in the region. Manually add ENTRY_BB
4873 because it won't be added by gather_blocks_in_sese_region. */
4874 calculate_dominance_info (CDI_DOMINATORS);
4876 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
4877 region. */
4878 gcc_assert (entry_bb != exit_bb
4879 && (!exit_bb
4880 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
4882 bbs = NULL;
4883 VEC_safe_push (basic_block, heap, bbs, entry_bb);
4884 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
4886 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
4887 the predecessor edges to ENTRY_BB and the successor edges to
4888 EXIT_BB so that we can re-attach them to the new basic block that
4889 will replace the region. */
4890 num_entry_edges = EDGE_COUNT (entry_bb->preds);
4891 entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
4892 entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
4893 i = 0;
4894 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
4896 entry_flag[i] = e->flags;
4897 entry_pred[i++] = e->src;
4898 remove_edge (e);
4901 if (exit_bb)
4903 num_exit_edges = EDGE_COUNT (exit_bb->succs);
4904 exit_succ = (basic_block *) xcalloc (num_exit_edges,
4905 sizeof (basic_block));
4906 exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
4907 i = 0;
4908 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
4910 exit_flag[i] = e->flags;
4911 exit_succ[i++] = e->dest;
4912 remove_edge (e);
4915 else
4917 num_exit_edges = 0;
4918 exit_succ = NULL;
4919 exit_flag = NULL;
4922 /* Switch context to the child function to initialize DEST_FN's CFG. */
4923 gcc_assert (dest_cfun->cfg == NULL);
4924 cfun = dest_cfun;
4926 init_empty_tree_cfg ();
4928 /* Initialize EH information for the new function. */
4929 eh_offset = 0;
4930 new_label_map = NULL;
4931 if (saved_cfun->eh)
4933 int region = -1;
4935 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
4936 region = find_outermost_region_in_block (saved_cfun, bb, region);
4938 init_eh_for_function ();
4939 if (region != -1)
4941 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
4942 eh_offset = duplicate_eh_regions (saved_cfun, new_label_mapper,
4943 new_label_map, region, 0);
4947 cfun = saved_cfun;
4949 /* Move blocks from BBS into DEST_CFUN. */
4950 gcc_assert (VEC_length (basic_block, bbs) >= 2);
4951 after = dest_cfun->cfg->x_entry_block_ptr;
4952 vars_to_remove = BITMAP_ALLOC (NULL);
4953 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
4955 /* No need to update edge counts on the last block. It has
4956 already been updated earlier when we detached the region from
4957 the original CFG. */
4958 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, vars_to_remove,
4959 new_label_map, eh_offset);
4960 after = bb;
4963 if (new_label_map)
4964 htab_delete (new_label_map);
4966 /* Remove the variables marked in VARS_TO_REMOVE from
4967 CFUN->UNEXPANDED_VAR_LIST. Otherwise, they will be given a
4968 DECL_RTL in the context of CFUN. */
4969 if (!bitmap_empty_p (vars_to_remove))
4971 tree *p;
4973 for (p = &cfun->unexpanded_var_list; *p; )
4975 tree var = TREE_VALUE (*p);
4976 if (bitmap_bit_p (vars_to_remove, DECL_UID (var)))
4978 *p = TREE_CHAIN (*p);
4979 continue;
4982 p = &TREE_CHAIN (*p);
4986 BITMAP_FREE (vars_to_remove);
4988 /* Rewire the entry and exit blocks. The successor to the entry
4989 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
4990 the child function. Similarly, the predecessor of DEST_FN's
4991 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
4992 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
4993 various CFG manipulation functions get to the right CFG.
4995 FIXME, this is silly. The CFG ought to become a parameter to
4996 these helpers. */
4997 cfun = dest_cfun;
4998 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
4999 if (exit_bb)
5000 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
5001 cfun = saved_cfun;
5003 /* Back in the original function, the SESE region has disappeared;
5004 create a new basic block in its place. */
5005 bb = create_empty_bb (entry_pred[0]);
5006 for (i = 0; i < num_entry_edges; i++)
5007 make_edge (entry_pred[i], bb, entry_flag[i]);
5009 for (i = 0; i < num_exit_edges; i++)
5010 make_edge (bb, exit_succ[i], exit_flag[i]);
5012 if (exit_bb)
5014 free (exit_flag);
5015 free (exit_succ);
5017 free (entry_flag);
5018 free (entry_pred);
5019 free_dominance_info (CDI_DOMINATORS);
5020 free_dominance_info (CDI_POST_DOMINATORS);
5021 VEC_free (basic_block, heap, bbs);
5023 return bb;
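/* Illustrative sketch, not part of this file: how an outlining pass
   might use move_sese_region_to_fn.  CHILD_FN, ENTRY_BB and EXIT_BB are
   hypothetical inputs; CHILD_FN is a freshly built FUNCTION_DECL whose
   struct function does not have a CFG yet.  */

static basic_block
outline_region_sketch (tree child_fn, basic_block entry_bb,
                       basic_block exit_bb)
{
  struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);

  /* The region [ENTRY_BB, EXIT_BB] is detached from CFUN, re-attached
     to CHILD_CFUN, and replaced in the original CFG by the single basic
     block that is returned here.  */
  return move_sese_region_to_fn (child_cfun, entry_bb, exit_bb);
}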
5027 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h) */
5029 void
5030 dump_function_to_file (tree fn, FILE *file, int flags)
5032 tree arg, vars, var;
5033 bool ignore_topmost_bind = false, any_var = false;
5034 basic_block bb;
5035 tree chain;
5036 struct function *saved_cfun;
5038 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
5040 arg = DECL_ARGUMENTS (fn);
5041 while (arg)
5043 print_generic_expr (file, arg, dump_flags);
5044 if (TREE_CHAIN (arg))
5045 fprintf (file, ", ");
5046 arg = TREE_CHAIN (arg);
5048 fprintf (file, ")\n");
5050 if (flags & TDF_DETAILS)
5051 dump_eh_tree (file, DECL_STRUCT_FUNCTION (fn));
5052 if (flags & TDF_RAW)
5054 dump_node (fn, TDF_SLIM | flags, file);
5055 return;
5058 /* Switch CFUN to point to FN. */
5059 saved_cfun = cfun;
5060 cfun = DECL_STRUCT_FUNCTION (fn);
5062 /* When GIMPLE is lowered, the variables are no longer available in
5063 BIND_EXPRs, so display them separately. */
5064 if (cfun && cfun->decl == fn && cfun->unexpanded_var_list)
5066 ignore_topmost_bind = true;
5068 fprintf (file, "{\n");
5069 for (vars = cfun->unexpanded_var_list; vars; vars = TREE_CHAIN (vars))
5071 var = TREE_VALUE (vars);
5073 print_generic_decl (file, var, flags);
5074 fprintf (file, "\n");
5076 any_var = true;
5080 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
5082 /* Make a CFG based dump. */
5083 check_bb_profile (ENTRY_BLOCK_PTR, file);
5084 if (!ignore_topmost_bind)
5085 fprintf (file, "{\n");
5087 if (any_var && n_basic_blocks)
5088 fprintf (file, "\n");
5090 FOR_EACH_BB (bb)
5091 dump_generic_bb (file, bb, 2, flags);
5093 fprintf (file, "}\n");
5094 check_bb_profile (EXIT_BLOCK_PTR, file);
5096 else
5098 int indent;
5100 /* Make a tree based dump. */
5101 chain = DECL_SAVED_TREE (fn);
5103 if (chain && TREE_CODE (chain) == BIND_EXPR)
5105 if (ignore_topmost_bind)
5107 chain = BIND_EXPR_BODY (chain);
5108 indent = 2;
5110 else
5111 indent = 0;
5113 else
5115 if (!ignore_topmost_bind)
5116 fprintf (file, "{\n");
5117 indent = 2;
5120 if (any_var)
5121 fprintf (file, "\n");
5123 print_generic_stmt_indented (file, chain, flags, indent);
5124 if (ignore_topmost_bind)
5125 fprintf (file, "}\n");
5128 fprintf (file, "\n\n");
5130 /* Restore CFUN. */
5131 cfun = saved_cfun;
5135 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
5137 void
5138 debug_function (tree fn, int flags)
5140 dump_function_to_file (fn, stderr, flags);
5144 /* Pretty print of the loops intermediate representation. */
5145 static void print_loop (FILE *, struct loop *, int);
5146 static void print_pred_bbs (FILE *, basic_block bb);
5147 static void print_succ_bbs (FILE *, basic_block bb);
5150 /* Print on FILE the indexes for the predecessors of basic_block BB. */
5152 static void
5153 print_pred_bbs (FILE *file, basic_block bb)
5155 edge e;
5156 edge_iterator ei;
5158 FOR_EACH_EDGE (e, ei, bb->preds)
5159 fprintf (file, "bb_%d ", e->src->index);
5163 /* Print on FILE the indexes for the successors of basic_block BB. */
5165 static void
5166 print_succ_bbs (FILE *file, basic_block bb)
5168 edge e;
5169 edge_iterator ei;
5171 FOR_EACH_EDGE (e, ei, bb->succs)
5172 fprintf (file, "bb_%d ", e->dest->index);
5176 /* Pretty print LOOP on FILE, indented INDENT spaces. */
5178 static void
5179 print_loop (FILE *file, struct loop *loop, int indent)
5181 char *s_indent;
5182 basic_block bb;
5184 if (loop == NULL)
5185 return;
5187 s_indent = (char *) alloca ((size_t) indent + 1);
5188 memset ((void *) s_indent, ' ', (size_t) indent);
5189 s_indent[indent] = '\0';
5191 /* Print the loop's header. */
5192 fprintf (file, "%sloop_%d\n", s_indent, loop->num);
5194 /* Print the loop's body. */
5195 fprintf (file, "%s{\n", s_indent);
5196 FOR_EACH_BB (bb)
5197 if (bb->loop_father == loop)
5199 /* Print the basic_block's header. */
5200 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
5201 print_pred_bbs (file, bb);
5202 fprintf (file, "}, succs = {");
5203 print_succ_bbs (file, bb);
5204 fprintf (file, "})\n");
5206 /* Print the basic_block's body. */
5207 fprintf (file, "%s {\n", s_indent);
5208 tree_dump_bb (bb, file, indent + 4);
5209 fprintf (file, "%s }\n", s_indent);
5212 print_loop (file, loop->inner, indent + 2);
5213 fprintf (file, "%s}\n", s_indent);
5214 print_loop (file, loop->next, indent);
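/* For illustration only (the loop and block numbers are hypothetical),
   print_loop produces output of roughly this shape:

     loop_1
     {
       bb_3 (preds = {bb_2 bb_4 }, succs = {bb_4 bb_5 })
       {
         ... statements dumped by tree_dump_bb ...
       }
       loop_2
       {
         ...
       }
     }
*/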
5218 /* Follow a CFG edge from the entry point of the program, and on entry
5219 of a loop, pretty print the loop structure on FILE. */
5221 void
5222 print_loop_ir (FILE *file)
5224 basic_block bb;
5226 bb = BASIC_BLOCK (NUM_FIXED_BLOCKS);
5227 if (bb && bb->loop_father)
5228 print_loop (file, bb->loop_father, 0);
5232 /* Debug the loop structure at the tree level. */
5234 void
5235 debug_loop_ir (void)
5237 print_loop_ir (stderr);
5241 /* Return true if BB ends with a call, possibly followed by some
5242 instructions that must stay with the call. Return false
5243 otherwise. */
5245 static bool
5246 tree_block_ends_with_call_p (basic_block bb)
5248 block_stmt_iterator bsi = bsi_last (bb);
5249 return get_call_expr_in (bsi_stmt (bsi)) != NULL;
5253 /* Return true if BB ends with a conditional branch. Return false
5254 otherwise. */
5256 static bool
5257 tree_block_ends_with_condjump_p (basic_block bb)
5259 tree stmt = last_stmt (bb);
5260 return (stmt && TREE_CODE (stmt) == COND_EXPR);
5264 /* Return true if we need to add fake edge to exit at statement T.
5265 Helper function for tree_flow_call_edges_add. */
5267 static bool
5268 need_fake_edge_p (tree t)
5270 tree call;
5272 /* NORETURN and LONGJMP calls already have an edge to exit.
5273 CONST and PURE calls do not need one.
5274 We don't currently check for CONST and PURE here, although
5275 it would be a good idea, because those attributes are
5276 figured out from the RTL in mark_constant_function, and
5277 the counter incrementation code from -fprofile-arcs
5278 leads to different results from -fbranch-probabilities. */
5279 call = get_call_expr_in (t);
5280 if (call
5281 && !(call_expr_flags (call) & ECF_NORETURN))
5282 return true;
5284 if (TREE_CODE (t) == ASM_EXPR
5285 && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
5286 return true;
5288 return false;
5292 /* Add fake edges to the function exit for any non-constant and
5293 non-noreturn calls, or volatile inline assembly, in the bitmap of blocks
5294 specified by BLOCKS, or to the whole CFG if BLOCKS is zero. Return
5295 the number of blocks that were split.
5297 The goal is to expose cases in which entering a basic block does
5298 not imply that all subsequent instructions must be executed. */
5300 static int
5301 tree_flow_call_edges_add (sbitmap blocks)
5303 int i;
5304 int blocks_split = 0;
5305 int last_bb = last_basic_block;
5306 bool check_last_block = false;
5308 if (n_basic_blocks == NUM_FIXED_BLOCKS)
5309 return 0;
5311 if (! blocks)
5312 check_last_block = true;
5313 else
5314 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
5316 /* In the last basic block, before epilogue generation, there will be
5317 a fallthru edge to EXIT. Special care is required if the last insn
5318 of the last basic block is a call because make_edge folds duplicate
5319 edges, which would result in the fallthru edge also being marked
5320 fake, which would result in the fallthru edge being removed by
5321 remove_fake_edges, which would result in an invalid CFG.
5323 Moreover, we can't elide the outgoing fake edge, since the block
5324 profiler needs to take this into account in order to solve the minimal
5325 spanning tree in the case that the call doesn't return.
5327 Handle this by adding a dummy instruction in a new last basic block. */
5328 if (check_last_block)
5330 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
5331 block_stmt_iterator bsi = bsi_last (bb);
5332 tree t = NULL_TREE;
5333 if (!bsi_end_p (bsi))
5334 t = bsi_stmt (bsi);
5336 if (t && need_fake_edge_p (t))
5338 edge e;
5340 e = find_edge (bb, EXIT_BLOCK_PTR);
5341 if (e)
5343 bsi_insert_on_edge (e, build_empty_stmt ());
5344 bsi_commit_edge_inserts ();
5349 /* Now add fake edges to the function exit for any non-constant
5350 calls, since there is no way to determine whether they will
5351 return or not. */
5352 for (i = 0; i < last_bb; i++)
5354 basic_block bb = BASIC_BLOCK (i);
5355 block_stmt_iterator bsi;
5356 tree stmt, last_stmt;
5358 if (!bb)
5359 continue;
5361 if (blocks && !TEST_BIT (blocks, i))
5362 continue;
5364 bsi = bsi_last (bb);
5365 if (!bsi_end_p (bsi))
5367 last_stmt = bsi_stmt (bsi);
5370 stmt = bsi_stmt (bsi);
5371 if (need_fake_edge_p (stmt))
5373 edge e;
5374 /* The handling above of the final block before the
5375 epilogue should be enough to verify that there is
5376 no edge to the exit block in the CFG already.
5377 Calling make_edge in such a case would cause us to
5378 mark that edge as fake and remove it later. */
5379 #ifdef ENABLE_CHECKING
5380 if (stmt == last_stmt)
5382 e = find_edge (bb, EXIT_BLOCK_PTR);
5383 gcc_assert (e == NULL);
5385 #endif
5387 /* Note that the following may create a new basic block
5388 and renumber the existing basic blocks. */
5389 if (stmt != last_stmt)
5391 e = split_block (bb, stmt);
5392 if (e)
5393 blocks_split++;
5395 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
5397 bsi_prev (&bsi);
5399 while (!bsi_end_p (bsi));
5403 if (blocks_split)
5404 verify_flow_info ();
5406 return blocks_split;
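/* Illustrative sketch, not part of this file: a profiling-style client
   requesting fake exit edges for every basic block through the generic
   cfghooks wrapper flow_call_edges_add, which dispatches to
   tree_flow_call_edges_add once tree_cfg_hooks are installed.  The use
   of that wrapper and of the sbitmap helpers is an assumption of the
   sketch.  */

static int
add_fake_exit_edges_sketch (void)
{
  sbitmap blocks = sbitmap_alloc (last_basic_block);
  int blocks_split;

  /* Consider the whole CFG; passing a NULL bitmap would have the same
     effect.  */
  sbitmap_ones (blocks);
  blocks_split = flow_call_edges_add (blocks);
  sbitmap_free (blocks);

  return blocks_split;
}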
5409 /* Purge dead abnormal call edges from basic block BB. */
5411 bool
5412 tree_purge_dead_abnormal_call_edges (basic_block bb)
5414 bool changed = tree_purge_dead_eh_edges (bb);
5416 if (current_function_has_nonlocal_label)
5418 tree stmt = last_stmt (bb);
5419 edge_iterator ei;
5420 edge e;
5422 if (!(stmt && tree_can_make_abnormal_goto (stmt)))
5423 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5425 if (e->flags & EDGE_ABNORMAL)
5427 remove_edge (e);
5428 changed = true;
5430 else
5431 ei_next (&ei);
5434 /* See tree_purge_dead_eh_edges below. */
5435 if (changed)
5436 free_dominance_info (CDI_DOMINATORS);
5439 return changed;
5442 /* Purge dead EH edges from basic block BB. */
5444 bool
5445 tree_purge_dead_eh_edges (basic_block bb)
5447 bool changed = false;
5448 edge e;
5449 edge_iterator ei;
5450 tree stmt = last_stmt (bb);
5452 if (stmt && tree_can_throw_internal (stmt))
5453 return false;
5455 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5457 if (e->flags & EDGE_EH)
5459 remove_edge (e);
5460 changed = true;
5462 else
5463 ei_next (&ei);
5466 /* Removal of dead EH edges might change dominators of not
5467 just immediate successors. E.g. when bb1 is changed so that
5468 it no longer can throw and bb1->bb3 and bb1->bb4 are dead
5469 eh edges purged by this function in:
5473 1-->2
5474 / \ |
5475 v v |
5476 3-->4 |
5478 --->5
5481 idom(bb5) must be recomputed. For now just free the dominance
5482 info. */
5483 if (changed)
5484 free_dominance_info (CDI_DOMINATORS);
5486 return changed;
5489 bool
5490 tree_purge_all_dead_eh_edges (bitmap blocks)
5492 bool changed = false;
5493 unsigned i;
5494 bitmap_iterator bi;
5496 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
5498 changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
5501 return changed;
5504 /* This function is called whenever a new edge is created or
5505 redirected. */
5507 static void
5508 tree_execute_on_growing_pred (edge e)
5510 basic_block bb = e->dest;
5512 if (phi_nodes (bb))
5513 reserve_phi_args_for_new_edge (bb);
5516 /* This function is called immediately before edge E is removed from
5517 the edge vector E->dest->preds. */
5519 static void
5520 tree_execute_on_shrinking_pred (edge e)
5522 if (phi_nodes (e->dest))
5523 remove_phi_args (e);
5526 /*---------------------------------------------------------------------------
5527 Helper functions for Loop versioning
5528 ---------------------------------------------------------------------------*/
5530 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
5531 of 'first'. Both of them are dominated by the 'new_head' basic block. When
5532 'new_head' was created by splitting 'second's incoming edge, the new edge
5533 from 'new_head' to 'second' received phi arguments from split_edge(). Later,
5534 an additional edge 'e' was created to connect 'new_head' and 'first'. This
5535 routine now adds to edge 'e' the same phi arguments that the 'new_head' to
5536 'second' edge received as part of the edge splitting. */
5539 static void
5540 tree_lv_adjust_loop_header_phi (basic_block first, basic_block second,
5541 basic_block new_head, edge e)
5543 tree phi1, phi2;
5544 edge e2 = find_edge (new_head, second);
5546 /* Because NEW_HEAD has been created by splitting SECOND's incoming
5547 edge, we should always have an edge from NEW_HEAD to SECOND. */
5548 gcc_assert (e2 != NULL);
5550 /* Browse all 'second' basic block phi nodes and add phi args to
5551 edge 'e' for 'first' head. PHI args are always in correct order. */
5553 for (phi2 = phi_nodes (second), phi1 = phi_nodes (first);
5554 phi2 && phi1;
5555 phi2 = PHI_CHAIN (phi2), phi1 = PHI_CHAIN (phi1))
5557 tree def = PHI_ARG_DEF (phi2, e2->dest_idx);
5558 add_phi_arg (phi1, def, e);
5562 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
5563 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
5564 the destination of the ELSE part. */
5565 static void
5566 tree_lv_add_condition_to_bb (basic_block first_head, basic_block second_head,
5567 basic_block cond_bb, void *cond_e)
5569 block_stmt_iterator bsi;
5570 tree goto1 = NULL_TREE;
5571 tree goto2 = NULL_TREE;
5572 tree new_cond_expr = NULL_TREE;
5573 tree cond_expr = (tree) cond_e;
5574 edge e0;
5576 /* Build new conditional expr */
5577 goto1 = build1 (GOTO_EXPR, void_type_node, tree_block_label (first_head));
5578 goto2 = build1 (GOTO_EXPR, void_type_node, tree_block_label (second_head));
5579 new_cond_expr = build3 (COND_EXPR, void_type_node, cond_expr, goto1, goto2);
5581 /* Add new cond in cond_bb. */
5582 bsi = bsi_start (cond_bb);
5583 bsi_insert_after (&bsi, new_cond_expr, BSI_NEW_STMT);
5584 /* Adjust edges appropriately to connect new head with first head
5585 as well as second head. */
5586 e0 = single_succ_edge (cond_bb);
5587 e0->flags &= ~EDGE_FALLTHRU;
5588 e0->flags |= EDGE_FALSE_VALUE;
5591 struct cfg_hooks tree_cfg_hooks = {
5592 "tree",
5593 tree_verify_flow_info,
5594 tree_dump_bb, /* dump_bb */
5595 create_bb, /* create_basic_block */
5596 tree_redirect_edge_and_branch,/* redirect_edge_and_branch */
5597 tree_redirect_edge_and_branch_force,/* redirect_edge_and_branch_force */
5598 remove_bb, /* delete_basic_block */
5599 tree_split_block, /* split_block */
5600 tree_move_block_after, /* move_block_after */
5601 tree_can_merge_blocks_p, /* can_merge_blocks_p */
5602 tree_merge_blocks, /* merge_blocks */
5603 tree_predict_edge, /* predict_edge */
5604 tree_predicted_by_p, /* predicted_by_p */
5605 tree_can_duplicate_bb_p, /* can_duplicate_block_p */
5606 tree_duplicate_bb, /* duplicate_block */
5607 tree_split_edge, /* split_edge */
5608 tree_make_forwarder_block, /* make_forwarder_block */
5609 NULL, /* tidy_fallthru_edge */
5610 tree_block_ends_with_call_p, /* block_ends_with_call_p */
5611 tree_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
5612 tree_flow_call_edges_add, /* flow_call_edges_add */
5613 tree_execute_on_growing_pred, /* execute_on_growing_pred */
5614 tree_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
5615 tree_duplicate_loop_to_header_edge, /* duplicate loop for trees */
5616 tree_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
5617 tree_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
5618 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
5619 flush_pending_stmts /* flush_pending_stmts */
5623 /* Split all critical edges. */
5625 static unsigned int
5626 split_critical_edges (void)
5628 basic_block bb;
5629 edge e;
5630 edge_iterator ei;
5632 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
5633 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
5634 mappings around the calls to split_edge. */
5635 start_recording_case_labels ();
5636 FOR_ALL_BB (bb)
5638 FOR_EACH_EDGE (e, ei, bb->succs)
5639 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
5641 split_edge (e);
5644 end_recording_case_labels ();
5645 return 0;
5648 struct tree_opt_pass pass_split_crit_edges =
5650 "crited", /* name */
5651 NULL, /* gate */
5652 split_critical_edges, /* execute */
5653 NULL, /* sub */
5654 NULL, /* next */
5655 0, /* static_pass_number */
5656 TV_TREE_SPLIT_EDGES, /* tv_id */
5657 PROP_cfg, /* properties required */
5658 PROP_no_crit_edges, /* properties_provided */
5659 0, /* properties_destroyed */
5660 0, /* todo_flags_start */
5661 TODO_dump_func, /* todo_flags_finish */
5662 0 /* letter */
5666 /* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
5667 a temporary, register it to be renamed if necessary,
5668 and finally return the temporary. Put the statements to compute
5669 EXP before the current statement in BSI. */
5671 tree
5672 gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
5674 tree t, new_stmt, orig_stmt;
5676 if (is_gimple_val (exp))
5677 return exp;
5679 t = make_rename_temp (type, NULL);
5680 new_stmt = build2_gimple (GIMPLE_MODIFY_STMT, t, exp);
5682 orig_stmt = bsi_stmt (*bsi);
5683 SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
5684 TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
5686 bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
5687 if (gimple_in_ssa_p (cfun))
5688 mark_new_vars_to_rename (new_stmt);
5690 return t;
5693 /* Build a ternary operation and gimplify it. Emit code before BSI.
5694 Return the gimple_val holding the result. */
5696 tree
5697 gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
5698 tree type, tree a, tree b, tree c)
5700 tree ret;
5702 ret = fold_build3 (code, type, a, b, c);
5703 STRIP_NOPS (ret);
5705 return gimplify_val (bsi, type, ret);
5708 /* Build a binary operation and gimplify it. Emit code before BSI.
5709 Return the gimple_val holding the result. */
5711 tree
5712 gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
5713 tree type, tree a, tree b)
5715 tree ret;
5717 ret = fold_build2 (code, type, a, b);
5718 STRIP_NOPS (ret);
5720 return gimplify_val (bsi, type, ret);
5723 /* Build a unary operation and gimplify it. Emit code before BSI.
5724 Return the gimple_val holding the result. */
5726 tree
5727 gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
5728 tree a)
5730 tree ret;
5732 ret = fold_build1 (code, type, a);
5733 STRIP_NOPS (ret);
5735 return gimplify_val (bsi, type, ret);
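/* Illustrative sketch, not part of this file: a pass that needs the
   gimplified value of (A + B) * C inserted before the statement at *BSI
   can simply chain the helpers above.  The function name and parameters
   are hypothetical.  */

static tree
gimplify_sum_times_c_sketch (block_stmt_iterator *bsi, tree type,
                             tree a, tree b, tree c)
{
  /* Each call folds, strips nops and, when the result is not already a
     GIMPLE value, emits a temporary assignment before *BSI.  */
  tree sum = gimplify_build2 (bsi, PLUS_EXPR, type, a, b);

  return gimplify_build2 (bsi, MULT_EXPR, type, sum, c);
}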
5740 /* Emit return warnings. */
5742 static unsigned int
5743 execute_warn_function_return (void)
5745 #ifdef USE_MAPPED_LOCATION
5746 source_location location;
5747 #else
5748 location_t *locus;
5749 #endif
5750 tree last;
5751 edge e;
5752 edge_iterator ei;
5754 /* If we have a path to EXIT, then we do return. */
5755 if (TREE_THIS_VOLATILE (cfun->decl)
5756 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
5758 #ifdef USE_MAPPED_LOCATION
5759 location = UNKNOWN_LOCATION;
5760 #else
5761 locus = NULL;
5762 #endif
5763 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5765 last = last_stmt (e->src);
5766 if (TREE_CODE (last) == RETURN_EXPR
5767 #ifdef USE_MAPPED_LOCATION
5768 && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
5769 #else
5770 && (locus = EXPR_LOCUS (last)) != NULL)
5771 #endif
5772 break;
5774 #ifdef USE_MAPPED_LOCATION
5775 if (location == UNKNOWN_LOCATION)
5776 location = cfun->function_end_locus;
5777 warning (0, "%H%<noreturn%> function does return", &location);
5778 #else
5779 if (!locus)
5780 locus = &cfun->function_end_locus;
5781 warning (0, "%H%<noreturn%> function does return", locus);
5782 #endif
5785 /* If we see "return;" in some basic block, then we do reach the end
5786 without returning a value. */
5787 else if (warn_return_type
5788 && !TREE_NO_WARNING (cfun->decl)
5789 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
5790 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
5792 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5794 tree last = last_stmt (e->src);
5795 if (TREE_CODE (last) == RETURN_EXPR
5796 && TREE_OPERAND (last, 0) == NULL
5797 && !TREE_NO_WARNING (last))
5799 #ifdef USE_MAPPED_LOCATION
5800 location = EXPR_LOCATION (last);
5801 if (location == UNKNOWN_LOCATION)
5802 location = cfun->function_end_locus;
5803 warning (0, "%Hcontrol reaches end of non-void function", &location);
5804 #else
5805 locus = EXPR_LOCUS (last);
5806 if (!locus)
5807 locus = &cfun->function_end_locus;
5808 warning (0, "%Hcontrol reaches end of non-void function", locus);
5809 #endif
5810 TREE_NO_WARNING (cfun->decl) = 1;
5811 break;
5815 return 0;
5819 /* Given a basic block B which ends with a conditional and has
5820 precisely two successors, determine which of the edges is taken if
5821 the conditional is true and which is taken if the conditional is
5822 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
5824 void
5825 extract_true_false_edges_from_block (basic_block b,
5826 edge *true_edge,
5827 edge *false_edge)
5829 edge e = EDGE_SUCC (b, 0);
5831 if (e->flags & EDGE_TRUE_VALUE)
5833 *true_edge = e;
5834 *false_edge = EDGE_SUCC (b, 1);
5836 else
5838 *false_edge = e;
5839 *true_edge = EDGE_SUCC (b, 1);
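/* Illustrative sketch, not part of this file: typical use of
   extract_true_false_edges_from_block on a block known to end in a
   two-way COND_EXPR.  The dumping function itself is hypothetical.  */

static void
dump_cond_edges_sketch (FILE *file, basic_block bb)
{
  edge true_edge, false_edge;

  if (!tree_block_ends_with_condjump_p (bb))
    return;

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  fprintf (file, "bb %d: true -> bb %d, false -> bb %d\n",
           bb->index, true_edge->dest->index, false_edge->dest->index);
}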
5843 struct tree_opt_pass pass_warn_function_return =
5845 NULL, /* name */
5846 NULL, /* gate */
5847 execute_warn_function_return, /* execute */
5848 NULL, /* sub */
5849 NULL, /* next */
5850 0, /* static_pass_number */
5851 0, /* tv_id */
5852 PROP_cfg, /* properties_required */
5853 0, /* properties_provided */
5854 0, /* properties_destroyed */
5855 0, /* todo_flags_start */
5856 0, /* todo_flags_finish */
5857 0 /* letter */
5860 /* Emit noreturn warnings. */
5862 static unsigned int
5863 execute_warn_function_noreturn (void)
5865 if (warn_missing_noreturn
5866 && !TREE_THIS_VOLATILE (cfun->decl)
5867 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
5868 && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl))
5869 warning (OPT_Wmissing_noreturn, "%Jfunction might be possible candidate "
5870 "for attribute %<noreturn%>",
5871 cfun->decl);
5872 return 0;
5875 struct tree_opt_pass pass_warn_function_noreturn =
5877 NULL, /* name */
5878 NULL, /* gate */
5879 execute_warn_function_noreturn, /* execute */
5880 NULL, /* sub */
5881 NULL, /* next */
5882 0, /* static_pass_number */
5883 0, /* tv_id */
5884 PROP_cfg, /* properties_required */
5885 0, /* properties_provided */
5886 0, /* properties_destroyed */
5887 0, /* todo_flags_start */
5888 0, /* todo_flags_finish */
5889 0 /* letter */