gcc/tree-cfg.c
1 /* Control flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007
3 Free Software Foundation, Inc.
4 Contributed by Diego Novillo <dnovillo@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to
20 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
21 Boston, MA 02110-1301, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "rtl.h"
29 #include "tm_p.h"
30 #include "hard-reg-set.h"
31 #include "basic-block.h"
32 #include "output.h"
33 #include "flags.h"
34 #include "function.h"
35 #include "expr.h"
36 #include "ggc.h"
37 #include "langhooks.h"
38 #include "diagnostic.h"
39 #include "tree-flow.h"
40 #include "timevar.h"
41 #include "tree-dump.h"
42 #include "tree-pass.h"
43 #include "toplev.h"
44 #include "except.h"
45 #include "cfgloop.h"
46 #include "cfglayout.h"
47 #include "tree-ssa-propagate.h"
48 #include "value-prof.h"
49 #include "pointer-set.h"
51 /* This file contains functions for building the Control Flow Graph (CFG)
52 for a function tree. */
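/* Besides CFG construction proper (build_tree_cfg, exposed through
   pass_build_cfg below), this file also provides helpers used while the
   CFG is being shaped: dead label removal (cleanup_dead_labels), case
   label grouping (group_case_labels), block merging (tree_merge_blocks)
   and the remove_useless_stmts pass (pass_remove_useless_stmts).  */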
54 /* Local declarations. */
56 /* Initial capacity for the basic block array. */
57 static const int initial_cfg_capacity = 20;
59 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
60 which use a particular edge. The CASE_LABEL_EXPRs are chained together
61 via their TREE_CHAIN field, which we clear after we're done with the
62 hash table to prevent problems with duplication of SWITCH_EXPRs.
64 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
65 update the case vector in response to edge redirections.
67 Right now this table is set up and torn down at key points in the
68 compilation process. It would be nice if we could make the table
69 more persistent. The key is getting notification of changes to
70 the CFG (particularly edge removal, creation and redirection). */
72 static struct pointer_map_t *edge_to_cases;
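/* A sketch of the intended usage of this map, based on the helpers
   defined later in this file:

       start_recording_case_labels ();
       ... while redirecting edges, get_cases_for_edge (e, switch_stmt)
           returns the chain of CASE_LABEL_EXPRs whose labels live in
           e->dest, so the case vector can be updated without rescanning
           SWITCH_LABELS ...
       end_recording_case_labels ();  */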
74 /* CFG statistics. */
75 struct cfg_stats_d
77 long num_merged_labels;
80 static struct cfg_stats_d cfg_stats;
82 /* Nonzero if we found a computed goto while building basic blocks. */
83 static bool found_computed_goto;
85 /* Basic blocks and flowgraphs. */
86 static basic_block create_bb (void *, void *, basic_block);
87 static void make_blocks (tree);
88 static void factor_computed_gotos (void);
90 /* Edges. */
91 static void make_edges (void);
92 static void make_cond_expr_edges (basic_block);
93 static void make_switch_expr_edges (basic_block);
94 static void make_goto_expr_edges (basic_block);
95 static edge tree_redirect_edge_and_branch (edge, basic_block);
96 static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
97 static unsigned int split_critical_edges (void);
99 /* Various helpers. */
100 static inline bool stmt_starts_bb_p (tree, tree);
101 static int tree_verify_flow_info (void);
102 static void tree_make_forwarder_block (edge);
103 static void tree_cfg2vcg (FILE *);
104 static inline void change_bb_for_stmt (tree t, basic_block bb);
106 /* Flowgraph optimization and cleanup. */
107 static void tree_merge_blocks (basic_block, basic_block);
108 static bool tree_can_merge_blocks_p (basic_block, basic_block);
109 static void remove_bb (basic_block);
110 static edge find_taken_edge_computed_goto (basic_block, tree);
111 static edge find_taken_edge_cond_expr (basic_block, tree);
112 static edge find_taken_edge_switch_expr (basic_block, tree);
113 static tree find_case_label_for_value (tree, tree);
115 void
116 init_empty_tree_cfg (void)
118 /* Initialize the basic block array. */
119 init_flow ();
120 profile_status = PROFILE_ABSENT;
121 n_basic_blocks = NUM_FIXED_BLOCKS;
122 last_basic_block = NUM_FIXED_BLOCKS;
123 basic_block_info = VEC_alloc (basic_block, gc, initial_cfg_capacity);
124 VEC_safe_grow_cleared (basic_block, gc, basic_block_info,
125 initial_cfg_capacity);
127 /* Build a mapping of labels to their associated blocks. */
128 label_to_block_map = VEC_alloc (basic_block, gc, initial_cfg_capacity);
129 VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
130 initial_cfg_capacity);
132 SET_BASIC_BLOCK (ENTRY_BLOCK, ENTRY_BLOCK_PTR);
133 SET_BASIC_BLOCK (EXIT_BLOCK, EXIT_BLOCK_PTR);
134 ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
135 EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
138 /*---------------------------------------------------------------------------
139 Create basic blocks
140 ---------------------------------------------------------------------------*/
142 /* Entry point to the CFG builder for trees. TP points to the list of
143 statements to be added to the flowgraph. */
145 static void
146 build_tree_cfg (tree *tp)
148 /* Register specific tree functions. */
149 tree_register_cfg_hooks ();
151 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
153 init_empty_tree_cfg ();
155 found_computed_goto = 0;
156 make_blocks (*tp);
158 /* Computed gotos are hell to deal with, especially if there are
159 lots of them with a large number of destinations. So we factor
160 them to a common computed goto location before we build the
161 edge list. After we convert back to normal form, we will un-factor
162 the computed gotos since factoring introduces an unwanted jump. */
163 if (found_computed_goto)
164 factor_computed_gotos ();
166 /* Make sure there is always at least one block, even if it's empty. */
167 if (n_basic_blocks == NUM_FIXED_BLOCKS)
168 create_empty_bb (ENTRY_BLOCK_PTR);
170 /* Adjust the size of the array. */
171 if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
172 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);
174 /* To speed up statement iterator walks, we first purge dead labels. */
175 cleanup_dead_labels ();
177 /* Group case nodes to reduce the number of edges.
178 We do this after cleaning up dead labels because otherwise we miss
179 a lot of obvious case merging opportunities. */
180 group_case_labels ();
182 /* Create the edges of the flowgraph. */
183 make_edges ();
185 /* Debugging dumps. */
187 /* Write the flowgraph to a VCG file. */
189 int local_dump_flags;
190 FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
191 if (vcg_file)
193 tree_cfg2vcg (vcg_file);
194 dump_end (TDI_vcg, vcg_file);
198 #ifdef ENABLE_CHECKING
199 verify_stmts ();
200 #endif
202 /* Dump a textual representation of the flowgraph. */
203 if (dump_file)
204 dump_tree_cfg (dump_file, dump_flags);
207 static unsigned int
208 execute_build_cfg (void)
210 build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
211 return 0;
214 struct tree_opt_pass pass_build_cfg =
216 "cfg", /* name */
217 NULL, /* gate */
218 execute_build_cfg, /* execute */
219 NULL, /* sub */
220 NULL, /* next */
221 0, /* static_pass_number */
222 TV_TREE_CFG, /* tv_id */
223 PROP_gimple_leh, /* properties_required */
224 PROP_cfg, /* properties_provided */
225 0, /* properties_destroyed */
226 0, /* todo_flags_start */
227 TODO_verify_stmts | TODO_cleanup_cfg, /* todo_flags_finish */
228 0 /* letter */
231 /* Search the CFG for any computed gotos. If found, factor them to a
232 common computed goto site. Also record the location of that site so
233 that we can un-factor the gotos after we have converted back to
234 normal form. */
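/* For illustration, the factoring rewrites every

       goto *DEST;

   in the function into

       gotovar = DEST;
       goto <factored_label>;

   where the single factored block ends with a computed goto *gotovar.  */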
236 static void
237 factor_computed_gotos (void)
239 basic_block bb;
240 tree factored_label_decl = NULL;
241 tree var = NULL;
242 tree factored_computed_goto_label = NULL;
243 tree factored_computed_goto = NULL;
245 /* We know there are one or more computed gotos in this function.
246 Examine the last statement in each basic block to see if the block
247 ends with a computed goto. */
249 FOR_EACH_BB (bb)
251 block_stmt_iterator bsi = bsi_last (bb);
252 tree last;
254 if (bsi_end_p (bsi))
255 continue;
256 last = bsi_stmt (bsi);
258 /* Ignore the computed goto we create when we factor the original
259 computed gotos. */
260 if (last == factored_computed_goto)
261 continue;
263 /* If the last statement is a computed goto, factor it. */
264 if (computed_goto_p (last))
266 tree assignment;
268 /* The first time we find a computed goto we need to create
269 the factored goto block and the variable each original
270 computed goto will use for its goto destination. */
271 if (! factored_computed_goto)
273 basic_block new_bb = create_empty_bb (bb);
274 block_stmt_iterator new_bsi = bsi_start (new_bb);
276 /* Create the destination of the factored goto. Each original
277 computed goto will put its desired destination into this
278 variable and jump to the label we create immediately
279 below. */
280 var = create_tmp_var (ptr_type_node, "gotovar");
282 /* Build a label for the new block which will contain the
283 factored computed goto. */
284 factored_label_decl = create_artificial_label ();
285 factored_computed_goto_label
286 = build1 (LABEL_EXPR, void_type_node, factored_label_decl);
287 bsi_insert_after (&new_bsi, factored_computed_goto_label,
288 BSI_NEW_STMT);
290 /* Build our new computed goto. */
291 factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
292 bsi_insert_after (&new_bsi, factored_computed_goto,
293 BSI_NEW_STMT);
296 /* Copy the original computed goto's destination into VAR. */
297 assignment = build_gimple_modify_stmt (var,
298 GOTO_DESTINATION (last));
299 bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);
301 /* And re-vector the computed goto to the new destination. */
302 GOTO_DESTINATION (last) = factored_label_decl;
308 /* Build a flowgraph for the statement_list STMT_LIST. */
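/* Roughly, a new block is started at each label (see stmt_starts_bb_p),
   and the current block is terminated after any statement that transfers
   control, such as gotos, returns and conditionals (see stmt_ends_bb_p).  */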
310 static void
311 make_blocks (tree stmt_list)
313 tree_stmt_iterator i = tsi_start (stmt_list);
314 tree stmt = NULL;
315 bool start_new_block = true;
316 bool first_stmt_of_list = true;
317 basic_block bb = ENTRY_BLOCK_PTR;
319 while (!tsi_end_p (i))
321 tree prev_stmt;
323 prev_stmt = stmt;
324 stmt = tsi_stmt (i);
326 /* If the statement starts a new basic block or if we have determined
327 in a previous pass that we need to create a new block for STMT, do
328 so now. */
329 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
331 if (!first_stmt_of_list)
332 stmt_list = tsi_split_statement_list_before (&i);
333 bb = create_basic_block (stmt_list, NULL, bb);
334 start_new_block = false;
337 /* Now add STMT to BB and create the subgraphs for special statement
338 codes. */
339 set_bb_for_stmt (stmt, bb);
341 if (computed_goto_p (stmt))
342 found_computed_goto = true;
344 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
345 next iteration. */
346 if (stmt_ends_bb_p (stmt))
347 start_new_block = true;
349 tsi_next (&i);
350 first_stmt_of_list = false;
355 /* Create and return a new empty basic block after bb AFTER. */
357 static basic_block
358 create_bb (void *h, void *e, basic_block after)
360 basic_block bb;
362 gcc_assert (!e);
364 /* Create and initialize a new basic block. Since alloc_block uses
365 ggc_alloc_cleared to allocate a basic block, we do not have to
366 clear the newly allocated basic block here. */
367 bb = alloc_block ();
369 bb->index = last_basic_block;
370 bb->flags = BB_NEW;
371 bb->stmt_list = h ? (tree) h : alloc_stmt_list ();
373 /* Add the new block to the linked list of blocks. */
374 link_block (bb, after);
376 /* Grow the basic block array if needed. */
377 if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
379 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
380 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
383 /* Add the newly created block to the array. */
384 SET_BASIC_BLOCK (last_basic_block, bb);
386 n_basic_blocks++;
387 last_basic_block++;
389 return bb;
393 /*---------------------------------------------------------------------------
394 Edge creation
395 ---------------------------------------------------------------------------*/
397 /* Fold COND_EXPR_COND of each COND_EXPR. */
399 void
400 fold_cond_expr_cond (void)
402 basic_block bb;
404 FOR_EACH_BB (bb)
406 tree stmt = last_stmt (bb);
408 if (stmt
409 && TREE_CODE (stmt) == COND_EXPR)
411 tree cond;
412 bool zerop, onep;
414 fold_defer_overflow_warnings ();
415 cond = fold (COND_EXPR_COND (stmt));
416 zerop = integer_zerop (cond);
417 onep = integer_onep (cond);
418 fold_undefer_overflow_warnings (zerop || onep, stmt,
419 WARN_STRICT_OVERFLOW_CONDITIONAL);
420 if (zerop)
421 COND_EXPR_COND (stmt) = boolean_false_node;
422 else if (onep)
423 COND_EXPR_COND (stmt) = boolean_true_node;
428 /* Join all the blocks in the flowgraph. */
430 static void
431 make_edges (void)
433 basic_block bb;
434 struct omp_region *cur_region = NULL;
436 /* Create an edge from entry to the first block with executable
437 statements in it. */
438 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);
440 /* Traverse the basic block array placing edges. */
441 FOR_EACH_BB (bb)
443 tree last = last_stmt (bb);
444 bool fallthru;
446 if (last)
448 enum tree_code code = TREE_CODE (last);
449 switch (code)
451 case GOTO_EXPR:
452 make_goto_expr_edges (bb);
453 fallthru = false;
454 break;
455 case RETURN_EXPR:
456 make_edge (bb, EXIT_BLOCK_PTR, 0);
457 fallthru = false;
458 break;
459 case COND_EXPR:
460 make_cond_expr_edges (bb);
461 fallthru = false;
462 break;
463 case SWITCH_EXPR:
464 make_switch_expr_edges (bb);
465 fallthru = false;
466 break;
467 case RESX_EXPR:
468 make_eh_edges (last);
469 fallthru = false;
470 break;
472 case CALL_EXPR:
473 /* If this function receives a nonlocal goto, then we need to
474 make edges from this call site to all the nonlocal goto
475 handlers. */
476 if (tree_can_make_abnormal_goto (last))
477 make_abnormal_goto_edges (bb, true);
479 /* If this statement has reachable exception handlers, then
480 create abnormal edges to them. */
481 make_eh_edges (last);
483 /* Some calls are known not to return. */
484 fallthru = !(call_expr_flags (last) & ECF_NORETURN);
485 break;
487 case MODIFY_EXPR:
488 gcc_unreachable ();
490 case GIMPLE_MODIFY_STMT:
491 if (is_ctrl_altering_stmt (last))
493 /* A GIMPLE_MODIFY_STMT may have a CALL_EXPR on its RHS and
494 the CALL_EXPR may have an abnormal edge. Search the RHS
495 for this case and create any required edges. */
496 if (tree_can_make_abnormal_goto (last))
497 make_abnormal_goto_edges (bb, true);
499 make_eh_edges (last);
501 fallthru = true;
502 break;
504 case OMP_PARALLEL:
505 case OMP_FOR:
506 case OMP_SINGLE:
507 case OMP_MASTER:
508 case OMP_ORDERED:
509 case OMP_CRITICAL:
510 case OMP_SECTION:
511 cur_region = new_omp_region (bb, code, cur_region);
512 fallthru = true;
513 break;
515 case OMP_SECTIONS:
516 cur_region = new_omp_region (bb, code, cur_region);
517 fallthru = false;
518 break;
520 case OMP_RETURN:
521 /* In the case of an OMP_SECTION, the edge will go somewhere
522 other than the next block. This will be created later. */
523 cur_region->exit = bb;
524 fallthru = cur_region->type != OMP_SECTION;
525 cur_region = cur_region->outer;
526 break;
528 case OMP_CONTINUE:
529 cur_region->cont = bb;
530 switch (cur_region->type)
532 case OMP_FOR:
533 /* ??? Technically there should be some sort of loopback
534 edge here, but it goes to a block that doesn't exist yet,
535 and without it, updating the ssa form would be a real
536 bear. Fortunately, we don't yet do ssa before expanding
537 these nodes. */
538 break;
540 case OMP_SECTIONS:
541 /* Wire up the edges into and out of the nested sections. */
542 /* ??? Similarly wrt loopback. */
544 struct omp_region *i;
545 for (i = cur_region->inner; i ; i = i->next)
547 gcc_assert (i->type == OMP_SECTION);
548 make_edge (cur_region->entry, i->entry, 0);
549 make_edge (i->exit, bb, EDGE_FALLTHRU);
552 break;
554 default:
555 gcc_unreachable ();
557 fallthru = true;
558 break;
560 default:
561 gcc_assert (!stmt_ends_bb_p (last));
562 fallthru = true;
565 else
566 fallthru = true;
568 if (fallthru)
569 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
572 if (root_omp_region)
573 free_omp_regions ();
575 /* Fold COND_EXPR_COND of each COND_EXPR. */
576 fold_cond_expr_cond ();
580 /* Create the edges for a COND_EXPR starting at block BB.
581 At this point, both clauses must contain only simple gotos. */
583 static void
584 make_cond_expr_edges (basic_block bb)
586 tree entry = last_stmt (bb);
587 basic_block then_bb, else_bb;
588 tree then_label, else_label;
589 edge e;
591 gcc_assert (entry);
592 gcc_assert (TREE_CODE (entry) == COND_EXPR);
594 /* Entry basic blocks for each component. */
595 then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
596 else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
597 then_bb = label_to_block (then_label);
598 else_bb = label_to_block (else_label);
600 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
601 #ifdef USE_MAPPED_LOCATION
602 e->goto_locus = EXPR_LOCATION (COND_EXPR_THEN (entry));
603 #else
604 e->goto_locus = EXPR_LOCUS (COND_EXPR_THEN (entry));
605 #endif
606 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
607 if (e)
609 #ifdef USE_MAPPED_LOCATION
610 e->goto_locus = EXPR_LOCATION (COND_EXPR_ELSE (entry));
611 #else
612 e->goto_locus = EXPR_LOCUS (COND_EXPR_ELSE (entry));
613 #endif
618 /* Called for each element in the edge_to_cases hash table as we
619 delete that table.
621 Clear all the TREE_CHAINs to prevent problems with copying of
622 SWITCH_EXPRs and structure sharing rules, then free the hash table
623 element. */
625 static bool
626 edge_to_cases_cleanup (void *key ATTRIBUTE_UNUSED, void **value,
627 void *data ATTRIBUTE_UNUSED)
629 tree t, next;
631 for (t = (tree) *value; t; t = next)
633 next = TREE_CHAIN (t);
634 TREE_CHAIN (t) = NULL;
637 *value = NULL;
638 return false;
641 /* Start recording information mapping edges to case labels. */
643 void
644 start_recording_case_labels (void)
646 gcc_assert (edge_to_cases == NULL);
647 edge_to_cases = pointer_map_create ();
650 /* Return nonzero if we are recording information for case labels. */
652 static bool
653 recording_case_labels_p (void)
655 return (edge_to_cases != NULL);
658 /* Stop recording information mapping edges to case labels and
659 remove any information we have recorded. */
660 void
661 end_recording_case_labels (void)
663 pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
664 pointer_map_destroy (edge_to_cases);
665 edge_to_cases = NULL;
668 /* If we are inside a {start,end}_recording_cases block, then return
669 a chain of CASE_LABEL_EXPRs from T which reference E.
671 Otherwise return NULL. */
673 static tree
674 get_cases_for_edge (edge e, tree t)
676 void **slot;
677 size_t i, n;
678 tree vec;
680 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
681 chains available. Return NULL so the caller can detect this case. */
682 if (!recording_case_labels_p ())
683 return NULL;
685 slot = pointer_map_contains (edge_to_cases, e);
686 if (slot)
687 return (tree) *slot;
689 /* If we did not find E in the hash table, then this must be the first
690 time we have been queried for information about E & T. Add all the
691 elements from T to the hash table then perform the query again. */
693 vec = SWITCH_LABELS (t);
694 n = TREE_VEC_LENGTH (vec);
695 for (i = 0; i < n; i++)
697 tree elt = TREE_VEC_ELT (vec, i);
698 tree lab = CASE_LABEL (elt);
699 basic_block label_bb = label_to_block (lab);
700 edge this_edge = find_edge (e->src, label_bb);
702 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
703 a new chain. */
704 slot = pointer_map_insert (edge_to_cases, this_edge);
705 TREE_CHAIN (elt) = (tree) *slot;
706 *slot = elt;
709 return (tree) *pointer_map_contains (edge_to_cases, e);
712 /* Create the edges for a SWITCH_EXPR starting at block BB.
713 At this point, the switch body has been lowered and the
714 SWITCH_LABELS filled in, so this is in effect a multi-way branch. */
716 static void
717 make_switch_expr_edges (basic_block bb)
719 tree entry = last_stmt (bb);
720 size_t i, n;
721 tree vec;
723 vec = SWITCH_LABELS (entry);
724 n = TREE_VEC_LENGTH (vec);
726 for (i = 0; i < n; ++i)
728 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
729 basic_block label_bb = label_to_block (lab);
730 make_edge (bb, label_bb, 0);
735 /* Return the basic block holding label DEST. */
737 basic_block
738 label_to_block_fn (struct function *ifun, tree dest)
740 int uid = LABEL_DECL_UID (dest);
742 /* We would die hard if faced with an undefined label. Emit a label to
743 the very first basic block. This will hopefully make even the dataflow
744 and undefined variable warnings come out right.
745 if ((errorcount || sorrycount) && uid < 0)
747 block_stmt_iterator bsi =
748 bsi_start (BASIC_BLOCK (NUM_FIXED_BLOCKS));
749 tree stmt;
751 stmt = build1 (LABEL_EXPR, void_type_node, dest);
752 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
753 uid = LABEL_DECL_UID (dest);
755 if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
756 <= (unsigned int) uid)
757 return NULL;
758 return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
761 /* Create edges for an abnormal goto statement at block BB. If FOR_CALL
762 is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */
764 void
765 make_abnormal_goto_edges (basic_block bb, bool for_call)
767 basic_block target_bb;
768 block_stmt_iterator bsi;
770 FOR_EACH_BB (target_bb)
771 for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
773 tree target = bsi_stmt (bsi);
775 if (TREE_CODE (target) != LABEL_EXPR)
776 break;
778 target = LABEL_EXPR_LABEL (target);
780 /* Make an edge to every label block that has been marked as a
781 potential target for a computed goto or a non-local goto. */
782 if ((FORCED_LABEL (target) && !for_call)
783 || (DECL_NONLOCAL (target) && for_call))
785 make_edge (bb, target_bb, EDGE_ABNORMAL);
786 break;
791 /* Create edges for a goto statement at block BB. */
793 static void
794 make_goto_expr_edges (basic_block bb)
796 block_stmt_iterator last = bsi_last (bb);
797 tree goto_t = bsi_stmt (last);
799 /* A simple GOTO creates normal edges. */
800 if (simple_goto_p (goto_t))
802 tree dest = GOTO_DESTINATION (goto_t);
803 edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
804 #ifdef USE_MAPPED_LOCATION
805 e->goto_locus = EXPR_LOCATION (goto_t);
806 #else
807 e->goto_locus = EXPR_LOCUS (goto_t);
808 #endif
809 bsi_remove (&last, true);
810 return;
813 /* A computed GOTO creates abnormal edges. */
814 make_abnormal_goto_edges (bb, false);
818 /*---------------------------------------------------------------------------
819 Flowgraph analysis
820 ---------------------------------------------------------------------------*/
822 /* Clean up useless labels in basic blocks. This is something we wish
823 to do early because it allows us to group case labels before creating
824 the edges for the CFG, and it speeds up block statement iterators in
825 all passes later on.
826 We only run this pass once; running it more than once is probably not
827 profitable. */
829 /* A map from basic block index to the leading label of that block. */
830 static tree *label_for_bb;
832 /* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
833 static void
834 update_eh_label (struct eh_region *region)
836 tree old_label = get_eh_region_tree_label (region);
837 if (old_label)
839 tree new_label;
840 basic_block bb = label_to_block (old_label);
842 /* ??? After optimizing, there may be EH regions with labels
843 that have already been removed from the function body, so
844 there is no basic block for them. */
845 if (! bb)
846 return;
848 new_label = label_for_bb[bb->index];
849 set_eh_region_tree_label (region, new_label);
853 /* Given LABEL return the first label in the same basic block. */
854 static tree
855 main_block_label (tree label)
857 basic_block bb = label_to_block (label);
859 /* label_to_block possibly inserted undefined label into the chain. */
860 if (!label_for_bb[bb->index])
861 label_for_bb[bb->index] = label;
862 return label_for_bb[bb->index];
865 /* Clean up redundant labels. This is a three-step process:
866 1) Find the leading label for each block.
867 2) Redirect all references to labels to the leading labels.
868 3) Clean up all useless labels. */
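/* A small example (sketch): if a block starts with

       L1: <artificial label L2>:
         x = x + 1;

   and some jump targets L2, the jump is redirected to L1 and the now
   unused artificial label L2 is deleted.  */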
870 void
871 cleanup_dead_labels (void)
873 basic_block bb;
874 label_for_bb = XCNEWVEC (tree, last_basic_block);
876 /* Find a suitable label for each block. We use the first user-defined
877 label if there is one, or otherwise just the first label we see. */
878 FOR_EACH_BB (bb)
880 block_stmt_iterator i;
882 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
884 tree label, stmt = bsi_stmt (i);
886 if (TREE_CODE (stmt) != LABEL_EXPR)
887 break;
889 label = LABEL_EXPR_LABEL (stmt);
891 /* If we have not yet seen a label for the current block,
892 remember this one and see if there are more labels. */
893 if (! label_for_bb[bb->index])
895 label_for_bb[bb->index] = label;
896 continue;
899 /* If we did see a label for the current block already, but it
900 is an artificially created label, replace it if the current
901 label is a user defined label. */
902 if (! DECL_ARTIFICIAL (label)
903 && DECL_ARTIFICIAL (label_for_bb[bb->index]))
905 label_for_bb[bb->index] = label;
906 break;
911 /* Now redirect all jumps/branches to the selected label.
912 First do so for each block ending in a control statement. */
913 FOR_EACH_BB (bb)
915 tree stmt = last_stmt (bb);
916 if (!stmt)
917 continue;
919 switch (TREE_CODE (stmt))
921 case COND_EXPR:
923 tree true_branch, false_branch;
925 true_branch = COND_EXPR_THEN (stmt);
926 false_branch = COND_EXPR_ELSE (stmt);
928 GOTO_DESTINATION (true_branch)
929 = main_block_label (GOTO_DESTINATION (true_branch));
930 GOTO_DESTINATION (false_branch)
931 = main_block_label (GOTO_DESTINATION (false_branch));
933 break;
936 case SWITCH_EXPR:
938 size_t i;
939 tree vec = SWITCH_LABELS (stmt);
940 size_t n = TREE_VEC_LENGTH (vec);
942 /* Replace all destination labels. */
943 for (i = 0; i < n; ++i)
945 tree elt = TREE_VEC_ELT (vec, i);
946 tree label = main_block_label (CASE_LABEL (elt));
947 CASE_LABEL (elt) = label;
949 break;
952 /* We have to handle GOTO_EXPRs until they're removed, and we don't
953 remove them until after we've created the CFG edges. */
954 case GOTO_EXPR:
955 if (! computed_goto_p (stmt))
957 GOTO_DESTINATION (stmt)
958 = main_block_label (GOTO_DESTINATION (stmt));
959 break;
962 default:
963 break;
967 for_each_eh_region (update_eh_label);
969 /* Finally, purge dead labels. All user-defined labels and labels that
970 can be the target of non-local gotos and labels which have their
971 address taken are preserved. */
972 FOR_EACH_BB (bb)
974 block_stmt_iterator i;
975 tree label_for_this_bb = label_for_bb[bb->index];
977 if (! label_for_this_bb)
978 continue;
980 for (i = bsi_start (bb); !bsi_end_p (i); )
982 tree label, stmt = bsi_stmt (i);
984 if (TREE_CODE (stmt) != LABEL_EXPR)
985 break;
987 label = LABEL_EXPR_LABEL (stmt);
989 if (label == label_for_this_bb
990 || ! DECL_ARTIFICIAL (label)
991 || DECL_NONLOCAL (label)
992 || FORCED_LABEL (label))
993 bsi_next (&i);
994 else
995 bsi_remove (&i, true);
999 free (label_for_bb);
1002 /* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1003 and scan the sorted vector of cases. Combine the ones jumping to the
1004 same label.
1005 E.g. three separate entries 1: 2: 3: become one entry 1..3: */
1007 void
1008 group_case_labels (void)
1010 basic_block bb;
1012 FOR_EACH_BB (bb)
1014 tree stmt = last_stmt (bb);
1015 if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
1017 tree labels = SWITCH_LABELS (stmt);
1018 int old_size = TREE_VEC_LENGTH (labels);
1019 int i, j, new_size = old_size;
1020 tree default_case = TREE_VEC_ELT (labels, old_size - 1);
1021 tree default_label;
1023 /* The default label is always the last case in a switch
1024 statement after gimplification. */
1025 default_label = CASE_LABEL (default_case);
1027 /* Look for possible opportunities to merge cases.
1028 Ignore the last element of the label vector because it
1029 must be the default case. */
1030 i = 0;
1031 while (i < old_size - 1)
1033 tree base_case, base_label, base_high;
1034 base_case = TREE_VEC_ELT (labels, i);
1036 gcc_assert (base_case);
1037 base_label = CASE_LABEL (base_case);
1039 /* Discard cases that have the same destination as the
1040 default case. */
1041 if (base_label == default_label)
1043 TREE_VEC_ELT (labels, i) = NULL_TREE;
1044 i++;
1045 new_size--;
1046 continue;
1049 base_high = CASE_HIGH (base_case) ?
1050 CASE_HIGH (base_case) : CASE_LOW (base_case);
1051 i++;
1052 /* Try to merge case labels. Break out when we reach the end
1053 of the label vector or when we cannot merge the next case
1054 label with the current one. */
1055 while (i < old_size - 1)
1057 tree merge_case = TREE_VEC_ELT (labels, i);
1058 tree merge_label = CASE_LABEL (merge_case);
1059 tree t = int_const_binop (PLUS_EXPR, base_high,
1060 integer_one_node, 1);
1062 /* Merge the cases if they jump to the same place,
1063 and their ranges are consecutive. */
1064 if (merge_label == base_label
1065 && tree_int_cst_equal (CASE_LOW (merge_case), t))
1067 base_high = CASE_HIGH (merge_case) ?
1068 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1069 CASE_HIGH (base_case) = base_high;
1070 TREE_VEC_ELT (labels, i) = NULL_TREE;
1071 new_size--;
1072 i++;
1074 else
1075 break;
1079 /* Compress the case labels in the label vector, and adjust the
1080 length of the vector. */
1081 for (i = 0, j = 0; i < new_size; i++)
1083 while (! TREE_VEC_ELT (labels, j))
1084 j++;
1085 TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
1087 TREE_VEC_LENGTH (labels) = new_size;
1092 /* Checks whether we can merge block B into block A. */
1094 static bool
1095 tree_can_merge_blocks_p (basic_block a, basic_block b)
1097 tree stmt;
1098 block_stmt_iterator bsi;
1099 tree phi;
1101 if (!single_succ_p (a))
1102 return false;
1104 if (single_succ_edge (a)->flags & EDGE_ABNORMAL)
1105 return false;
1107 if (single_succ (a) != b)
1108 return false;
1110 if (!single_pred_p (b))
1111 return false;
1113 if (b == EXIT_BLOCK_PTR)
1114 return false;
1116 /* If A ends by a statement causing exceptions or something similar, we
1117 cannot merge the blocks. */
1118 stmt = last_stmt (a);
1119 if (stmt && stmt_ends_bb_p (stmt))
1120 return false;
1122 /* Do not allow a block with only a non-local label to be merged. */
1123 if (stmt && TREE_CODE (stmt) == LABEL_EXPR
1124 && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
1125 return false;
1127 /* It must be possible to eliminate all phi nodes in B. If ssa form
1128 is not up-to-date, we cannot eliminate any phis; however, if only
1129 some symbols as whole are marked for renaming, this is not a problem,
1130 as phi nodes for those symbols are irrelevant in updating anyway. */
1131 phi = phi_nodes (b);
1132 if (phi)
1134 if (name_mappings_registered_p ())
1135 return false;
1137 for (; phi; phi = PHI_CHAIN (phi))
1138 if (!is_gimple_reg (PHI_RESULT (phi))
1139 && !may_propagate_copy (PHI_RESULT (phi), PHI_ARG_DEF (phi, 0)))
1140 return false;
1143 /* Do not remove user labels. */
1144 for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
1146 stmt = bsi_stmt (bsi);
1147 if (TREE_CODE (stmt) != LABEL_EXPR)
1148 break;
1149 if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
1150 return false;
1153 /* Protect the loop latches. */
1154 if (current_loops
1155 && b->loop_father->latch == b)
1156 return false;
1158 return true;
1161 /* Replaces all uses of NAME by VAL. */
1163 void
1164 replace_uses_by (tree name, tree val)
1166 imm_use_iterator imm_iter;
1167 use_operand_p use;
1168 tree stmt;
1169 edge e;
1171 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1173 if (TREE_CODE (stmt) != PHI_NODE)
1174 push_stmt_changes (&stmt);
1176 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1178 replace_exp (use, val);
1180 if (TREE_CODE (stmt) == PHI_NODE)
1182 e = PHI_ARG_EDGE (stmt, PHI_ARG_INDEX_FROM_USE (use));
1183 if (e->flags & EDGE_ABNORMAL)
1185 /* This can only occur for virtual operands, since
1186 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1187 would prevent replacement. */
1188 gcc_assert (!is_gimple_reg (name));
1189 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1194 if (TREE_CODE (stmt) != PHI_NODE)
1196 tree rhs;
1198 fold_stmt_inplace (stmt);
1200 /* FIXME. This should go in pop_stmt_changes. */
1201 rhs = get_rhs (stmt);
1202 if (TREE_CODE (rhs) == ADDR_EXPR)
1203 recompute_tree_invariant_for_addr_expr (rhs);
1205 maybe_clean_or_replace_eh_stmt (stmt, stmt);
1207 pop_stmt_changes (&stmt);
1211 gcc_assert (has_zero_uses (name));
1213 /* Also update the trees stored in loop structures. */
1214 if (current_loops)
1216 struct loop *loop;
1217 loop_iterator li;
1219 FOR_EACH_LOOP (li, loop, 0)
1221 substitute_in_loop_info (loop, name, val);
1226 /* Merge block B into block A. */
1228 static void
1229 tree_merge_blocks (basic_block a, basic_block b)
1231 block_stmt_iterator bsi;
1232 tree_stmt_iterator last;
1233 tree phi;
1235 if (dump_file)
1236 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1238 /* Remove all single-valued PHI nodes from block B of the form
1239 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1240 bsi = bsi_last (a);
1241 for (phi = phi_nodes (b); phi; phi = phi_nodes (b))
1243 tree def = PHI_RESULT (phi), use = PHI_ARG_DEF (phi, 0);
1244 tree copy;
1245 bool may_replace_uses = may_propagate_copy (def, use);
1247 /* In case we have loops to care about, do not propagate arguments of
1248 loop closed ssa phi nodes. */
1249 if (current_loops
1250 && is_gimple_reg (def)
1251 && TREE_CODE (use) == SSA_NAME
1252 && a->loop_father != b->loop_father)
1253 may_replace_uses = false;
1255 if (!may_replace_uses)
1257 gcc_assert (is_gimple_reg (def));
1259 /* Note that just emitting the copies is fine -- there is no problem
1260 with ordering of phi nodes. This is because A is the single
1261 predecessor of B, therefore results of the phi nodes cannot
1262 appear as arguments of the phi nodes. */
1263 copy = build_gimple_modify_stmt (def, use);
1264 bsi_insert_after (&bsi, copy, BSI_NEW_STMT);
1265 SSA_NAME_DEF_STMT (def) = copy;
1266 remove_phi_node (phi, NULL, false);
1268 else
1270 replace_uses_by (def, use);
1271 remove_phi_node (phi, NULL, true);
1275 /* Ensure that B follows A. */
1276 move_block_after (b, a);
1278 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1279 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1281 /* Remove labels from B and set bb_for_stmt to A for other statements. */
1282 for (bsi = bsi_start (b); !bsi_end_p (bsi);)
1284 if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
1286 tree label = bsi_stmt (bsi);
1288 bsi_remove (&bsi, false);
1289 /* Now that we can thread computed gotos, we might have
1290 a situation where we have a forced label in block B.
1291 However, the label at the start of block B might still be
1292 used in other ways (think about the runtime checking for
1293 Fortran assigned gotos). So we cannot just delete the
1294 label. Instead we move the label to the start of block A. */
1295 if (FORCED_LABEL (LABEL_EXPR_LABEL (label)))
1297 block_stmt_iterator dest_bsi = bsi_start (a);
1298 bsi_insert_before (&dest_bsi, label, BSI_NEW_STMT);
1301 else
1303 change_bb_for_stmt (bsi_stmt (bsi), a);
1304 bsi_next (&bsi);
1308 /* Merge the chains. */
1309 last = tsi_last (a->stmt_list);
1310 tsi_link_after (&last, b->stmt_list, TSI_NEW_STMT);
1311 b->stmt_list = NULL;
1315 /* Return the one of the two successors of BB that is not
1316 reached by a complex edge, if there is one. Else, return BB. We use
1317 this in optimizations that use post-dominators for their heuristics,
1318 to catch the cases in C++ where function calls are involved. */
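/* For instance, a block ending in a call that may throw has a normal
   fall-through successor and an EH successor; the EH edge is complex,
   so the fall-through destination is returned.  */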
1320 basic_block
1321 single_noncomplex_succ (basic_block bb)
1323 edge e0, e1;
1324 if (EDGE_COUNT (bb->succs) != 2)
1325 return bb;
1327 e0 = EDGE_SUCC (bb, 0);
1328 e1 = EDGE_SUCC (bb, 1);
1329 if (e0->flags & EDGE_COMPLEX)
1330 return e1->dest;
1331 if (e1->flags & EDGE_COMPLEX)
1332 return e0->dest;
1334 return bb;
1338 /* Walk the function tree removing unnecessary statements.
1340 * Empty statement nodes are removed
1342 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1344 * Unnecessary COND_EXPRs are removed
1346 * Some unnecessary BIND_EXPRs are removed
1348 Clearly more work could be done. The trick is doing the analysis
1349 and removal fast enough to be a net improvement in compile times.
1351 Note that when we remove a control structure such as a COND_EXPR,
1352 BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1353 to ensure we eliminate all the useless code. */
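/* As a sketch of why repetition matters: removing an always-false
   COND_EXPR may leave behind an empty BIND_EXPR or an empty
   TRY_FINALLY_EXPR body that only the next iteration, driven by the
   repeat flag in struct rus_data below, can delete.  */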
1355 struct rus_data
1357 tree *last_goto;
1358 bool repeat;
1359 bool may_throw;
1360 bool may_branch;
1361 bool has_label;
1364 static void remove_useless_stmts_1 (tree *, struct rus_data *);
1366 static bool
1367 remove_useless_stmts_warn_notreached (tree stmt)
1369 if (EXPR_HAS_LOCATION (stmt))
1371 location_t loc = EXPR_LOCATION (stmt);
1372 if (LOCATION_LINE (loc) > 0)
1374 warning (0, "%Hwill never be executed", &loc);
1375 return true;
1379 switch (TREE_CODE (stmt))
1381 case STATEMENT_LIST:
1383 tree_stmt_iterator i;
1384 for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
1385 if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
1386 return true;
1388 break;
1390 case COND_EXPR:
1391 if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
1392 return true;
1393 if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
1394 return true;
1395 if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
1396 return true;
1397 break;
1399 case TRY_FINALLY_EXPR:
1400 case TRY_CATCH_EXPR:
1401 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
1402 return true;
1403 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
1404 return true;
1405 break;
1407 case CATCH_EXPR:
1408 return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
1409 case EH_FILTER_EXPR:
1410 return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
1411 case BIND_EXPR:
1412 return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));
1414 default:
1415 /* Not a live container. */
1416 break;
1419 return false;
1422 static void
1423 remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
1425 tree then_clause, else_clause, cond;
1426 bool save_has_label, then_has_label, else_has_label;
1428 save_has_label = data->has_label;
1429 data->has_label = false;
1430 data->last_goto = NULL;
1432 remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);
1434 then_has_label = data->has_label;
1435 data->has_label = false;
1436 data->last_goto = NULL;
1438 remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);
1440 else_has_label = data->has_label;
1441 data->has_label = save_has_label | then_has_label | else_has_label;
1443 then_clause = COND_EXPR_THEN (*stmt_p);
1444 else_clause = COND_EXPR_ELSE (*stmt_p);
1445 cond = fold (COND_EXPR_COND (*stmt_p));
1447 /* If neither arm does anything at all, we can remove the whole IF. */
1448 if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
1450 *stmt_p = build_empty_stmt ();
1451 data->repeat = true;
1454 /* If there are no reachable statements in an arm, then we can
1455 zap the entire conditional. */
1456 else if (integer_nonzerop (cond) && !else_has_label)
1458 if (warn_notreached)
1459 remove_useless_stmts_warn_notreached (else_clause);
1460 *stmt_p = then_clause;
1461 data->repeat = true;
1463 else if (integer_zerop (cond) && !then_has_label)
1465 if (warn_notreached)
1466 remove_useless_stmts_warn_notreached (then_clause);
1467 *stmt_p = else_clause;
1468 data->repeat = true;
1471 /* Check a couple of simple things on then/else with single stmts. */
1472 else
1474 tree then_stmt = expr_only (then_clause);
1475 tree else_stmt = expr_only (else_clause);
1477 /* Notice branches to a common destination. */
1478 if (then_stmt && else_stmt
1479 && TREE_CODE (then_stmt) == GOTO_EXPR
1480 && TREE_CODE (else_stmt) == GOTO_EXPR
1481 && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
1483 *stmt_p = then_stmt;
1484 data->repeat = true;
1487 /* If the THEN/ELSE clause merely assigns a value to a variable or
1488 parameter which is already known to contain that value, then
1489 remove the useless THEN/ELSE clause. */
1490 else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1492 if (else_stmt
1493 && TREE_CODE (else_stmt) == GIMPLE_MODIFY_STMT
1494 && GIMPLE_STMT_OPERAND (else_stmt, 0) == cond
1495 && integer_zerop (GIMPLE_STMT_OPERAND (else_stmt, 1)))
1496 COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
1498 else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
1499 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1500 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1501 && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
1503 tree stmt = (TREE_CODE (cond) == EQ_EXPR
1504 ? then_stmt : else_stmt);
1505 tree *location = (TREE_CODE (cond) == EQ_EXPR
1506 ? &COND_EXPR_THEN (*stmt_p)
1507 : &COND_EXPR_ELSE (*stmt_p));
1509 if (stmt
1510 && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
1511 && GIMPLE_STMT_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
1512 && GIMPLE_STMT_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
1513 *location = alloc_stmt_list ();
1517 /* Protect GOTOs in the arm of COND_EXPRs from being removed. They
1518 would be re-introduced during lowering. */
1519 data->last_goto = NULL;
1523 static void
1524 remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
1526 bool save_may_branch, save_may_throw;
1527 bool this_may_branch, this_may_throw;
1529 /* Collect may_branch and may_throw information for the body only. */
1530 save_may_branch = data->may_branch;
1531 save_may_throw = data->may_throw;
1532 data->may_branch = false;
1533 data->may_throw = false;
1534 data->last_goto = NULL;
1536 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1538 this_may_branch = data->may_branch;
1539 this_may_throw = data->may_throw;
1540 data->may_branch |= save_may_branch;
1541 data->may_throw |= save_may_throw;
1542 data->last_goto = NULL;
1544 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1546 /* If the body is empty, then we can emit the FINALLY block without
1547 the enclosing TRY_FINALLY_EXPR. */
1548 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
1550 *stmt_p = TREE_OPERAND (*stmt_p, 1);
1551 data->repeat = true;
1554 /* If the handler is empty, then we can emit the TRY block without
1555 the enclosing TRY_FINALLY_EXPR. */
1556 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1558 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1559 data->repeat = true;
1562 /* If the body neither throws, nor branches, then we can safely
1563 string the TRY and FINALLY blocks together. */
1564 else if (!this_may_branch && !this_may_throw)
1566 tree stmt = *stmt_p;
1567 *stmt_p = TREE_OPERAND (stmt, 0);
1568 append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
1569 data->repeat = true;
1574 static void
1575 remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
1577 bool save_may_throw, this_may_throw;
1578 tree_stmt_iterator i;
1579 tree stmt;
1581 /* Collect may_throw information for the body only. */
1582 save_may_throw = data->may_throw;
1583 data->may_throw = false;
1584 data->last_goto = NULL;
1586 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1588 this_may_throw = data->may_throw;
1589 data->may_throw = save_may_throw;
1591 /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
1592 if (!this_may_throw)
1594 if (warn_notreached)
1595 remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
1596 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1597 data->repeat = true;
1598 return;
1601 /* Process the catch clause specially. We may be able to tell that
1602 no exceptions propagate past this point. */
1604 this_may_throw = true;
1605 i = tsi_start (TREE_OPERAND (*stmt_p, 1));
1606 stmt = tsi_stmt (i);
1607 data->last_goto = NULL;
1609 switch (TREE_CODE (stmt))
1611 case CATCH_EXPR:
1612 for (; !tsi_end_p (i); tsi_next (&i))
1614 stmt = tsi_stmt (i);
1615 /* If we catch all exceptions, then the body does not
1616 propagate exceptions past this point. */
1617 if (CATCH_TYPES (stmt) == NULL)
1618 this_may_throw = false;
1619 data->last_goto = NULL;
1620 remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
1622 break;
1624 case EH_FILTER_EXPR:
1625 if (EH_FILTER_MUST_NOT_THROW (stmt))
1626 this_may_throw = false;
1627 else if (EH_FILTER_TYPES (stmt) == NULL)
1628 this_may_throw = false;
1629 remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
1630 break;
1632 default:
1633 /* Otherwise this is a cleanup. */
1634 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1636 /* If the cleanup is empty, then we can emit the TRY block without
1637 the enclosing TRY_CATCH_EXPR. */
1638 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1640 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1641 data->repeat = true;
1643 break;
1645 data->may_throw |= this_may_throw;
1649 static void
1650 remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
1652 tree block;
1654 /* First remove anything underneath the BIND_EXPR. */
1655 remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);
1657 /* If the BIND_EXPR has no variables, then we can pull everything
1658 up one level and remove the BIND_EXPR, unless this is the toplevel
1659 BIND_EXPR for the current function or an inlined function.
1661 When this situation occurs we will want to apply this
1662 optimization again. */
1663 block = BIND_EXPR_BLOCK (*stmt_p);
1664 if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
1665 && *stmt_p != DECL_SAVED_TREE (current_function_decl)
1666 && (! block
1667 || ! BLOCK_ABSTRACT_ORIGIN (block)
1668 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1669 != FUNCTION_DECL)))
1671 *stmt_p = BIND_EXPR_BODY (*stmt_p);
1672 data->repeat = true;
1677 static void
1678 remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
1680 tree dest = GOTO_DESTINATION (*stmt_p);
1682 data->may_branch = true;
1683 data->last_goto = NULL;
1685 /* Record the last goto expr, so that we can delete it if unnecessary. */
1686 if (TREE_CODE (dest) == LABEL_DECL)
1687 data->last_goto = stmt_p;
1691 static void
1692 remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
1694 tree label = LABEL_EXPR_LABEL (*stmt_p);
1696 data->has_label = true;
1698 /* We do want to jump across non-local label receiver code. */
1699 if (DECL_NONLOCAL (label))
1700 data->last_goto = NULL;
1702 else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
1704 *data->last_goto = build_empty_stmt ();
1705 data->repeat = true;
1708 /* ??? Add something here to delete unused labels. */
1712 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
1713 decl. This allows us to eliminate redundant or useless
1714 calls to "const" functions.
1716 The gimplifier already does the same operation, but we may notice functions
1717 being const or pure only after their calls have been gimplified, so we need
1718 to update the flags here as well. */
1720 static void
1721 update_call_expr_flags (tree call)
1723 tree decl = get_callee_fndecl (call);
1724 if (!decl)
1725 return;
1726 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1727 TREE_SIDE_EFFECTS (call) = 0;
1728 if (TREE_NOTHROW (decl))
1729 TREE_NOTHROW (call) = 1;
1733 /* T is a CALL_EXPR. Set current_function_calls_* flags. */
1735 void
1736 notice_special_calls (tree t)
1738 int flags = call_expr_flags (t);
1740 if (flags & ECF_MAY_BE_ALLOCA)
1741 current_function_calls_alloca = true;
1742 if (flags & ECF_RETURNS_TWICE)
1743 current_function_calls_setjmp = true;
1747 /* Clear flags set by notice_special_calls. Used by dead code removal
1748 to update the flags. */
1750 void
1751 clear_special_calls (void)
1753 current_function_calls_alloca = false;
1754 current_function_calls_setjmp = false;
1758 static void
1759 remove_useless_stmts_1 (tree *tp, struct rus_data *data)
1761 tree t = *tp, op;
1763 switch (TREE_CODE (t))
1765 case COND_EXPR:
1766 remove_useless_stmts_cond (tp, data);
1767 break;
1769 case TRY_FINALLY_EXPR:
1770 remove_useless_stmts_tf (tp, data);
1771 break;
1773 case TRY_CATCH_EXPR:
1774 remove_useless_stmts_tc (tp, data);
1775 break;
1777 case BIND_EXPR:
1778 remove_useless_stmts_bind (tp, data);
1779 break;
1781 case GOTO_EXPR:
1782 remove_useless_stmts_goto (tp, data);
1783 break;
1785 case LABEL_EXPR:
1786 remove_useless_stmts_label (tp, data);
1787 break;
1789 case RETURN_EXPR:
1790 fold_stmt (tp);
1791 data->last_goto = NULL;
1792 data->may_branch = true;
1793 break;
1795 case CALL_EXPR:
1796 fold_stmt (tp);
1797 data->last_goto = NULL;
1798 notice_special_calls (t);
1799 update_call_expr_flags (t);
1800 if (tree_could_throw_p (t))
1801 data->may_throw = true;
1802 break;
1804 case MODIFY_EXPR:
1805 gcc_unreachable ();
1807 case GIMPLE_MODIFY_STMT:
1808 data->last_goto = NULL;
1809 fold_stmt (tp);
1810 op = get_call_expr_in (t);
1811 if (op)
1813 update_call_expr_flags (op);
1814 notice_special_calls (op);
1816 if (tree_could_throw_p (t))
1817 data->may_throw = true;
1818 break;
1820 case STATEMENT_LIST:
1822 tree_stmt_iterator i = tsi_start (t);
1823 while (!tsi_end_p (i))
1825 t = tsi_stmt (i);
1826 if (IS_EMPTY_STMT (t))
1828 tsi_delink (&i);
1829 continue;
1832 remove_useless_stmts_1 (tsi_stmt_ptr (i), data);
1834 t = tsi_stmt (i);
1835 if (TREE_CODE (t) == STATEMENT_LIST)
1837 tsi_link_before (&i, t, TSI_SAME_STMT);
1838 tsi_delink (&i);
1840 else
1841 tsi_next (&i);
1844 break;
1845 case ASM_EXPR:
1846 fold_stmt (tp);
1847 data->last_goto = NULL;
1848 break;
1850 default:
1851 data->last_goto = NULL;
1852 break;
1856 static unsigned int
1857 remove_useless_stmts (void)
1859 struct rus_data data;
1861 clear_special_calls ();
1865 memset (&data, 0, sizeof (data));
1866 remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
1868 while (data.repeat);
1869 return 0;
1873 struct tree_opt_pass pass_remove_useless_stmts =
1875 "useless", /* name */
1876 NULL, /* gate */
1877 remove_useless_stmts, /* execute */
1878 NULL, /* sub */
1879 NULL, /* next */
1880 0, /* static_pass_number */
1881 0, /* tv_id */
1882 PROP_gimple_any, /* properties_required */
1883 0, /* properties_provided */
1884 0, /* properties_destroyed */
1885 0, /* todo_flags_start */
1886 TODO_dump_func, /* todo_flags_finish */
1887 0 /* letter */
1890 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1892 static void
1893 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1895 tree phi;
1897 /* Since this block is no longer reachable, we can just delete all
1898 of its PHI nodes. */
1899 phi = phi_nodes (bb);
1900 while (phi)
1902 tree next = PHI_CHAIN (phi);
1903 remove_phi_node (phi, NULL_TREE, true);
1904 phi = next;
1907 /* Remove edges to BB's successors. */
1908 while (EDGE_COUNT (bb->succs) > 0)
1909 remove_edge (EDGE_SUCC (bb, 0));
1913 /* Remove statements of basic block BB. */
1915 static void
1916 remove_bb (basic_block bb)
1918 block_stmt_iterator i;
1919 #ifdef USE_MAPPED_LOCATION
1920 source_location loc = UNKNOWN_LOCATION;
1921 #else
1922 source_locus loc = 0;
1923 #endif
1925 if (dump_file)
1927 fprintf (dump_file, "Removing basic block %d\n", bb->index);
1928 if (dump_flags & TDF_DETAILS)
1930 dump_bb (bb, dump_file, 0);
1931 fprintf (dump_file, "\n");
1935 if (current_loops)
1937 struct loop *loop = bb->loop_father;
1939 /* If a loop gets removed, clean up the information associated
1940 with it. */
1941 if (loop->latch == bb
1942 || loop->header == bb)
1943 free_numbers_of_iterations_estimates_loop (loop);
1946 /* Remove all the instructions in the block. */
1947 for (i = bsi_start (bb); !bsi_end_p (i);)
1949 tree stmt = bsi_stmt (i);
1950 if (TREE_CODE (stmt) == LABEL_EXPR
1951 && (FORCED_LABEL (LABEL_EXPR_LABEL (stmt))
1952 || DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt))))
1954 basic_block new_bb;
1955 block_stmt_iterator new_bsi;
1957 /* A non-reachable non-local label may still be referenced.
1958 But it no longer needs to carry the extra semantics of
1959 non-locality. */
1960 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
1962 DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)) = 0;
1963 FORCED_LABEL (LABEL_EXPR_LABEL (stmt)) = 1;
1966 new_bb = bb->prev_bb;
1967 new_bsi = bsi_start (new_bb);
1968 bsi_remove (&i, false);
1969 bsi_insert_before (&new_bsi, stmt, BSI_NEW_STMT);
1971 else
1973 /* Release SSA definitions if we are in SSA. Note that we
1974 may be called when not in SSA. For example,
1975 final_cleanup calls this function via
1976 cleanup_tree_cfg. */
1977 if (gimple_in_ssa_p (cfun))
1978 release_defs (stmt);
1980 bsi_remove (&i, true);
1983 /* Don't warn for removed gotos. Gotos are often removed due to
1984 jump threading, thus resulting in bogus warnings. Not great,
1985 since this way we lose warnings for gotos in the original
1986 program that are indeed unreachable. */
1987 if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
1989 #ifdef USE_MAPPED_LOCATION
1990 if (EXPR_HAS_LOCATION (stmt))
1991 loc = EXPR_LOCATION (stmt);
1992 #else
1993 source_locus t;
1994 t = EXPR_LOCUS (stmt);
1995 if (t && LOCATION_LINE (*t) > 0)
1996 loc = t;
1997 #endif
2001 /* If requested, give a warning that the first statement in the
2002 block is unreachable. Only the first location encountered in the
2003 loop above is recorded (see the !loc check), so LOC refers to the
2004 first statement with a location in the block. */
2005 #ifdef USE_MAPPED_LOCATION
2006 if (loc > BUILTINS_LOCATION)
2007 warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
2008 #else
2009 if (loc)
2010 warning (OPT_Wunreachable_code, "%Hwill never be executed", loc);
2011 #endif
2013 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2017 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2018 predicate VAL, return the edge that will be taken out of the block.
2019 If VAL does not match a unique edge, NULL is returned. */
2021 edge
2022 find_taken_edge (basic_block bb, tree val)
2024 tree stmt;
2026 stmt = last_stmt (bb);
2028 gcc_assert (stmt);
2029 gcc_assert (is_ctrl_stmt (stmt));
2030 gcc_assert (val);
2032 if (! is_gimple_min_invariant (val))
2033 return NULL;
2035 if (TREE_CODE (stmt) == COND_EXPR)
2036 return find_taken_edge_cond_expr (bb, val);
2038 if (TREE_CODE (stmt) == SWITCH_EXPR)
2039 return find_taken_edge_switch_expr (bb, val);
2041 if (computed_goto_p (stmt))
2042 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2044 gcc_unreachable ();
2047 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2048 statement, determine which of the outgoing edges will be taken out of the
2049 block. Return NULL if any edge may be taken. */
2051 static edge
2052 find_taken_edge_computed_goto (basic_block bb, tree val)
2054 basic_block dest;
2055 edge e = NULL;
2057 dest = label_to_block (val);
2058 if (dest)
2060 e = find_edge (bb, dest);
2061 gcc_assert (e != NULL);
2064 return e;
2067 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2068 statement, determine which of the two edges will be taken out of the
2069 block. Return NULL if either edge may be taken. */
2071 static edge
2072 find_taken_edge_cond_expr (basic_block bb, tree val)
2074 edge true_edge, false_edge;
2076 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2078 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2079 return (integer_zerop (val) ? false_edge : true_edge);
2082 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2083 statement, determine which edge will be taken out of the block. Return
2084 NULL if any edge may be taken. */
2086 static edge
2087 find_taken_edge_switch_expr (basic_block bb, tree val)
2089 tree switch_expr, taken_case;
2090 basic_block dest_bb;
2091 edge e;
2093 switch_expr = last_stmt (bb);
2094 taken_case = find_case_label_for_value (switch_expr, val);
2095 dest_bb = label_to_block (CASE_LABEL (taken_case));
2097 e = find_edge (bb, dest_bb);
2098 gcc_assert (e);
2099 return e;
2103 /* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
2104 We can make optimal use here of the fact that the case labels are
2105 sorted: We can do a binary search for a case matching VAL. */
2107 static tree
2108 find_case_label_for_value (tree switch_expr, tree val)
2110 tree vec = SWITCH_LABELS (switch_expr);
2111 size_t low, high, n = TREE_VEC_LENGTH (vec);
2112 tree default_case = TREE_VEC_ELT (vec, n - 1);
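/* Binary search over the sorted labels in VEC[0 .. N-2].  LOW is the last
   probed index whose CASE_LOW is <= VAL (or -1 before any probe); HIGH is
   the last probed index whose CASE_LOW is > VAL (initially N-1, the default
   slot).  A probed label is returned as soon as it is seen to cover VAL.  */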
2114 for (low = -1, high = n - 1; high - low > 1; )
2116 size_t i = (high + low) / 2;
2117 tree t = TREE_VEC_ELT (vec, i);
2118 int cmp;
2120 /* Cache the result of comparing CASE_LOW and val. */
2121 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2123 if (cmp > 0)
2124 high = i;
2125 else
2126 low = i;
2128 if (CASE_HIGH (t) == NULL)
2130 /* A single-valued case label. */
2131 if (cmp == 0)
2132 return t;
2134 else
2136 /* A case range. We can only handle integer ranges. */
2137 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2138 return t;
2142 return default_case;
2148 /*---------------------------------------------------------------------------
2149 Debugging functions
2150 ---------------------------------------------------------------------------*/
2152 /* Dump tree-specific information of block BB to file OUTF. */
2154 void
2155 tree_dump_bb (basic_block bb, FILE *outf, int indent)
2157 dump_generic_bb (outf, bb, indent, TDF_VOPS|TDF_MEMSYMS);
2161 /* Dump a basic block on stderr. */
2163 void
2164 debug_tree_bb (basic_block bb)
2166 dump_bb (bb, stderr, 0);
2170 /* Dump basic block with index N on stderr. */
2172 basic_block
2173 debug_tree_bb_n (int n)
2175 debug_tree_bb (BASIC_BLOCK (n));
2176 return BASIC_BLOCK (n);
2180 /* Dump the CFG on stderr.
2182 FLAGS are the same as those used by the tree dumping functions
2183 (see TDF_* in tree-pass.h). */
2185 void
2186 debug_tree_cfg (int flags)
2188 dump_tree_cfg (stderr, flags);
2192 /* Dump the program showing basic block boundaries on the given FILE.
2194 FLAGS are the same as those used by the tree dumping functions (see TDF_* in
2195 tree.h). */
2197 void
2198 dump_tree_cfg (FILE *file, int flags)
2200 if (flags & TDF_DETAILS)
2202 const char *funcname
2203 = lang_hooks.decl_printable_name (current_function_decl, 2);
2205 fputc ('\n', file);
2206 fprintf (file, ";; Function %s\n\n", funcname);
2207 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2208 n_basic_blocks, n_edges, last_basic_block);
2210 brief_dump_cfg (file);
2211 fprintf (file, "\n");
2214 if (flags & TDF_STATS)
2215 dump_cfg_stats (file);
2217 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2221 /* Dump CFG statistics on FILE. */
2223 void
2224 dump_cfg_stats (FILE *file)
2226 static long max_num_merged_labels = 0;
2227 unsigned long size, total = 0;
2228 long num_edges;
2229 basic_block bb;
2230 const char * const fmt_str = "%-30s%-13s%12s\n";
2231 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2232 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2233 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2234 const char *funcname
2235 = lang_hooks.decl_printable_name (current_function_decl, 2);
2238 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2240 fprintf (file, "---------------------------------------------------------\n");
2241 fprintf (file, fmt_str, "", " Number of ", "Memory");
2242 fprintf (file, fmt_str, "", " instances ", "used ");
2243 fprintf (file, "---------------------------------------------------------\n");
2245 size = n_basic_blocks * sizeof (struct basic_block_def);
2246 total += size;
2247 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2248 SCALE (size), LABEL (size));
2250 num_edges = 0;
2251 FOR_EACH_BB (bb)
2252 num_edges += EDGE_COUNT (bb->succs);
2253 size = num_edges * sizeof (struct edge_def);
2254 total += size;
2255 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2257 fprintf (file, "---------------------------------------------------------\n");
2258 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2259 LABEL (total));
2260 fprintf (file, "---------------------------------------------------------\n");
2261 fprintf (file, "\n");
2263 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2264 max_num_merged_labels = cfg_stats.num_merged_labels;
2266 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2267 cfg_stats.num_merged_labels, max_num_merged_labels);
2269 fprintf (file, "\n");
2273 /* Dump CFG statistics on stderr. Keep extern so that it's always
2274 linked in the final executable. */
2276 void
2277 debug_cfg_stats (void)
2279 dump_cfg_stats (stderr);
2283 /* Dump the flowgraph to a .vcg FILE. */
2285 static void
2286 tree_cfg2vcg (FILE *file)
2288 edge e;
2289 edge_iterator ei;
2290 basic_block bb;
2291 const char *funcname
2292 = lang_hooks.decl_printable_name (current_function_decl, 2);
2294 /* Write the file header. */
2295 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2296 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2297 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2299 /* Write blocks and edges. */
2300 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2302 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2303 e->dest->index);
2305 if (e->flags & EDGE_FAKE)
2306 fprintf (file, " linestyle: dotted priority: 10");
2307 else
2308 fprintf (file, " linestyle: solid priority: 100");
2310 fprintf (file, " }\n");
2312 fputc ('\n', file);
2314 FOR_EACH_BB (bb)
2316 enum tree_code head_code, end_code;
2317 const char *head_name, *end_name;
2318 int head_line = 0;
2319 int end_line = 0;
2320 tree first = first_stmt (bb);
2321 tree last = last_stmt (bb);
2323 if (first)
2325 head_code = TREE_CODE (first);
2326 head_name = tree_code_name[head_code];
2327 head_line = get_lineno (first);
2329 else
2330 head_name = "no-statement";
2332 if (last)
2334 end_code = TREE_CODE (last);
2335 end_name = tree_code_name[end_code];
2336 end_line = get_lineno (last);
2338 else
2339 end_name = "no-statement";
2341 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2342 bb->index, bb->index, head_name, head_line, end_name,
2343 end_line);
2345 FOR_EACH_EDGE (e, ei, bb->succs)
2347 if (e->dest == EXIT_BLOCK_PTR)
2348 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2349 else
2350 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2352 if (e->flags & EDGE_FAKE)
2353 fprintf (file, " priority: 10 linestyle: dotted");
2354 else
2355 fprintf (file, " priority: 100 linestyle: solid");
2357 fprintf (file, " }\n");
2360 if (bb->next_bb != EXIT_BLOCK_PTR)
2361 fputc ('\n', file);
2364 fputs ("}\n\n", file);
2369 /*---------------------------------------------------------------------------
2370 Miscellaneous helpers
2371 ---------------------------------------------------------------------------*/
2373 /* Return true if T represents a stmt that always transfers control. */
2375 bool
2376 is_ctrl_stmt (tree t)
2378 return (TREE_CODE (t) == COND_EXPR
2379 || TREE_CODE (t) == SWITCH_EXPR
2380 || TREE_CODE (t) == GOTO_EXPR
2381 || TREE_CODE (t) == RETURN_EXPR
2382 || TREE_CODE (t) == RESX_EXPR);
2386 /* Return true if T is a statement that may alter the flow of control
2387 (e.g., a call to a non-returning function). */
2389 bool
2390 is_ctrl_altering_stmt (tree t)
2392 tree call;
2394 gcc_assert (t);
2395 call = get_call_expr_in (t);
2396 if (call)
2398 /* A non-pure/const CALL_EXPR alters flow control if the current
2399 function has nonlocal labels. */
2400 if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
2401 return true;
2403 /* A CALL_EXPR also alters control flow if it does not return. */
2404 if (call_expr_flags (call) & ECF_NORETURN)
2405 return true;
2408 /* OpenMP directives alter control flow. */
2409 if (OMP_DIRECTIVE_P (t))
2410 return true;
2412 /* If a statement can throw, it alters control flow. */
2413 return tree_can_throw_internal (t);
2417 /* Return true if T is a computed goto. */
2419 bool
2420 computed_goto_p (tree t)
2422 return (TREE_CODE (t) == GOTO_EXPR
2423 && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
2427 /* Return true if T is a simple local goto. */
2429 bool
2430 simple_goto_p (tree t)
2432 return (TREE_CODE (t) == GOTO_EXPR
2433 && TREE_CODE (GOTO_DESTINATION (t)) == LABEL_DECL);
2437 /* Return true if T can make an abnormal transfer of control flow.
2438 Transfers of control flow associated with EH are excluded. */
2440 bool
2441 tree_can_make_abnormal_goto (tree t)
2443 if (computed_goto_p (t))
2444 return true;
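/* Look through an assignment and a WITH_SIZE_EXPR wrapper to find a call;
   a call with side effects may transfer control to a nonlocal label.  */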
2445 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
2446 t = GIMPLE_STMT_OPERAND (t, 1);
2447 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2448 t = TREE_OPERAND (t, 0);
2449 if (TREE_CODE (t) == CALL_EXPR)
2450 return TREE_SIDE_EFFECTS (t) && current_function_has_nonlocal_label;
2451 return false;
2455 /* Return true if T should start a new basic block. PREV_T is the
2456 statement preceding T. It is used when T is a label or a case label.
2457 Labels should only start a new basic block if their previous statement
2458 wasn't a label. Otherwise, a sequence of labels would generate
2459 unnecessary basic blocks that only contain a single label. */
2461 static inline bool
2462 stmt_starts_bb_p (tree t, tree prev_t)
2464 if (t == NULL_TREE)
2465 return false;
2467 /* LABEL_EXPRs start a new basic block only if the preceding
2468 statement wasn't a label of the same type. This prevents the
2469 creation of consecutive blocks that have nothing but a single
2470 label. */
2471 if (TREE_CODE (t) == LABEL_EXPR)
2473 /* Nonlocal and computed GOTO targets always start a new block. */
2474 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
2475 || FORCED_LABEL (LABEL_EXPR_LABEL (t)))
2476 return true;
2478 if (prev_t && TREE_CODE (prev_t) == LABEL_EXPR)
2480 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
2481 return true;
2483 cfg_stats.num_merged_labels++;
2484 return false;
2486 else
2487 return true;
2490 return false;
2494 /* Return true if T should end a basic block. */
2496 bool
2497 stmt_ends_bb_p (tree t)
2499 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2503 /* Add gotos that used to be represented implicitly in the CFG. */
2505 void
2506 disband_implicit_edges (void)
2508 basic_block bb;
2509 block_stmt_iterator last;
2510 edge e;
2511 edge_iterator ei;
2512 tree stmt, label;
2514 FOR_EACH_BB (bb)
2516 last = bsi_last (bb);
2517 stmt = last_stmt (bb);
2519 if (stmt && TREE_CODE (stmt) == COND_EXPR)
2521 /* Remove superfluous gotos from COND_EXPR branches. Moved
2522 from cfg_remove_useless_stmts here since it violates the
2523 invariants for tree-cfg correspondence and thus fits better
2524 here where we do it anyway. */
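/* If one arm of the COND_EXPR targets the block that immediately follows
   BB, its goto is redundant: empty that arm and let the edge fall through.  */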
2525 e = find_edge (bb, bb->next_bb);
2526 if (e)
2528 if (e->flags & EDGE_TRUE_VALUE)
2529 COND_EXPR_THEN (stmt) = build_empty_stmt ();
2530 else if (e->flags & EDGE_FALSE_VALUE)
2531 COND_EXPR_ELSE (stmt) = build_empty_stmt ();
2532 else
2533 gcc_unreachable ();
2534 e->flags |= EDGE_FALLTHRU;
2537 continue;
2540 if (stmt && TREE_CODE (stmt) == RETURN_EXPR)
2542 /* Remove the RETURN_EXPR if we may fall through to the exit
2543 instead. */
2544 gcc_assert (single_succ_p (bb));
2545 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
2547 if (bb->next_bb == EXIT_BLOCK_PTR
2548 && !TREE_OPERAND (stmt, 0))
2550 bsi_remove (&last, true);
2551 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
2553 continue;
2556 /* There can be no fallthru edge if the last statement is a control
2557 one. */
2558 if (stmt && is_ctrl_stmt (stmt))
2559 continue;
2561 /* Find a fallthru edge and emit the goto if necessary. */
2562 FOR_EACH_EDGE (e, ei, bb->succs)
2563 if (e->flags & EDGE_FALLTHRU)
2564 break;
2566 if (!e || e->dest == bb->next_bb)
2567 continue;
2569 gcc_assert (e->dest != EXIT_BLOCK_PTR);
2570 label = tree_block_label (e->dest);
2572 stmt = build1 (GOTO_EXPR, void_type_node, label);
2573 #ifdef USE_MAPPED_LOCATION
2574 SET_EXPR_LOCATION (stmt, e->goto_locus);
2575 #else
2576 SET_EXPR_LOCUS (stmt, e->goto_locus);
2577 #endif
2578 bsi_insert_after (&last, stmt, BSI_NEW_STMT);
2579 e->flags &= ~EDGE_FALLTHRU;
2583 /* Remove block annotations and other data structures. */
2585 void
2586 delete_tree_cfg_annotations (void)
2588 basic_block bb;
2589 block_stmt_iterator bsi;
2591 /* Remove annotations from every tree in the function. */
2592 FOR_EACH_BB (bb)
2593 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
2595 tree stmt = bsi_stmt (bsi);
2596 ggc_free (stmt->base.ann);
2597 stmt->base.ann = NULL;
2599 label_to_block_map = NULL;
2603 /* Return the first statement in basic block BB. */
2605 tree
2606 first_stmt (basic_block bb)
2608 block_stmt_iterator i = bsi_start (bb);
2609 return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
2613 /* Return the last statement in basic block BB. */
2615 tree
2616 last_stmt (basic_block bb)
2618 block_stmt_iterator b = bsi_last (bb);
2619 return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
2623 /* Return the last statement of an otherwise empty block. Return NULL
2624 if the block is totally empty, or if it contains more than one
2625 statement. */
2627 tree
2628 last_and_only_stmt (basic_block bb)
2630 block_stmt_iterator i = bsi_last (bb);
2631 tree last, prev;
2633 if (bsi_end_p (i))
2634 return NULL_TREE;
2636 last = bsi_stmt (i);
2637 bsi_prev (&i);
2638 if (bsi_end_p (i))
2639 return last;
2641 /* Empty statements should no longer appear in the instruction stream.
2642 Everything that might have appeared before should be deleted by
2643 remove_useless_stmts, and the optimizers should just bsi_remove
2644 instead of smashing with build_empty_stmt.
2646 Thus the only thing that should appear here in a block containing
2647 one executable statement is a label. */
2648 prev = bsi_stmt (i);
2649 if (TREE_CODE (prev) == LABEL_EXPR)
2650 return last;
2651 else
2652 return NULL_TREE;
2656 /* Mark BB as the basic block holding statement T. */
2658 void
2659 set_bb_for_stmt (tree t, basic_block bb)
2661 if (TREE_CODE (t) == PHI_NODE)
2662 PHI_BB (t) = bb;
2663 else if (TREE_CODE (t) == STATEMENT_LIST)
2665 tree_stmt_iterator i;
2666 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2667 set_bb_for_stmt (tsi_stmt (i), bb);
2669 else
2671 stmt_ann_t ann = get_stmt_ann (t);
2672 ann->bb = bb;
2674 /* If the statement is a label, add the label to the block-to-labels map
2675 so that we can speed up edge creation for GOTO_EXPRs. */
2676 if (TREE_CODE (t) == LABEL_EXPR)
2678 int uid;
2680 t = LABEL_EXPR_LABEL (t);
2681 uid = LABEL_DECL_UID (t);
2682 if (uid == -1)
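/* First time this label is seen: assign it a fresh UID and grow the
   label-to-block map if it is not large enough to hold it.  */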
2684 unsigned old_len = VEC_length (basic_block, label_to_block_map);
2685 LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
2686 if (old_len <= (unsigned) uid)
2688 unsigned new_len = 3 * uid / 2;
2690 VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
2691 new_len);
2694 else
2695 /* We're moving an existing label. Make sure that we've
2696 removed it from the old block. */
2697 gcc_assert (!bb
2698 || !VEC_index (basic_block, label_to_block_map, uid));
2699 VEC_replace (basic_block, label_to_block_map, uid, bb);
2704 /* Faster version of set_bb_for_stmt that assumes the statement is being moved
2705 from one basic block to another.
2706 For BB splitting we can run into a quadratic case, so performance is quite
2707 important; knowing that the tables are big enough, change_bb_for_stmt
2708 can be inlined as a leaf function. */
2709 static inline void
2710 change_bb_for_stmt (tree t, basic_block bb)
2712 get_stmt_ann (t)->bb = bb;
2713 if (TREE_CODE (t) == LABEL_EXPR)
2714 VEC_replace (basic_block, label_to_block_map,
2715 LABEL_DECL_UID (LABEL_EXPR_LABEL (t)), bb);
2718 /* Finds iterator for STMT. */
2720 extern block_stmt_iterator
2721 bsi_for_stmt (tree stmt)
2723 block_stmt_iterator bsi;
2725 for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
2726 if (bsi_stmt (bsi) == stmt)
2727 return bsi;
2729 gcc_unreachable ();
2732 /* Mark statement T as modified, and update it. */
2733 static inline void
2734 update_modified_stmts (tree t)
2736 if (!ssa_operands_active ())
2737 return;
2738 if (TREE_CODE (t) == STATEMENT_LIST)
2740 tree_stmt_iterator i;
2741 tree stmt;
2742 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2744 stmt = tsi_stmt (i);
2745 update_stmt_if_modified (stmt);
2748 else
2749 update_stmt_if_modified (t);
2752 /* Insert statement (or statement list) T before the statement
2753 pointed-to by iterator I. M specifies how to update iterator I
2754 after insertion (see enum bsi_iterator_update). */
2756 void
2757 bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2759 set_bb_for_stmt (t, i->bb);
2760 update_modified_stmts (t);
2761 tsi_link_before (&i->tsi, t, m);
2765 /* Insert statement (or statement list) T after the statement
2766 pointed-to by iterator I. M specifies how to update iterator I
2767 after insertion (see enum bsi_iterator_update). */
2769 void
2770 bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2772 set_bb_for_stmt (t, i->bb);
2773 update_modified_stmts (t);
2774 tsi_link_after (&i->tsi, t, m);
2778 /* Remove the statement pointed to by iterator I. The iterator is updated
2779 to the next statement.
2781 When REMOVE_EH_INFO is true we remove the statement pointed to by
2782 iterator I from the EH tables. Otherwise we do not modify the EH
2783 tables.
2785 Generally, REMOVE_EH_INFO should be true when the statement is going to
2786 be removed from the IL and not reinserted elsewhere. */
2788 void
2789 bsi_remove (block_stmt_iterator *i, bool remove_eh_info)
2791 tree t = bsi_stmt (*i);
2792 set_bb_for_stmt (t, NULL);
2793 delink_stmt_imm_use (t);
2794 tsi_delink (&i->tsi);
2795 mark_stmt_modified (t);
2796 if (remove_eh_info)
2798 remove_stmt_from_eh_region (t);
2799 gimple_remove_stmt_histograms (cfun, t);
2804 /* Move the statement at FROM so it comes right after the statement at TO. */
2806 void
2807 bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
2809 tree stmt = bsi_stmt (*from);
2810 bsi_remove (from, false);
2811 bsi_insert_after (to, stmt, BSI_SAME_STMT);
2815 /* Move the statement at FROM so it comes right before the statement at TO. */
2817 void
2818 bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
2820 tree stmt = bsi_stmt (*from);
2821 bsi_remove (from, false);
2822 bsi_insert_before (to, stmt, BSI_SAME_STMT);
2826 /* Move the statement at FROM to the end of basic block BB. */
2828 void
2829 bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
2831 block_stmt_iterator last = bsi_last (bb);
2833 /* Have to check bsi_end_p because it could be an empty block. */
2834 if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
2835 bsi_move_before (from, &last);
2836 else
2837 bsi_move_after (from, &last);
2841 /* Replace the contents of the statement pointed to by iterator BSI
2842 with STMT. If UPDATE_EH_INFO is true, the exception handling
2843 information of the original statement is moved to the new statement. */
2845 void
2846 bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool update_eh_info)
2848 int eh_region;
2849 tree orig_stmt = bsi_stmt (*bsi);
2851 if (stmt == orig_stmt)
2852 return;
2853 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
2854 set_bb_for_stmt (stmt, bsi->bb);
2856 /* Preserve EH region information from the original statement, if
2857 requested by the caller. */
2858 if (update_eh_info)
2860 eh_region = lookup_stmt_eh_region (orig_stmt);
2861 if (eh_region >= 0)
2863 remove_stmt_from_eh_region (orig_stmt);
2864 add_stmt_to_eh_region (stmt, eh_region);
2868 gimple_duplicate_stmt_histograms (cfun, stmt, cfun, orig_stmt);
2869 gimple_remove_stmt_histograms (cfun, orig_stmt);
2870 delink_stmt_imm_use (orig_stmt);
2871 *bsi_stmt_ptr (*bsi) = stmt;
2872 mark_stmt_modified (stmt);
2873 update_modified_stmts (stmt);
2877 /* Insert the statement pointed-to by BSI into edge E. Every attempt
2878 is made to place the statement in an existing basic block, but
2879 sometimes that isn't possible. When it isn't possible, the edge is
2880 split and the statement is added to the new block.
2882 In all cases, the returned *BSI points to the correct location. The
2883 return value is true if insertion should be done after the location,
2884 or false if it should be done before the location. If a new basic block
2885 has to be created, it is stored in *NEW_BB. */
2887 static bool
2888 tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
2889 basic_block *new_bb)
2891 basic_block dest, src;
2892 tree tmp;
2894 dest = e->dest;
2895 restart:
2897 /* If the destination has one predecessor which has no PHI nodes,
2898 insert there. Except for the exit block.
2900 The requirement for no PHI nodes could be relaxed. Basically we
2901 would have to examine the PHIs to prove that none of them used
2902 the value set by the statement we want to insert on E. That
2903 hardly seems worth the effort. */
2904 if (single_pred_p (dest)
2905 && ! phi_nodes (dest)
2906 && dest != EXIT_BLOCK_PTR)
2908 *bsi = bsi_start (dest);
2909 if (bsi_end_p (*bsi))
2910 return true;
2912 /* Make sure we insert after any leading labels. */
2913 tmp = bsi_stmt (*bsi);
2914 while (TREE_CODE (tmp) == LABEL_EXPR)
2916 bsi_next (bsi);
2917 if (bsi_end_p (*bsi))
2918 break;
2919 tmp = bsi_stmt (*bsi);
2922 if (bsi_end_p (*bsi))
2924 *bsi = bsi_last (dest);
2925 return true;
2927 else
2928 return false;
2931 /* If the source has one successor, the edge is not abnormal and
2932 the last statement does not end a basic block, insert there.
2933 Except for the entry block. */
2934 src = e->src;
2935 if ((e->flags & EDGE_ABNORMAL) == 0
2936 && single_succ_p (src)
2937 && src != ENTRY_BLOCK_PTR)
2939 *bsi = bsi_last (src);
2940 if (bsi_end_p (*bsi))
2941 return true;
2943 tmp = bsi_stmt (*bsi);
2944 if (!stmt_ends_bb_p (tmp))
2945 return true;
2947 /* Insert code just before returning the value. We may need to decompose
2948 the return in case it contains a non-trivial operand. */
2949 if (TREE_CODE (tmp) == RETURN_EXPR)
2951 tree op = TREE_OPERAND (tmp, 0);
2952 if (op && !is_gimple_val (op))
2954 gcc_assert (TREE_CODE (op) == GIMPLE_MODIFY_STMT);
2955 bsi_insert_before (bsi, op, BSI_NEW_STMT);
2956 TREE_OPERAND (tmp, 0) = GIMPLE_STMT_OPERAND (op, 0);
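/* Step back one statement so that the caller's "insert after" lands
   immediately before the return (and before the split-out value
   computation, if any).  */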
2958 bsi_prev (bsi);
2959 return true;
2963 /* Otherwise, create a new basic block, and split this edge. */
2964 dest = split_edge (e);
2965 if (new_bb)
2966 *new_bb = dest;
2967 e = single_pred_edge (dest);
2968 goto restart;
2972 /* This routine will commit all pending edge insertions, creating any new
2973 basic blocks which are necessary. */
2975 void
2976 bsi_commit_edge_inserts (void)
2978 basic_block bb;
2979 edge e;
2980 edge_iterator ei;
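/* FOR_EACH_BB does not visit the ENTRY block, so commit insertions on
   its single outgoing edge explicitly.  */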
2982 bsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR), NULL);
2984 FOR_EACH_BB (bb)
2985 FOR_EACH_EDGE (e, ei, bb->succs)
2986 bsi_commit_one_edge_insert (e, NULL);
2990 /* Commit insertions pending at edge E. If a new block is created, set NEW_BB
2991 to this block, otherwise set it to NULL. */
2993 void
2994 bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
2996 if (new_bb)
2997 *new_bb = NULL;
2998 if (PENDING_STMT (e))
3000 block_stmt_iterator bsi;
3001 tree stmt = PENDING_STMT (e);
3003 PENDING_STMT (e) = NULL_TREE;
3005 if (tree_find_edge_insert_loc (e, &bsi, new_bb))
3006 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3007 else
3008 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3013 /* Add STMT to the pending list of edge E. No actual insertion is
3014 made until a call to bsi_commit_edge_inserts () is made. */
3016 void
3017 bsi_insert_on_edge (edge e, tree stmt)
3019 append_to_statement_list (stmt, &PENDING_STMT (e));
3022 /* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts. If a new
3023 block has to be created, it is returned. */
3025 basic_block
3026 bsi_insert_on_edge_immediate (edge e, tree stmt)
3028 block_stmt_iterator bsi;
3029 basic_block new_bb = NULL;
3031 gcc_assert (!PENDING_STMT (e));
3033 if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
3034 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3035 else
3036 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3038 return new_bb;
3041 /*---------------------------------------------------------------------------
3042 Tree specific functions for CFG manipulation
3043 ---------------------------------------------------------------------------*/
3045 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
3047 static void
3048 reinstall_phi_args (edge new_edge, edge old_edge)
3050 tree var, phi;
3052 if (!PENDING_STMT (old_edge))
3053 return;
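/* PENDING_STMT (OLD_EDGE) is a TREE_LIST of queued PHI arguments:
   TREE_PURPOSE holds the PHI result and TREE_VALUE the argument.  Walk it
   in lockstep with the PHI nodes of NEW_EDGE's destination.  */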
3055 for (var = PENDING_STMT (old_edge), phi = phi_nodes (new_edge->dest);
3056 var && phi;
3057 var = TREE_CHAIN (var), phi = PHI_CHAIN (phi))
3059 tree result = TREE_PURPOSE (var);
3060 tree arg = TREE_VALUE (var);
3062 gcc_assert (result == PHI_RESULT (phi));
3064 add_phi_arg (phi, arg, new_edge);
3067 PENDING_STMT (old_edge) = NULL;
3070 /* Returns the basic block after which the new basic block created
3071 by splitting edge EDGE_IN should be placed. Tries to keep the new block
3072 near its "logical" location. This is of most help to humans looking
3073 at debugging dumps. */
3075 static basic_block
3076 split_edge_bb_loc (edge edge_in)
3078 basic_block dest = edge_in->dest;
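/* If the block before DEST already has an edge to DEST, keep the two
   adjacent and place the new block after the edge's source; otherwise
   slot the new block in right before DEST.  */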
3080 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
3081 return edge_in->src;
3082 else
3083 return dest->prev_bb;
3086 /* Split a (typically critical) edge EDGE_IN. Return the new block.
3087 Abort on abnormal edges. */
3089 static basic_block
3090 tree_split_edge (edge edge_in)
3092 basic_block new_bb, after_bb, dest;
3093 edge new_edge, e;
3095 /* Abnormal edges cannot be split. */
3096 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
3098 dest = edge_in->dest;
3100 after_bb = split_edge_bb_loc (edge_in);
3102 new_bb = create_empty_bb (after_bb);
3103 new_bb->frequency = EDGE_FREQUENCY (edge_in);
3104 new_bb->count = edge_in->count;
3105 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
3106 new_edge->probability = REG_BR_PROB_BASE;
3107 new_edge->count = edge_in->count;
3109 e = redirect_edge_and_branch (edge_in, new_bb);
3110 gcc_assert (e);
3111 reinstall_phi_args (new_edge, e);
3113 return new_bb;
3117 /* Return true when BB has label LABEL in it. */
3119 static bool
3120 has_label_p (basic_block bb, tree label)
3122 block_stmt_iterator bsi;
3124 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3126 tree stmt = bsi_stmt (bsi);
3128 if (TREE_CODE (stmt) != LABEL_EXPR)
3129 return false;
3130 if (LABEL_EXPR_LABEL (stmt) == label)
3131 return true;
3133 return false;
3137 /* Callback for walk_tree, check that all elements with address taken are
3138 properly noticed as such. DATA is non-null when TP is being walked from
3139 inside a PHI node. */
3141 static tree
3142 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3144 tree t = *tp, x;
3145 bool in_phi = (data != NULL);
3147 if (TYPE_P (t))
3148 *walk_subtrees = 0;
3150 /* Check operand N for being valid GIMPLE and give error MSG if not. */
3151 #define CHECK_OP(N, MSG) \
3152 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
3153 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
3155 switch (TREE_CODE (t))
3157 case SSA_NAME:
3158 if (SSA_NAME_IN_FREE_LIST (t))
3160 error ("SSA name in freelist but still referenced");
3161 return *tp;
3163 break;
3165 case ASSERT_EXPR:
3166 x = fold (ASSERT_EXPR_COND (t));
3167 if (x == boolean_false_node)
3169 error ("ASSERT_EXPR with an always-false condition");
3170 return *tp;
3172 break;
3174 case MODIFY_EXPR:
3175 gcc_unreachable ();
3177 case GIMPLE_MODIFY_STMT:
3178 x = GIMPLE_STMT_OPERAND (t, 0);
3179 if (TREE_CODE (x) == BIT_FIELD_REF
3180 && is_gimple_reg (TREE_OPERAND (x, 0)))
3182 error ("GIMPLE register modified with BIT_FIELD_REF");
3183 return t;
3185 break;
3187 case ADDR_EXPR:
3189 bool old_invariant;
3190 bool old_constant;
3191 bool old_side_effects;
3192 bool new_invariant;
3193 bool new_constant;
3194 bool new_side_effects;
3196 /* ??? tree-ssa-alias.c may have overlooked dead PHI nodes, missing
3197 dead PHIs that take the address of something. But if the PHI
3198 result is dead, the fact that it takes the address of anything
3199 is irrelevant. Because we cannot tell from here if a PHI result
3200 is dead, we just skip this check for PHIs altogether. This means
3201 we may be missing "valid" checks, but what can you do?
3202 This was PR19217. */
3203 if (in_phi)
3204 break;
3206 old_invariant = TREE_INVARIANT (t);
3207 old_constant = TREE_CONSTANT (t);
3208 old_side_effects = TREE_SIDE_EFFECTS (t);
3210 recompute_tree_invariant_for_addr_expr (t);
3211 new_invariant = TREE_INVARIANT (t);
3212 new_side_effects = TREE_SIDE_EFFECTS (t);
3213 new_constant = TREE_CONSTANT (t);
3215 if (old_invariant != new_invariant)
3217 error ("invariant not recomputed when ADDR_EXPR changed");
3218 return t;
3221 if (old_constant != new_constant)
3223 error ("constant not recomputed when ADDR_EXPR changed");
3224 return t;
3226 if (old_side_effects != new_side_effects)
3228 error ("side effects not recomputed when ADDR_EXPR changed");
3229 return t;
3232 /* Skip any references (they will be checked when we recurse down the
3233 tree) and ensure that any variable used as a prefix is marked
3234 addressable. */
3235 for (x = TREE_OPERAND (t, 0);
3236 handled_component_p (x);
3237 x = TREE_OPERAND (x, 0))
3240 if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
3241 return NULL;
3242 if (!TREE_ADDRESSABLE (x))
3244 error ("address taken, but ADDRESSABLE bit not set");
3245 return x;
3247 break;
3250 case COND_EXPR:
3251 x = COND_EXPR_COND (t);
3252 if (TREE_CODE (TREE_TYPE (x)) != BOOLEAN_TYPE)
3254 error ("non-boolean used in condition");
3255 return x;
3257 if (!is_gimple_condexpr (x))
3259 error ("invalid conditional operand");
3260 return x;
3262 break;
3264 case NOP_EXPR:
3265 case CONVERT_EXPR:
3266 case FIX_TRUNC_EXPR:
3267 case FLOAT_EXPR:
3268 case NEGATE_EXPR:
3269 case ABS_EXPR:
3270 case BIT_NOT_EXPR:
3271 case NON_LVALUE_EXPR:
3272 case TRUTH_NOT_EXPR:
3273 CHECK_OP (0, "invalid operand to unary operator");
3274 break;
3276 case REALPART_EXPR:
3277 case IMAGPART_EXPR:
3278 case COMPONENT_REF:
3279 case ARRAY_REF:
3280 case ARRAY_RANGE_REF:
3281 case BIT_FIELD_REF:
3282 case VIEW_CONVERT_EXPR:
3283 /* We have a nest of references. Verify that each of the operands
3284 that determine where to reference is either a constant or a variable,
3285 verify that the base is valid, and then show we've already checked
3286 the subtrees. */
3287 while (handled_component_p (t))
3289 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3290 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3291 else if (TREE_CODE (t) == ARRAY_REF
3292 || TREE_CODE (t) == ARRAY_RANGE_REF)
3294 CHECK_OP (1, "invalid array index");
3295 if (TREE_OPERAND (t, 2))
3296 CHECK_OP (2, "invalid array lower bound");
3297 if (TREE_OPERAND (t, 3))
3298 CHECK_OP (3, "invalid array stride");
3300 else if (TREE_CODE (t) == BIT_FIELD_REF)
3302 CHECK_OP (1, "invalid operand to BIT_FIELD_REF");
3303 CHECK_OP (2, "invalid operand to BIT_FIELD_REF");
3306 t = TREE_OPERAND (t, 0);
3309 if (!CONSTANT_CLASS_P (t) && !is_gimple_lvalue (t))
3311 error ("invalid reference prefix");
3312 return t;
3314 *walk_subtrees = 0;
3315 break;
3317 case LT_EXPR:
3318 case LE_EXPR:
3319 case GT_EXPR:
3320 case GE_EXPR:
3321 case EQ_EXPR:
3322 case NE_EXPR:
3323 case UNORDERED_EXPR:
3324 case ORDERED_EXPR:
3325 case UNLT_EXPR:
3326 case UNLE_EXPR:
3327 case UNGT_EXPR:
3328 case UNGE_EXPR:
3329 case UNEQ_EXPR:
3330 case LTGT_EXPR:
3331 case PLUS_EXPR:
3332 case MINUS_EXPR:
3333 case MULT_EXPR:
3334 case TRUNC_DIV_EXPR:
3335 case CEIL_DIV_EXPR:
3336 case FLOOR_DIV_EXPR:
3337 case ROUND_DIV_EXPR:
3338 case TRUNC_MOD_EXPR:
3339 case CEIL_MOD_EXPR:
3340 case FLOOR_MOD_EXPR:
3341 case ROUND_MOD_EXPR:
3342 case RDIV_EXPR:
3343 case EXACT_DIV_EXPR:
3344 case MIN_EXPR:
3345 case MAX_EXPR:
3346 case LSHIFT_EXPR:
3347 case RSHIFT_EXPR:
3348 case LROTATE_EXPR:
3349 case RROTATE_EXPR:
3350 case BIT_IOR_EXPR:
3351 case BIT_XOR_EXPR:
3352 case BIT_AND_EXPR:
3353 CHECK_OP (0, "invalid operand to binary operator");
3354 CHECK_OP (1, "invalid operand to binary operator");
3355 break;
3357 case CONSTRUCTOR:
3358 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3359 *walk_subtrees = 0;
3360 break;
3362 default:
3363 break;
3365 return NULL;
3367 #undef CHECK_OP
3371 /* Verify STMT, return true if STMT is not in GIMPLE form.
3372 TODO: Implement type checking. */
3374 static bool
3375 verify_stmt (tree stmt, bool last_in_block)
3377 tree addr;
3379 if (OMP_DIRECTIVE_P (stmt))
3381 /* OpenMP directives are validated by the FE and never operated
3382 on by the optimizers. Furthermore, OMP_FOR may contain
3383 non-gimple expressions when the main index variable has had
3384 its address taken. This does not affect the loop itself
3385 because the header of an OMP_FOR is merely used to determine
3386 how to set up the parallel iteration. */
3387 return false;
3390 if (!is_gimple_stmt (stmt))
3392 error ("is not a valid GIMPLE statement");
3393 goto fail;
3396 addr = walk_tree (&stmt, verify_expr, NULL, NULL);
3397 if (addr)
3399 debug_generic_stmt (addr);
3400 return true;
3403 /* If the statement is marked as part of an EH region, then it is
3404 expected that the statement could throw. Verify that when we
3405 have optimizations that simplify statements such that we prove
3406 that they cannot throw, that we update other data structures
3407 to match. */
3408 if (lookup_stmt_eh_region (stmt) >= 0)
3410 if (!tree_could_throw_p (stmt))
3412 error ("statement marked for throw, but doesn%'t");
3413 goto fail;
3415 if (!last_in_block && tree_can_throw_internal (stmt))
3417 error ("statement marked for throw in middle of block");
3418 goto fail;
3422 return false;
3424 fail:
3425 debug_generic_stmt (stmt);
3426 return true;
3430 /* Return true when T can be shared. */
3432 static bool
3433 tree_node_can_be_shared (tree t)
3435 if (IS_TYPE_OR_DECL_P (t)
3436 || is_gimple_min_invariant (t)
3437 || TREE_CODE (t) == SSA_NAME
3438 || t == error_mark_node
3439 || TREE_CODE (t) == IDENTIFIER_NODE)
3440 return true;
3442 if (TREE_CODE (t) == CASE_LABEL_EXPR)
3443 return true;
3445 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3446 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
3447 || TREE_CODE (t) == COMPONENT_REF
3448 || TREE_CODE (t) == REALPART_EXPR
3449 || TREE_CODE (t) == IMAGPART_EXPR)
3450 t = TREE_OPERAND (t, 0);
3452 if (DECL_P (t))
3453 return true;
3455 return false;
3459 /* Called via walk_tree. Verify tree sharing. */
3461 static tree
3462 verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
3464 struct pointer_set_t *visited = (struct pointer_set_t *) data;
3466 if (tree_node_can_be_shared (*tp))
3468 *walk_subtrees = false;
3469 return NULL;
3472 if (pointer_set_insert (visited, *tp))
3473 return *tp;
3475 return NULL;
3479 /* Helper function for verify_gimple_tuples. */
3481 static tree
3482 verify_gimple_tuples_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
3483 void *data ATTRIBUTE_UNUSED)
3485 switch (TREE_CODE (*tp))
3487 case MODIFY_EXPR:
3488 error ("unexpected non-tuple");
3489 debug_tree (*tp);
3490 gcc_unreachable ();
3491 return NULL_TREE;
3493 default:
3494 return NULL_TREE;
3498 /* Verify that there are no trees that should have been converted to
3499 gimple tuples. Return true if T contains a node that should have
3500 been converted to a gimple tuple, but hasn't. */
3502 static bool
3503 verify_gimple_tuples (tree t)
3505 return walk_tree (&t, verify_gimple_tuples_1, NULL, NULL) != NULL;
3508 static bool eh_error_found;
3509 static int
3510 verify_eh_throw_stmt_node (void **slot, void *data)
3512 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
3513 struct pointer_set_t *visited = (struct pointer_set_t *) data;
3515 if (!pointer_set_contains (visited, node->stmt))
3517 error ("Dead STMT in EH table");
3518 debug_generic_stmt (node->stmt);
3519 eh_error_found = true;
3521 return 0;
3524 /* Verify the GIMPLE statement chain. */
3526 void
3527 verify_stmts (void)
3529 basic_block bb;
3530 block_stmt_iterator bsi;
3531 bool err = false;
3532 struct pointer_set_t *visited, *visited_stmts;
3533 tree addr;
3535 timevar_push (TV_TREE_STMT_VERIFY);
3536 visited = pointer_set_create ();
3537 visited_stmts = pointer_set_create ();
3539 FOR_EACH_BB (bb)
3541 tree phi;
3542 int i;
3544 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
3546 int phi_num_args = PHI_NUM_ARGS (phi);
3548 pointer_set_insert (visited_stmts, phi);
3549 if (bb_for_stmt (phi) != bb)
3551 error ("bb_for_stmt (phi) is set to a wrong basic block");
3552 err |= true;
3555 for (i = 0; i < phi_num_args; i++)
3557 tree t = PHI_ARG_DEF (phi, i);
3558 tree addr;
3560 /* Addressable variables do have SSA_NAMEs but they
3561 are not considered gimple values. */
3562 if (TREE_CODE (t) != SSA_NAME
3563 && TREE_CODE (t) != FUNCTION_DECL
3564 && !is_gimple_val (t))
3566 error ("PHI def is not a GIMPLE value");
3567 debug_generic_stmt (phi);
3568 debug_generic_stmt (t);
3569 err |= true;
3572 addr = walk_tree (&t, verify_expr, (void *) 1, NULL);
3573 if (addr)
3575 debug_generic_stmt (addr);
3576 err |= true;
3579 addr = walk_tree (&t, verify_node_sharing, visited, NULL);
3580 if (addr)
3582 error ("incorrect sharing of tree nodes");
3583 debug_generic_stmt (phi);
3584 debug_generic_stmt (addr);
3585 err |= true;
3590 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
3592 tree stmt = bsi_stmt (bsi);
3594 pointer_set_insert (visited_stmts, stmt);
3595 err |= verify_gimple_tuples (stmt);
3597 if (bb_for_stmt (stmt) != bb)
3599 error ("bb_for_stmt (stmt) is set to a wrong basic block");
3600 err |= true;
3603 bsi_next (&bsi);
3604 err |= verify_stmt (stmt, bsi_end_p (bsi));
3605 addr = walk_tree (&stmt, verify_node_sharing, visited, NULL);
3606 if (addr)
3608 error ("incorrect sharing of tree nodes");
3609 debug_generic_stmt (stmt);
3610 debug_generic_stmt (addr);
3611 err |= true;
3615 eh_error_found = false;
3616 if (get_eh_throw_stmt_table (cfun))
3617 htab_traverse (get_eh_throw_stmt_table (cfun),
3618 verify_eh_throw_stmt_node,
3619 visited_stmts);
3621 if (err | eh_error_found)
3622 internal_error ("verify_stmts failed");
3624 pointer_set_destroy (visited);
3625 pointer_set_destroy (visited_stmts);
3626 verify_histograms ();
3627 timevar_pop (TV_TREE_STMT_VERIFY);
3631 /* Verifies that the flow information is OK. */
3633 static int
3634 tree_verify_flow_info (void)
3636 int err = 0;
3637 basic_block bb;
3638 block_stmt_iterator bsi;
3639 tree stmt;
3640 edge e;
3641 edge_iterator ei;
3643 if (ENTRY_BLOCK_PTR->stmt_list)
3645 error ("ENTRY_BLOCK has a statement list associated with it");
3646 err = 1;
3649 if (EXIT_BLOCK_PTR->stmt_list)
3651 error ("EXIT_BLOCK has a statement list associated with it");
3652 err = 1;
3655 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
3656 if (e->flags & EDGE_FALLTHRU)
3658 error ("fallthru to exit from bb %d", e->src->index);
3659 err = 1;
3662 FOR_EACH_BB (bb)
3664 bool found_ctrl_stmt = false;
3666 stmt = NULL_TREE;
3668 /* Skip labels on the start of basic block. */
3669 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3671 tree prev_stmt = stmt;
3673 stmt = bsi_stmt (bsi);
3675 if (TREE_CODE (stmt) != LABEL_EXPR)
3676 break;
3678 if (prev_stmt && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
3680 error ("nonlocal label ");
3681 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3682 fprintf (stderr, " is not first in a sequence of labels in bb %d",
3683 bb->index);
3684 err = 1;
3687 if (label_to_block (LABEL_EXPR_LABEL (stmt)) != bb)
3689 error ("label ");
3690 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3691 fprintf (stderr, " to block does not match in bb %d",
3692 bb->index);
3693 err = 1;
3696 if (decl_function_context (LABEL_EXPR_LABEL (stmt))
3697 != current_function_decl)
3699 error ("label ");
3700 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3701 fprintf (stderr, " has incorrect context in bb %d",
3702 bb->index);
3703 err = 1;
3707 /* Verify that the body of basic block BB is free of control flow. */
3708 for (; !bsi_end_p (bsi); bsi_next (&bsi))
3710 tree stmt = bsi_stmt (bsi);
3712 if (found_ctrl_stmt)
3714 error ("control flow in the middle of basic block %d",
3715 bb->index);
3716 err = 1;
3719 if (stmt_ends_bb_p (stmt))
3720 found_ctrl_stmt = true;
3722 if (TREE_CODE (stmt) == LABEL_EXPR)
3724 error ("label ");
3725 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3726 fprintf (stderr, " in the middle of basic block %d", bb->index);
3727 err = 1;
3731 bsi = bsi_last (bb);
3732 if (bsi_end_p (bsi))
3733 continue;
3735 stmt = bsi_stmt (bsi);
3737 err |= verify_eh_edges (stmt);
3739 if (is_ctrl_stmt (stmt))
3741 FOR_EACH_EDGE (e, ei, bb->succs)
3742 if (e->flags & EDGE_FALLTHRU)
3744 error ("fallthru edge after a control statement in bb %d",
3745 bb->index);
3746 err = 1;
3750 if (TREE_CODE (stmt) != COND_EXPR)
3752 /* Verify that no edges have EDGE_TRUE_VALUE or EDGE_FALSE_VALUE set
3753 after anything other than a COND_EXPR. */
3754 FOR_EACH_EDGE (e, ei, bb->succs)
3755 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
3757 error ("true/false edge after a non-COND_EXPR in bb %d",
3758 bb->index);
3759 err = 1;
3763 switch (TREE_CODE (stmt))
3765 case COND_EXPR:
3767 edge true_edge;
3768 edge false_edge;
3769 if (TREE_CODE (COND_EXPR_THEN (stmt)) != GOTO_EXPR
3770 || TREE_CODE (COND_EXPR_ELSE (stmt)) != GOTO_EXPR)
3772 error ("structured COND_EXPR at the end of bb %d", bb->index);
3773 err = 1;
3776 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
3778 if (!true_edge || !false_edge
3779 || !(true_edge->flags & EDGE_TRUE_VALUE)
3780 || !(false_edge->flags & EDGE_FALSE_VALUE)
3781 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3782 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3783 || EDGE_COUNT (bb->succs) >= 3)
3785 error ("wrong outgoing edge flags at end of bb %d",
3786 bb->index);
3787 err = 1;
3790 if (!has_label_p (true_edge->dest,
3791 GOTO_DESTINATION (COND_EXPR_THEN (stmt))))
3793 error ("%<then%> label does not match edge at end of bb %d",
3794 bb->index);
3795 err = 1;
3798 if (!has_label_p (false_edge->dest,
3799 GOTO_DESTINATION (COND_EXPR_ELSE (stmt))))
3801 error ("%<else%> label does not match edge at end of bb %d",
3802 bb->index);
3803 err = 1;
3806 break;
3808 case GOTO_EXPR:
3809 if (simple_goto_p (stmt))
3811 error ("explicit goto at end of bb %d", bb->index);
3812 err = 1;
3814 else
3816 /* FIXME. We should double check that the labels in the
3817 destination blocks have their address taken. */
3818 FOR_EACH_EDGE (e, ei, bb->succs)
3819 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
3820 | EDGE_FALSE_VALUE))
3821 || !(e->flags & EDGE_ABNORMAL))
3823 error ("wrong outgoing edge flags at end of bb %d",
3824 bb->index);
3825 err = 1;
3828 break;
3830 case RETURN_EXPR:
3831 if (!single_succ_p (bb)
3832 || (single_succ_edge (bb)->flags
3833 & (EDGE_FALLTHRU | EDGE_ABNORMAL
3834 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3836 error ("wrong outgoing edge flags at end of bb %d", bb->index);
3837 err = 1;
3839 if (single_succ (bb) != EXIT_BLOCK_PTR)
3841 error ("return edge does not point to exit in bb %d",
3842 bb->index);
3843 err = 1;
3845 break;
3847 case SWITCH_EXPR:
3849 tree prev;
3850 edge e;
3851 size_t i, n;
3852 tree vec;
3854 vec = SWITCH_LABELS (stmt);
3855 n = TREE_VEC_LENGTH (vec);
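/* The destinations' AUX fields are used as scratch flags: 1 marks a case
   destination, 2 marks one that also has an outgoing edge from BB; they
   are cleared again at the end.  */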
3857 /* Mark all the destination basic blocks. */
3858 for (i = 0; i < n; ++i)
3860 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3861 basic_block label_bb = label_to_block (lab);
3863 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
3864 label_bb->aux = (void *)1;
3867 /* Verify that the case labels are sorted. */
3868 prev = TREE_VEC_ELT (vec, 0);
3869 for (i = 1; i < n - 1; ++i)
3871 tree c = TREE_VEC_ELT (vec, i);
3872 if (! CASE_LOW (c))
3874 error ("found default case not at end of case vector");
3875 err = 1;
3876 continue;
3878 if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
3880 error ("case labels not sorted: ");
3881 print_generic_expr (stderr, prev, 0);
3882 fprintf (stderr," is greater than ");
3883 print_generic_expr (stderr, c, 0);
3884 fprintf (stderr," but comes before it.\n");
3885 err = 1;
3887 prev = c;
3889 if (CASE_LOW (TREE_VEC_ELT (vec, n - 1)))
3891 error ("no default case found at end of case vector");
3892 err = 1;
3895 FOR_EACH_EDGE (e, ei, bb->succs)
3897 if (!e->dest->aux)
3899 error ("extra outgoing edge %d->%d",
3900 bb->index, e->dest->index);
3901 err = 1;
3903 e->dest->aux = (void *)2;
3904 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
3905 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3907 error ("wrong outgoing edge flags at end of bb %d",
3908 bb->index);
3909 err = 1;
3913 /* Check that we have all of them. */
3914 for (i = 0; i < n; ++i)
3916 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3917 basic_block label_bb = label_to_block (lab);
3919 if (label_bb->aux != (void *)2)
3921 error ("missing edge %i->%i",
3922 bb->index, label_bb->index);
3923 err = 1;
3927 FOR_EACH_EDGE (e, ei, bb->succs)
3928 e->dest->aux = (void *)0;
3931 default: ;
3935 if (dom_computed[CDI_DOMINATORS] >= DOM_NO_FAST_QUERY)
3936 verify_dominators (CDI_DOMINATORS);
3938 return err;
3942 /* Updates phi nodes after creating a forwarder block joined
3943 by edge FALLTHRU. */
3945 static void
3946 tree_make_forwarder_block (edge fallthru)
3948 edge e;
3949 edge_iterator ei;
3950 basic_block dummy, bb;
3951 tree phi, new_phi, var;
3953 dummy = fallthru->src;
3954 bb = fallthru->dest;
3956 if (single_pred_p (bb))
3957 return;
3959 /* If we redirected a branch we must create new PHI nodes at the
3960 start of BB. */
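/* For each PHI in DUMMY, create a PHI in BB that takes over the original
   result, give DUMMY's PHI a fresh SSA name as its result, and feed that
   fresh name to the new PHI along FALLTHRU.  */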
3961 for (phi = phi_nodes (dummy); phi; phi = PHI_CHAIN (phi))
3963 var = PHI_RESULT (phi);
3964 new_phi = create_phi_node (var, bb);
3965 SSA_NAME_DEF_STMT (var) = new_phi;
3966 SET_PHI_RESULT (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
3967 add_phi_arg (new_phi, PHI_RESULT (phi), fallthru);
3970 /* Ensure that the PHI node chain is in the same order. */
3971 set_phi_nodes (bb, phi_reverse (phi_nodes (bb)));
3973 /* Add the arguments we have stored on edges. */
3974 FOR_EACH_EDGE (e, ei, bb->preds)
3976 if (e == fallthru)
3977 continue;
3979 flush_pending_stmts (e);
3984 /* Return a non-special label at the head of basic block BB.
3985 Create one if it doesn't exist. */
3987 tree
3988 tree_block_label (basic_block bb)
3990 block_stmt_iterator i, s = bsi_start (bb);
3991 bool first = true;
3992 tree label, stmt;
3994 for (i = s; !bsi_end_p (i); first = false, bsi_next (&i))
3996 stmt = bsi_stmt (i);
3997 if (TREE_CODE (stmt) != LABEL_EXPR)
3998 break;
3999 label = LABEL_EXPR_LABEL (stmt);
4000 if (!DECL_NONLOCAL (label))
4002 if (!first)
4003 bsi_move_before (&i, &s);
4004 return label;
4008 label = create_artificial_label ();
4009 stmt = build1 (LABEL_EXPR, void_type_node, label);
4010 bsi_insert_before (&s, stmt, BSI_NEW_STMT);
4011 return label;
4015 /* Attempt to perform edge redirection by replacing a possibly complex
4016 jump instruction by a goto or by removing the jump completely.
4017 This can apply only if all edges now point to the same block. The
4018 parameters and return values are equivalent to
4019 redirect_edge_and_branch. */
4021 static edge
4022 tree_try_redirect_by_replacing_jump (edge e, basic_block target)
4024 basic_block src = e->src;
4025 block_stmt_iterator b;
4026 tree stmt;
4028 /* We can replace or remove a complex jump only when we have exactly
4029 two edges. */
4030 if (EDGE_COUNT (src->succs) != 2
4031 /* Verify that all targets will be TARGET. Specifically, the
4032 edge that is not E must also go to TARGET. */
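/* (EDGE_SUCC (src, 0) == e) evaluates to 0 or 1, selecting the
   successor edge other than E.  */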
4033 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4034 return NULL;
4036 b = bsi_last (src);
4037 if (bsi_end_p (b))
4038 return NULL;
4039 stmt = bsi_stmt (b);
4041 if (TREE_CODE (stmt) == COND_EXPR
4042 || TREE_CODE (stmt) == SWITCH_EXPR)
4044 bsi_remove (&b, true);
4045 e = ssa_redirect_edge (e, target);
4046 e->flags = EDGE_FALLTHRU;
4047 return e;
4050 return NULL;
4054 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4055 edge representing the redirected branch. */
4057 static edge
4058 tree_redirect_edge_and_branch (edge e, basic_block dest)
4060 basic_block bb = e->src;
4061 block_stmt_iterator bsi;
4062 edge ret;
4063 tree label, stmt;
4065 if (e->flags & EDGE_ABNORMAL)
4066 return NULL;
4068 if (e->src != ENTRY_BLOCK_PTR
4069 && (ret = tree_try_redirect_by_replacing_jump (e, dest)))
4070 return ret;
4072 if (e->dest == dest)
4073 return NULL;
4075 label = tree_block_label (dest);
4077 bsi = bsi_last (bb);
4078 stmt = bsi_end_p (bsi) ? NULL : bsi_stmt (bsi);
4080 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
4082 case COND_EXPR:
4083 stmt = (e->flags & EDGE_TRUE_VALUE
4084 ? COND_EXPR_THEN (stmt)
4085 : COND_EXPR_ELSE (stmt));
4086 GOTO_DESTINATION (stmt) = label;
4087 break;
4089 case GOTO_EXPR:
4090 /* No non-abnormal edges should lead from a non-simple goto, and
4091 simple ones should be represented implicitly. */
4092 gcc_unreachable ();
4094 case SWITCH_EXPR:
4096 tree cases = get_cases_for_edge (e, stmt);
4098 /* If we have a list of cases associated with E, then use it
4099 as it's a lot faster than walking the entire case vector. */
4100 if (cases)
4102 edge e2 = find_edge (e->src, dest);
4103 tree last, first;
4105 first = cases;
4106 while (cases)
4108 last = cases;
4109 CASE_LABEL (cases) = label;
4110 cases = TREE_CHAIN (cases);
4113 /* If there was already an edge in the CFG, then we need
4114 to move all the cases associated with E to E2. */
4115 if (e2)
4117 tree cases2 = get_cases_for_edge (e2, stmt);
4119 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4120 TREE_CHAIN (cases2) = first;
4123 else
4125 tree vec = SWITCH_LABELS (stmt);
4126 size_t i, n = TREE_VEC_LENGTH (vec);
4128 for (i = 0; i < n; i++)
4130 tree elt = TREE_VEC_ELT (vec, i);
4132 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4133 CASE_LABEL (elt) = label;
4137 break;
4140 case RETURN_EXPR:
4141 bsi_remove (&bsi, true);
4142 e->flags |= EDGE_FALLTHRU;
4143 break;
4145 default:
4146 /* Otherwise it must be a fallthru edge, and we don't need to
4147 do anything besides redirecting it. */
4148 gcc_assert (e->flags & EDGE_FALLTHRU);
4149 break;
4152 /* Update/insert PHI nodes as necessary. */
4154 /* Now update the edges in the CFG. */
4155 e = ssa_redirect_edge (e, dest);
4157 return e;
4160 /* Returns true if it is possible to remove edge E by redirecting
4161 it to the destination of the other edge from E->src. */
4163 static bool
4164 tree_can_remove_branch_p (edge e)
4166 if (e->flags & EDGE_ABNORMAL)
4167 return false;
4169 return true;
4172 /* Simple wrapper, as we can always redirect fallthru edges. */
4174 static basic_block
4175 tree_redirect_edge_and_branch_force (edge e, basic_block dest)
4177 e = tree_redirect_edge_and_branch (e, dest);
4178 gcc_assert (e);
4180 return NULL;
4184 /* Splits basic block BB after statement STMT (but at least after the
4185 labels). If STMT is NULL, BB is split just after the labels. */
4187 static basic_block
4188 tree_split_block (basic_block bb, void *stmt)
4190 block_stmt_iterator bsi;
4191 tree_stmt_iterator tsi_tgt;
4192 tree act;
4193 basic_block new_bb;
4194 edge e;
4195 edge_iterator ei;
4197 new_bb = create_empty_bb (bb);
4199 /* Redirect the outgoing edges. */
4200 new_bb->succs = bb->succs;
4201 bb->succs = NULL;
4202 FOR_EACH_EDGE (e, ei, new_bb->succs)
4203 e->src = new_bb;
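/* Splitting after a label is the same as splitting right after the
   leading labels, so treat a LABEL_EXPR like a NULL STMT.  */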
4205 if (stmt && TREE_CODE ((tree) stmt) == LABEL_EXPR)
4206 stmt = NULL;
4208 /* Move everything from BSI to the new basic block. */
4209 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4211 act = bsi_stmt (bsi);
4212 if (TREE_CODE (act) == LABEL_EXPR)
4213 continue;
4215 if (!stmt)
4216 break;
4218 if (stmt == act)
4220 bsi_next (&bsi);
4221 break;
4225 if (bsi_end_p (bsi))
4226 return new_bb;
4228 /* Split the statement list - avoid re-creating new containers as this
4229 brings ugly quadratic memory consumption in the inliner.
4230 (We are still quadratic since we need to update stmt BB pointers,
4231 sadly.) */
4232 new_bb->stmt_list = tsi_split_statement_list_before (&bsi.tsi);
4233 for (tsi_tgt = tsi_start (new_bb->stmt_list);
4234 !tsi_end_p (tsi_tgt); tsi_next (&tsi_tgt))
4235 change_bb_for_stmt (tsi_stmt (tsi_tgt), new_bb);
4237 return new_bb;
4241 /* Moves basic block BB after block AFTER. */
4243 static bool
4244 tree_move_block_after (basic_block bb, basic_block after)
4246 if (bb->prev_bb == after)
4247 return true;
4249 unlink_block (bb);
4250 link_block (bb, after);
4252 return true;
4256 /* Return true if basic block BB can be duplicated. */
4258 static bool
4259 tree_can_duplicate_bb_p (basic_block bb ATTRIBUTE_UNUSED)
4261 return true;
4265 /* Create a duplicate of the basic block BB. NOTE: This does not
4266 preserve SSA form. */
4268 static basic_block
4269 tree_duplicate_bb (basic_block bb)
4271 basic_block new_bb;
4272 block_stmt_iterator bsi, bsi_tgt;
4273 tree phi;
4275 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
4277 /* Copy the PHI nodes. We ignore PHI node arguments here because
4278 the incoming edges have not been set up yet. */
4279 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4281 tree copy = create_phi_node (PHI_RESULT (phi), new_bb);
4282 create_new_def_for (PHI_RESULT (copy), copy, PHI_RESULT_PTR (copy));
4285 /* Keep the chain of PHI nodes in the same order so that they can be
4286 updated by ssa_redirect_edge. */
4287 set_phi_nodes (new_bb, phi_reverse (phi_nodes (new_bb)));
4289 bsi_tgt = bsi_start (new_bb);
4290 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4292 def_operand_p def_p;
4293 ssa_op_iter op_iter;
4294 tree stmt, copy;
4295 int region;
4297 stmt = bsi_stmt (bsi);
4298 if (TREE_CODE (stmt) == LABEL_EXPR)
4299 continue;
4301 /* Create a new copy of STMT and duplicate STMT's virtual
4302 operands. */
4303 copy = unshare_expr (stmt);
4304 bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
4305 copy_virtual_operands (copy, stmt);
4306 region = lookup_stmt_eh_region (stmt);
4307 if (region >= 0)
4308 add_stmt_to_eh_region (copy, region);
4309 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
4311 /* Create new names for all the definitions created by COPY and
4312 add replacement mappings for each new name. */
4313 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
4314 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
4317 return new_bb;
4321 /* Basic block BB_COPY was created by code duplication. Add phi node
4322 arguments for edges going out of BB_COPY. The blocks that were
4323 duplicated have BB_DUPLICATED set. */
4325 void
4326 add_phi_args_after_copy_bb (basic_block bb_copy)
4328 basic_block bb, dest;
4329 edge e, e_copy;
4330 edge_iterator ei;
4331 tree phi, phi_copy, phi_next, def;
4333 bb = get_bb_original (bb_copy);
4335 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
4337 if (!phi_nodes (e_copy->dest))
4338 continue;
4340 if (e_copy->dest->flags & BB_DUPLICATED)
4341 dest = get_bb_original (e_copy->dest);
4342 else
4343 dest = e_copy->dest;
4345 e = find_edge (bb, dest);
4346 if (!e)
4348 /* During loop unrolling the target of the latch edge is copied.
4349 In this case we are not looking for the edge to DEST, but for the
4350 duplicated block whose original was DEST. */
4351 FOR_EACH_EDGE (e, ei, bb->succs)
4352 if ((e->dest->flags & BB_DUPLICATED)
4353 && get_bb_original (e->dest) == dest)
4354 break;
4356 gcc_assert (e != NULL);
4359 for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
4360 phi;
4361 phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
4363 phi_next = PHI_CHAIN (phi);
4364 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4365 add_phi_arg (phi_copy, def, e_copy);
4370 /* Blocks in REGION_COPY array of length N_REGION were created by
4371 duplication of basic blocks. Add phi node arguments for edges
4372 going from these blocks. */
4374 void
4375 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region)
4377 unsigned i;
4379 for (i = 0; i < n_region; i++)
4380 region_copy[i]->flags |= BB_DUPLICATED;
4382 for (i = 0; i < n_region; i++)
4383 add_phi_args_after_copy_bb (region_copy[i]);
4385 for (i = 0; i < n_region; i++)
4386 region_copy[i]->flags &= ~BB_DUPLICATED;
4389 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
4390 important exit edge EXIT. By important we mean that no SSA name defined
4391 inside region is live over the other exit edges of the region. All entry
4392 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
4393 to the duplicate of the region. SSA form, dominance and loop information
4394 is updated. The new basic blocks are stored to REGION_COPY in the same
4395 order as they had in REGION, provided that REGION_COPY is not NULL.
4396 The function returns false if it is unable to copy the region,
4397 true otherwise. */
4399 bool
4400 tree_duplicate_sese_region (edge entry, edge exit,
4401 basic_block *region, unsigned n_region,
4402 basic_block *region_copy)
4404 unsigned i, n_doms;
4405 bool free_region_copy = false, copying_header = false;
4406 struct loop *loop = entry->dest->loop_father;
4407 edge exit_copy;
4408 basic_block *doms;
4409 edge redirected;
4410 int total_freq = 0, entry_freq = 0;
4411 gcov_type total_count = 0, entry_count = 0;
4413 if (!can_copy_bbs_p (region, n_region))
4414 return false;
4416 /* Some sanity checking. Note that we do not check for all possible
4417 misuses of the function. That is, if you ask to copy something weird,
4418 it will work, but the state of the data structures probably will not
4419 be correct. */
4420 for (i = 0; i < n_region; i++)
4422 /* We do not handle subloops, i.e. all the blocks must belong to the
4423 same loop. */
4424 if (region[i]->loop_father != loop)
4425 return false;
4427 if (region[i] != entry->dest
4428 && region[i] == loop->header)
4429 return false;
4432 loop->copy = loop;
4434 /* In case the function is used for loop header copying (which is the primary
4435 use), ensure that EXIT and its copy will be the new latch and entry edges. */
4436 if (loop->header == entry->dest)
4438 copying_header = true;
4439 loop->copy = loop->outer;
4441 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
4442 return false;
4444 for (i = 0; i < n_region; i++)
4445 if (region[i] != exit->src
4446 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
4447 return false;
4450 if (!region_copy)
4452 region_copy = XNEWVEC (basic_block, n_region);
4453 free_region_copy = true;
4456 gcc_assert (!need_ssa_update_p ());
4458 /* Record blocks outside the region that are dominated by something
4459 inside. */
4460 doms = XNEWVEC (basic_block, n_basic_blocks);
4461 initialize_original_copy_tables ();
4463 n_doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region, doms);
4465 if (entry->dest->count)
4467 total_count = entry->dest->count;
4468 entry_count = entry->count;
4469 /* Fix up corner cases, to avoid division by zero or creation of negative
4470 frequencies. */
4471 if (entry_count > total_count)
4472 entry_count = total_count;
4474 else
4476 total_freq = entry->dest->frequency;
4477 entry_freq = EDGE_FREQUENCY (entry);
4478 /* Fix up corner cases, to avoid division by zero or creation of negative
4479 frequencies. */
4480 if (total_freq == 0)
4481 total_freq = 1;
4482 else if (entry_freq > total_freq)
4483 entry_freq = total_freq;
4486 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
4487 split_edge_bb_loc (entry));
4488 if (total_count)
4490 scale_bbs_frequencies_gcov_type (region, n_region,
4491 total_count - entry_count,
4492 total_count);
4493 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
4494 total_count);
4496 else
4498 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
4499 total_freq);
4500 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
4503 if (copying_header)
4505 loop->header = exit->dest;
4506 loop->latch = exit->src;
4509 /* Redirect the entry and add the phi node arguments. */
4510 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
4511 gcc_assert (redirected != NULL);
4512 flush_pending_stmts (entry);
4514 /* Concerning updating of dominators: we must recount dominators
4515 for the entry block and its copy. Anything that is outside of the
4516 region but was dominated by something inside needs recounting as
4517 well. */
4518 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
4519 doms[n_doms++] = get_bb_original (entry->dest);
4520 iterate_fix_dominators (CDI_DOMINATORS, doms, n_doms);
4521 free (doms);
4523 /* Add the other PHI node arguments. */
4524 add_phi_args_after_copy (region_copy, n_region);
4526 /* Update the SSA web. */
4527 update_ssa (TODO_update_ssa);
4529 if (free_region_copy)
4530 free (region_copy);
4532 free_original_copy_tables ();
4533 return true;
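/* Editor's illustrative sketch, not part of the original file: one way a
   loop-header-copying style pass might call tree_duplicate_sese_region
   above, duplicating just the header of LOOP across its preheader edge.
   The helper name is hypothetical; it assumes loop preheaders exist and
   that the SSA web is up to date.  */

static bool ATTRIBUTE_UNUSED
example_copy_loop_header (struct loop *loop)
{
  basic_block header = loop->header;
  edge entry = loop_preheader_edge (loop);
  edge exit;

  /* Only handle a header with a two-way conditional exit.  */
  if (EDGE_COUNT (header->succs) != 2)
    return false;

  /* Pick the successor edge that stays inside the loop: its copy becomes
     the new entry edge and the edge itself becomes the new latch edge.  */
  exit = EDGE_SUCC (header, 0);
  if (!flow_bb_inside_loop_p (loop, exit->dest))
    exit = EDGE_SUCC (header, 1);

  /* The region is just { header }; REGION_COPY is allocated internally.  */
  return tree_duplicate_sese_region (entry, exit, &header, 1, NULL);
}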
4537 DEF_VEC_P(basic_block);
4538 DEF_VEC_ALLOC_P(basic_block,heap);
4541 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
4542 adding blocks when the dominator traversal reaches EXIT. This
4543 function silently assumes that ENTRY strictly dominates EXIT. */
4545 static void
4546 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
4547 VEC(basic_block,heap) **bbs_p)
4549 basic_block son;
4551 for (son = first_dom_son (CDI_DOMINATORS, entry);
4552 son;
4553 son = next_dom_son (CDI_DOMINATORS, son))
4555 VEC_safe_push (basic_block, heap, *bbs_p, son);
4556 if (son != exit)
4557 gather_blocks_in_sese_region (son, exit, bbs_p);
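/* Editor's illustrative sketch, not part of the original file: the same
   first_dom_son/next_dom_son walk used by gather_blocks_in_sese_region
   above, here merely counting the blocks strictly dominated by BB.  The
   helper name is hypothetical; dominance info must be available.  */

static unsigned ATTRIBUTE_UNUSED
example_count_dominated_blocks (basic_block bb)
{
  unsigned n = 0;
  basic_block son;

  for (son = first_dom_son (CDI_DOMINATORS, bb);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    n += 1 + example_count_dominated_blocks (son);

  return n;
}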
4562 struct move_stmt_d
4564 tree block;
4565 tree from_context;
4566 tree to_context;
4567 bitmap vars_to_remove;
4568 htab_t new_label_map;
4569 bool remap_decls_p;
4572 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
4573 contained in *TP and change the DECL_CONTEXT of every local
4574 variable referenced in *TP. */
4576 static tree
4577 move_stmt_r (tree *tp, int *walk_subtrees, void *data)
4579 struct move_stmt_d *p = (struct move_stmt_d *) data;
4580 tree t = *tp;
4582 if (p->block
4583 && (EXPR_P (t) || GIMPLE_STMT_P (t)))
4584 TREE_BLOCK (t) = p->block;
4586 if (OMP_DIRECTIVE_P (t)
4587 && TREE_CODE (t) != OMP_RETURN
4588 && TREE_CODE (t) != OMP_CONTINUE)
4590 /* Do not remap variables inside OMP directives. Variables
4591 referenced in clauses and in the directive header belong to the
4592 parent function and should not be moved into the child
4593 function. */
4594 bool save_remap_decls_p = p->remap_decls_p;
4595 p->remap_decls_p = false;
4596 *walk_subtrees = 0;
4598 walk_tree (&OMP_BODY (t), move_stmt_r, p, NULL);
4600 p->remap_decls_p = save_remap_decls_p;
4602 else if (DECL_P (t) && DECL_CONTEXT (t) == p->from_context)
4604 if (TREE_CODE (t) == LABEL_DECL)
4606 if (p->new_label_map)
4608 struct tree_map in, *out;
4609 in.base.from = t;
4610 out = htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
4611 if (out)
4612 *tp = t = out->to;
4615 DECL_CONTEXT (t) = p->to_context;
4617 else if (p->remap_decls_p)
4619 DECL_CONTEXT (t) = p->to_context;
4621 if (TREE_CODE (t) == VAR_DECL)
4623 struct function *f = DECL_STRUCT_FUNCTION (p->to_context);
4624 f->unexpanded_var_list
4625 = tree_cons (0, t, f->unexpanded_var_list);
4627 /* Mark T to be removed from the original function,
4628 otherwise it will be given a DECL_RTL when the
4629 original function is expanded. */
4630 bitmap_set_bit (p->vars_to_remove, DECL_UID (t));
4634 else if (TYPE_P (t))
4635 *walk_subtrees = 0;
4637 return NULL_TREE;
4641 /* Move basic block BB from function CFUN to function DEST_FN. The
4642 block is moved out of the original linked list and placed after
4643 block AFTER in the new list. Also, the block is removed from the
4644 original array of blocks and placed in DEST_FN's array of blocks.
4645 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
4646 updated to reflect the moved edges.
4648 On exit, local variables that need to be removed from
4649 CFUN->UNEXPANDED_VAR_LIST will have been added to VARS_TO_REMOVE. */
4651 static void
4652 move_block_to_fn (struct function *dest_cfun, basic_block bb,
4653 basic_block after, bool update_edge_count_p,
4654 bitmap vars_to_remove, htab_t new_label_map, int eh_offset)
4656 struct control_flow_graph *cfg;
4657 edge_iterator ei;
4658 edge e;
4659 block_stmt_iterator si;
4660 struct move_stmt_d d;
4661 unsigned old_len, new_len;
4663 /* Link BB to the new linked list. */
4664 move_block_after (bb, after);
4666 /* Update the edge count in the corresponding flowgraphs. */
4667 if (update_edge_count_p)
4668 FOR_EACH_EDGE (e, ei, bb->succs)
4670 cfun->cfg->x_n_edges--;
4671 dest_cfun->cfg->x_n_edges++;
4674 /* Remove BB from the original basic block array. */
4675 VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
4676 cfun->cfg->x_n_basic_blocks--;
4678 /* Grow DEST_CFUN's basic block array if needed. */
4679 cfg = dest_cfun->cfg;
4680 cfg->x_n_basic_blocks++;
4681 if (bb->index > cfg->x_last_basic_block)
4682 cfg->x_last_basic_block = bb->index;
4684 old_len = VEC_length (basic_block, cfg->x_basic_block_info);
4685 if ((unsigned) cfg->x_last_basic_block >= old_len)
4687 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
4688 VEC_safe_grow_cleared (basic_block, gc, cfg->x_basic_block_info,
4689 new_len);
4692 VEC_replace (basic_block, cfg->x_basic_block_info,
4693 cfg->x_last_basic_block, bb);
4695 /* The statements in BB need to be associated with a new TREE_BLOCK.
4696 Labels need to be associated with a new label-to-block map. */
4697 memset (&d, 0, sizeof (d));
4698 d.vars_to_remove = vars_to_remove;
4700 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
4702 tree stmt = bsi_stmt (si);
4703 int region;
4705 d.from_context = cfun->decl;
4706 d.to_context = dest_cfun->decl;
4707 d.remap_decls_p = true;
4708 d.new_label_map = new_label_map;
4709 if (TREE_BLOCK (stmt))
4710 d.block = DECL_INITIAL (dest_cfun->decl);
4712 walk_tree (&stmt, move_stmt_r, &d, NULL);
4714 if (TREE_CODE (stmt) == LABEL_EXPR)
4716 tree label = LABEL_EXPR_LABEL (stmt);
4717 int uid = LABEL_DECL_UID (label);
4719 gcc_assert (uid > -1);
4721 old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
4722 if (old_len <= (unsigned) uid)
4724 new_len = 3 * uid / 2;
4725 VEC_safe_grow_cleared (basic_block, gc,
4726 cfg->x_label_to_block_map, new_len);
4729 VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
4730 VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
4732 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
4734 if (uid >= dest_cfun->last_label_uid)
4735 dest_cfun->last_label_uid = uid + 1;
4737 else if (TREE_CODE (stmt) == RESX_EXPR && eh_offset != 0)
4738 TREE_OPERAND (stmt, 0) =
4739 build_int_cst (NULL_TREE,
4740 TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0))
4741 + eh_offset);
4743 region = lookup_stmt_eh_region (stmt);
4744 if (region >= 0)
4746 add_stmt_to_eh_region_fn (dest_cfun, stmt, region + eh_offset);
4747 remove_stmt_from_eh_region (stmt);
4748 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
4749 gimple_remove_stmt_histograms (cfun, stmt);
4754 /* Examine the statements in BB (which is in SRC_CFUN); find and return
4755 the outermost EH region. Use REGION as the incoming base EH region. */
4757 static int
4758 find_outermost_region_in_block (struct function *src_cfun,
4759 basic_block bb, int region)
4761 block_stmt_iterator si;
4763 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
4765 tree stmt = bsi_stmt (si);
4766 int stmt_region;
4768 if (TREE_CODE (stmt) == RESX_EXPR)
4769 stmt_region = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
4770 else
4771 stmt_region = lookup_stmt_eh_region_fn (src_cfun, stmt);
4772 if (stmt_region > 0)
4774 if (region < 0)
4775 region = stmt_region;
4776 else if (stmt_region != region)
4778 region = eh_region_outermost (src_cfun, stmt_region, region);
4779 gcc_assert (region != -1);
4784 return region;
4787 static tree
4788 new_label_mapper (tree decl, void *data)
4790 htab_t hash = (htab_t) data;
4791 struct tree_map *m;
4792 void **slot;
4794 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
4796 m = xmalloc (sizeof (struct tree_map));
4797 m->hash = DECL_UID (decl);
4798 m->base.from = decl;
4799 m->to = create_artificial_label ();
4800 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
4802 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
4803 gcc_assert (*slot == NULL);
4805 *slot = m;
4807 return m->to;
4810 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
4811 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
4812 single basic block in the original CFG and the new basic block is
4813 returned. DEST_CFUN must not have a CFG yet.
4815 Note that the region need not be a pure SESE region. Blocks inside
4816 the region may contain calls to abort/exit. The only restriction
4817 is that ENTRY_BB should be the only entry point and it must
4818 dominate EXIT_BB.
4820 All local variables referenced in the region are assumed to be in
4821 the corresponding BLOCK_VARS and unexpanded variable lists
4822 associated with DEST_CFUN. */
4824 basic_block
4825 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
4826 basic_block exit_bb)
4828 VEC(basic_block,heap) *bbs;
4829 basic_block after, bb, *entry_pred, *exit_succ;
4830 struct function *saved_cfun;
4831 int *entry_flag, *exit_flag, eh_offset;
4832 unsigned i, num_entry_edges, num_exit_edges;
4833 edge e;
4834 edge_iterator ei;
4835 bitmap vars_to_remove;
4836 htab_t new_label_map;
4838 saved_cfun = cfun;
4840 /* Collect all the blocks in the region. Manually add ENTRY_BB
4841 because it won't be added by gather_blocks_in_sese_region. */
4842 calculate_dominance_info (CDI_DOMINATORS);
4844 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
4845 region. */
4846 gcc_assert (entry_bb != exit_bb
4847 && (!exit_bb
4848 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
4850 bbs = NULL;
4851 VEC_safe_push (basic_block, heap, bbs, entry_bb);
4852 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
4854 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
4855 the predecessor edges to ENTRY_BB and the successor edges to
4856 EXIT_BB so that we can re-attach them to the new basic block that
4857 will replace the region. */
4858 num_entry_edges = EDGE_COUNT (entry_bb->preds);
4859 entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
4860 entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
4861 i = 0;
4862 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
4864 entry_flag[i] = e->flags;
4865 entry_pred[i++] = e->src;
4866 remove_edge (e);
4869 if (exit_bb)
4871 num_exit_edges = EDGE_COUNT (exit_bb->succs);
4872 exit_succ = (basic_block *) xcalloc (num_exit_edges,
4873 sizeof (basic_block));
4874 exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
4875 i = 0;
4876 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
4878 exit_flag[i] = e->flags;
4879 exit_succ[i++] = e->dest;
4880 remove_edge (e);
4883 else
4885 num_exit_edges = 0;
4886 exit_succ = NULL;
4887 exit_flag = NULL;
4890 /* Switch context to the child function to initialize DEST_FN's CFG. */
4891 gcc_assert (dest_cfun->cfg == NULL);
4892 cfun = dest_cfun;
4894 init_empty_tree_cfg ();
4896 /* Initialize EH information for the new function. */
4897 eh_offset = 0;
4898 new_label_map = NULL;
4899 if (saved_cfun->eh)
4901 int region = -1;
4903 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
4904 region = find_outermost_region_in_block (saved_cfun, bb, region);
4906 init_eh_for_function ();
4907 if (region != -1)
4909 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
4910 eh_offset = duplicate_eh_regions (saved_cfun, new_label_mapper,
4911 new_label_map, region, 0);
4915 cfun = saved_cfun;
4917 /* Move blocks from BBS into DEST_CFUN. */
4918 gcc_assert (VEC_length (basic_block, bbs) >= 2);
4919 after = dest_cfun->cfg->x_entry_block_ptr;
4920 vars_to_remove = BITMAP_ALLOC (NULL);
4921 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
4923 /* No need to update edge counts on the last block. They have
4924 already been updated earlier, when we detached the region from
4925 the original CFG. */
4926 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, vars_to_remove,
4927 new_label_map, eh_offset);
4928 after = bb;
4931 if (new_label_map)
4932 htab_delete (new_label_map);
4934 /* Remove the variables marked in VARS_TO_REMOVE from
4935 CFUN->UNEXPANDED_VAR_LIST. Otherwise, they will be given a
4936 DECL_RTL in the context of CFUN. */
4937 if (!bitmap_empty_p (vars_to_remove))
4939 tree *p;
4941 for (p = &cfun->unexpanded_var_list; *p; )
4943 tree var = TREE_VALUE (*p);
4944 if (bitmap_bit_p (vars_to_remove, DECL_UID (var)))
4946 *p = TREE_CHAIN (*p);
4947 continue;
4950 p = &TREE_CHAIN (*p);
4954 BITMAP_FREE (vars_to_remove);
4956 /* Rewire the entry and exit blocks. ENTRY_BB becomes the successor
4957 of DEST_FN's ENTRY_BLOCK_PTR in the child function. Similarly,
4958 EXIT_BB becomes the predecessor of DEST_FN's EXIT_BLOCK_PTR.
4959 We need to switch CFUN between DEST_CFUN and SAVED_CFUN so
4960 that the various CFG manipulation functions get to the right
4961 CFG.
4963 FIXME: this is silly. The CFG ought to become a parameter to
4964 these helpers. */
4965 cfun = dest_cfun;
4966 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
4967 if (exit_bb)
4968 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
4969 cfun = saved_cfun;
4971 /* Back in the original function, the SESE region has disappeared;
4972 create a new basic block in its place. */
4973 bb = create_empty_bb (entry_pred[0]);
4974 for (i = 0; i < num_entry_edges; i++)
4975 make_edge (entry_pred[i], bb, entry_flag[i]);
4977 for (i = 0; i < num_exit_edges; i++)
4978 make_edge (bb, exit_succ[i], exit_flag[i]);
4980 if (exit_bb)
4982 free (exit_flag);
4983 free (exit_succ);
4985 free (entry_flag);
4986 free (entry_pred);
4987 free_dominance_info (CDI_DOMINATORS);
4988 free_dominance_info (CDI_POST_DOMINATORS);
4989 VEC_free (basic_block, heap, bbs);
4991 return bb;
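/* Editor's illustrative sketch, not part of the original file: how an
   outlining pass, in the style of the OpenMP expansion code, might use
   move_sese_region_to_fn above.  CHILD_FN is assumed to be a
   FUNCTION_DECL whose struct function has been allocated but which has
   no CFG yet; the helper name is hypothetical.  */

static basic_block ATTRIBUTE_UNUSED
example_outline_region (tree child_fn, basic_block entry_bb,
			basic_block exit_bb)
{
  struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);

  /* The blocks between ENTRY_BB and EXIT_BB are detached from the
     current function and become CHILD_FN's body; the returned block is
     the single empty block left behind in their place.  */
  return move_sese_region_to_fn (child_cfun, entry_bb, exit_bb);
}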
4995 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h) */
4997 void
4998 dump_function_to_file (tree fn, FILE *file, int flags)
5000 tree arg, vars, var;
5001 struct function *dsf;
5002 bool ignore_topmost_bind = false, any_var = false;
5003 basic_block bb;
5004 tree chain;
5005 struct function *saved_cfun;
5007 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
5009 arg = DECL_ARGUMENTS (fn);
5010 while (arg)
5012 print_generic_expr (file, arg, dump_flags);
5013 if (TREE_CHAIN (arg))
5014 fprintf (file, ", ");
5015 arg = TREE_CHAIN (arg);
5017 fprintf (file, ")\n");
5019 dsf = DECL_STRUCT_FUNCTION (fn);
5020 if (dsf && (flags & TDF_DETAILS))
5021 dump_eh_tree (file, dsf);
5023 if (flags & TDF_RAW)
5025 dump_node (fn, TDF_SLIM | flags, file);
5026 return;
5029 /* Switch CFUN to point to FN. */
5030 saved_cfun = cfun;
5031 cfun = DECL_STRUCT_FUNCTION (fn);
5033 /* When GIMPLE is lowered, the variables are no longer available in
5034 BIND_EXPRs, so display them separately. */
5035 if (cfun && cfun->decl == fn && cfun->unexpanded_var_list)
5037 ignore_topmost_bind = true;
5039 fprintf (file, "{\n");
5040 for (vars = cfun->unexpanded_var_list; vars; vars = TREE_CHAIN (vars))
5042 var = TREE_VALUE (vars);
5044 print_generic_decl (file, var, flags);
5045 fprintf (file, "\n");
5047 any_var = true;
5051 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
5053 /* Make a CFG based dump. */
5054 check_bb_profile (ENTRY_BLOCK_PTR, file);
5055 if (!ignore_topmost_bind)
5056 fprintf (file, "{\n");
5058 if (any_var && n_basic_blocks)
5059 fprintf (file, "\n");
5061 FOR_EACH_BB (bb)
5062 dump_generic_bb (file, bb, 2, flags);
5064 fprintf (file, "}\n");
5065 check_bb_profile (EXIT_BLOCK_PTR, file);
5067 else
5069 int indent;
5071 /* Make a tree based dump. */
5072 chain = DECL_SAVED_TREE (fn);
5074 if (chain && TREE_CODE (chain) == BIND_EXPR)
5076 if (ignore_topmost_bind)
5078 chain = BIND_EXPR_BODY (chain);
5079 indent = 2;
5081 else
5082 indent = 0;
5084 else
5086 if (!ignore_topmost_bind)
5087 fprintf (file, "{\n");
5088 indent = 2;
5091 if (any_var)
5092 fprintf (file, "\n");
5094 print_generic_stmt_indented (file, chain, flags, indent);
5095 if (ignore_topmost_bind)
5096 fprintf (file, "}\n");
5099 fprintf (file, "\n\n");
5101 /* Restore CFUN. */
5102 cfun = saved_cfun;
5106 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
5108 void
5109 debug_function (tree fn, int flags)
5111 dump_function_to_file (fn, stderr, flags);
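/* Editor's note, illustrative and not part of the original file: the dump
   entry points above are handy from a debugger, e.g.

     (gdb) call debug_function (current_function_decl, TDF_BLOCKS)

   prints the CFG-based representation of the current function to
   stderr.  */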
5115 /* Pretty printing of the loop intermediate representation. */
5116 static void print_loop (FILE *, struct loop *, int);
5117 static void print_pred_bbs (FILE *, basic_block bb);
5118 static void print_succ_bbs (FILE *, basic_block bb);
5121 /* Print on FILE the indexes for the predecessors of basic_block BB. */
5123 static void
5124 print_pred_bbs (FILE *file, basic_block bb)
5126 edge e;
5127 edge_iterator ei;
5129 FOR_EACH_EDGE (e, ei, bb->preds)
5130 fprintf (file, "bb_%d ", e->src->index);
5134 /* Print on FILE the indexes for the successors of basic_block BB. */
5136 static void
5137 print_succ_bbs (FILE *file, basic_block bb)
5139 edge e;
5140 edge_iterator ei;
5142 FOR_EACH_EDGE (e, ei, bb->succs)
5143 fprintf (file, "bb_%d ", e->dest->index);
5147 /* Pretty print LOOP on FILE, indented INDENT spaces. */
5149 static void
5150 print_loop (FILE *file, struct loop *loop, int indent)
5152 char *s_indent;
5153 basic_block bb;
5155 if (loop == NULL)
5156 return;
5158 s_indent = (char *) alloca ((size_t) indent + 1);
5159 memset ((void *) s_indent, ' ', (size_t) indent);
5160 s_indent[indent] = '\0';
5162 /* Print the loop's header. */
5163 fprintf (file, "%sloop_%d\n", s_indent, loop->num);
5165 /* Print the loop's body. */
5166 fprintf (file, "%s{\n", s_indent);
5167 FOR_EACH_BB (bb)
5168 if (bb->loop_father == loop)
5170 /* Print the basic_block's header. */
5171 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
5172 print_pred_bbs (file, bb);
5173 fprintf (file, "}, succs = {");
5174 print_succ_bbs (file, bb);
5175 fprintf (file, "})\n");
5177 /* Print the basic_block's body. */
5178 fprintf (file, "%s {\n", s_indent);
5179 tree_dump_bb (bb, file, indent + 4);
5180 fprintf (file, "%s }\n", s_indent);
5183 print_loop (file, loop->inner, indent + 2);
5184 fprintf (file, "%s}\n", s_indent);
5185 print_loop (file, loop->next, indent);
5189 /* Follow a CFG edge from the entry point of the program and, on
5190 entering a loop, pretty print the loop structure on FILE. */
5192 void
5193 print_loop_ir (FILE *file)
5195 basic_block bb;
5197 bb = BASIC_BLOCK (NUM_FIXED_BLOCKS);
5198 if (bb && bb->loop_father)
5199 print_loop (file, bb->loop_father, 0);
5203 /* Debug the loop structure at the tree level. */
5205 void
5206 debug_loop_ir (void)
5208 print_loop_ir (stderr);
5212 /* Return true if BB ends with a call, possibly followed by some
5213 instructions that must stay with the call. Return false
5214 otherwise. */
5216 static bool
5217 tree_block_ends_with_call_p (basic_block bb)
5219 block_stmt_iterator bsi = bsi_last (bb);
5220 return get_call_expr_in (bsi_stmt (bsi)) != NULL;
5224 /* Return true if BB ends with a conditional branch. Return false
5225 otherwise. */
5227 static bool
5228 tree_block_ends_with_condjump_p (basic_block bb)
5230 tree stmt = last_stmt (bb);
5231 return (stmt && TREE_CODE (stmt) == COND_EXPR);
5235 /* Return true if we need to add fake edge to exit at statement T.
5236 Helper function for tree_flow_call_edges_add. */
5238 static bool
5239 need_fake_edge_p (tree t)
5241 tree call;
5243 /* NORETURN and LONGJMP calls already have an edge to exit.
5244 CONST and PURE calls do not need one.
5245 We don't currently check for CONST and PURE here, although
5246 it would be a good idea, because those attributes are
5247 figured out from the RTL in mark_constant_function, and
5248 the counter incrementation code from -fprofile-arcs
5249 leads to different results from -fbranch-probabilities. */
5250 call = get_call_expr_in (t);
5251 if (call
5252 && !(call_expr_flags (call) & ECF_NORETURN))
5253 return true;
5255 if (TREE_CODE (t) == ASM_EXPR
5256 && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
5257 return true;
5259 return false;
5263 /* Add fake edges to the function exit for any non-constant and
5264 non-noreturn calls and for volatile inline assembly, in the bitmap
5265 of blocks specified by BLOCKS, or in the whole CFG if BLOCKS is zero.
5266 Return the number of blocks that were split.
5268 The goal is to expose cases in which entering a basic block does
5269 not imply that all subsequent instructions must be executed. */
5271 static int
5272 tree_flow_call_edges_add (sbitmap blocks)
5274 int i;
5275 int blocks_split = 0;
5276 int last_bb = last_basic_block;
5277 bool check_last_block = false;
5279 if (n_basic_blocks == NUM_FIXED_BLOCKS)
5280 return 0;
5282 if (! blocks)
5283 check_last_block = true;
5284 else
5285 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
5287 /* In the last basic block, before epilogue generation, there will be
5288 a fallthru edge to EXIT. Special care is required if the last insn
5289 of the last basic block is a call because make_edge folds duplicate
5290 edges, which would result in the fallthru edge also being marked
5291 fake, which would result in the fallthru edge being removed by
5292 remove_fake_edges, which would result in an invalid CFG.
5294 Moreover, we can't elide the outgoing fake edge, since the block
5295 profiler needs to take this into account in order to solve the minimal
5296 spanning tree in the case that the call doesn't return.
5298 Handle this by adding a dummy instruction in a new last basic block. */
5299 if (check_last_block)
5301 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
5302 block_stmt_iterator bsi = bsi_last (bb);
5303 tree t = NULL_TREE;
5304 if (!bsi_end_p (bsi))
5305 t = bsi_stmt (bsi);
5307 if (t && need_fake_edge_p (t))
5309 edge e;
5311 e = find_edge (bb, EXIT_BLOCK_PTR);
5312 if (e)
5314 bsi_insert_on_edge (e, build_empty_stmt ());
5315 bsi_commit_edge_inserts ();
5320 /* Now add fake edges to the function exit for any non-constant
5321 calls, since there is no way we can determine whether they will
5322 return or not... */
5323 for (i = 0; i < last_bb; i++)
5325 basic_block bb = BASIC_BLOCK (i);
5326 block_stmt_iterator bsi;
5327 tree stmt, last_stmt;
5329 if (!bb)
5330 continue;
5332 if (blocks && !TEST_BIT (blocks, i))
5333 continue;
5335 bsi = bsi_last (bb);
5336 if (!bsi_end_p (bsi))
5338 last_stmt = bsi_stmt (bsi);
5341 stmt = bsi_stmt (bsi);
5342 if (need_fake_edge_p (stmt))
5344 edge e;
5345 /* The handling above of the final block before the
5346 epilogue should be enough to verify that there is
5347 no edge to the exit block in the CFG already.
5348 Calling make_edge in such a case would cause us to
5349 mark that edge as fake and remove it later. */
5350 #ifdef ENABLE_CHECKING
5351 if (stmt == last_stmt)
5353 e = find_edge (bb, EXIT_BLOCK_PTR);
5354 gcc_assert (e == NULL);
5356 #endif
5358 /* Note that the following may create a new basic block
5359 and renumber the existing basic blocks. */
5360 if (stmt != last_stmt)
5362 e = split_block (bb, stmt);
5363 if (e)
5364 blocks_split++;
5366 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
5368 bsi_prev (&bsi);
5370 while (!bsi_end_p (bsi));
5374 if (blocks_split)
5375 verify_flow_info ();
5377 return blocks_split;
5380 /* Purge dead abnormal call edges from basic block BB. */
5382 bool
5383 tree_purge_dead_abnormal_call_edges (basic_block bb)
5385 bool changed = tree_purge_dead_eh_edges (bb);
5387 if (current_function_has_nonlocal_label)
5389 tree stmt = last_stmt (bb);
5390 edge_iterator ei;
5391 edge e;
5393 if (!(stmt && tree_can_make_abnormal_goto (stmt)))
5394 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5396 if (e->flags & EDGE_ABNORMAL)
5398 remove_edge (e);
5399 changed = true;
5401 else
5402 ei_next (&ei);
5405 /* See tree_purge_dead_eh_edges below. */
5406 if (changed)
5407 free_dominance_info (CDI_DOMINATORS);
5410 return changed;
5413 /* Purge dead EH edges from basic block BB. */
5415 bool
5416 tree_purge_dead_eh_edges (basic_block bb)
5418 bool changed = false;
5419 edge e;
5420 edge_iterator ei;
5421 tree stmt = last_stmt (bb);
5423 if (stmt && tree_can_throw_internal (stmt))
5424 return false;
5426 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5428 if (e->flags & EDGE_EH)
5430 remove_edge (e);
5431 changed = true;
5433 else
5434 ei_next (&ei);
5437 /* Removal of dead EH edges might change dominators of not
5438 just immediate successors. E.g. when bb1 is changed so that
5439 it no longer can throw and bb1->bb3 and bb1->bb4 are dead
5440 eh edges purged by this function in:
5444      1-->2
5445     / \  |
5446    v   v |
5447    3-->4 |
5448     \    v
5449      --->5
5452 idom(bb5) must be recomputed. For now just free the dominance
5453 info. */
5454 if (changed)
5455 free_dominance_info (CDI_DOMINATORS);
5457 return changed;
5460 bool
5461 tree_purge_all_dead_eh_edges (bitmap blocks)
5463 bool changed = false;
5464 unsigned i;
5465 bitmap_iterator bi;
5467 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
5469 changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
5472 return changed;
5475 /* This function is called whenever a new edge is created or
5476 redirected. */
5478 static void
5479 tree_execute_on_growing_pred (edge e)
5481 basic_block bb = e->dest;
5483 if (phi_nodes (bb))
5484 reserve_phi_args_for_new_edge (bb);
5487 /* This function is called immediately before edge E is removed from
5488 the edge vector E->dest->preds. */
5490 static void
5491 tree_execute_on_shrinking_pred (edge e)
5493 if (phi_nodes (e->dest))
5494 remove_phi_args (e);
5497 /*---------------------------------------------------------------------------
5498 Helper functions for Loop versioning
5499 ---------------------------------------------------------------------------*/
5501 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
5502 of 'first'. Both of them are dominated by 'new_head' basic block. When
5503 'new_head' was created by splitting 'second's incoming edge, the edge from
5504 'new_head' to 'second' received phi arguments via split_edge(). Later, an
5505 additional edge 'e' was created to connect 'new_head' and 'first'. This
5506 routine now adds to edge 'e' the phi args that the 'new_head'-to-'second'
5507 edge received as part of that edge splitting. */
5510 static void
5511 tree_lv_adjust_loop_header_phi (basic_block first, basic_block second,
5512 basic_block new_head, edge e)
5514 tree phi1, phi2;
5515 edge e2 = find_edge (new_head, second);
5517 /* Because NEW_HEAD has been created by splitting SECOND's incoming
5518 edge, we should always have an edge from NEW_HEAD to SECOND. */
5519 gcc_assert (e2 != NULL);
5521 /* Browse all 'second' basic block phi nodes and add phi args to
5522 edge 'e' for 'first' head. PHI args are always in correct order. */
5524 for (phi2 = phi_nodes (second), phi1 = phi_nodes (first);
5525 phi2 && phi1;
5526 phi2 = PHI_CHAIN (phi2), phi1 = PHI_CHAIN (phi1))
5528 tree def = PHI_ARG_DEF (phi2, e2->dest_idx);
5529 add_phi_arg (phi1, def, e);
5533 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
5534 FIRST_HEAD is the destination of the THEN part and SECOND_HEAD is
5535 the destination of the ELSE part. */
5536 static void
5537 tree_lv_add_condition_to_bb (basic_block first_head, basic_block second_head,
5538 basic_block cond_bb, void *cond_e)
5540 block_stmt_iterator bsi;
5541 tree goto1 = NULL_TREE;
5542 tree goto2 = NULL_TREE;
5543 tree new_cond_expr = NULL_TREE;
5544 tree cond_expr = (tree) cond_e;
5545 edge e0;
5547 /* Build new conditional expr */
5548 goto1 = build1 (GOTO_EXPR, void_type_node, tree_block_label (first_head));
5549 goto2 = build1 (GOTO_EXPR, void_type_node, tree_block_label (second_head));
5550 new_cond_expr = build3 (COND_EXPR, void_type_node, cond_expr, goto1, goto2);
5552 /* Add new cond in cond_bb. */
5553 bsi = bsi_start (cond_bb);
5554 bsi_insert_after (&bsi, new_cond_expr, BSI_NEW_STMT);
5555 /* Adjust edges appropriately to connect new head with first head
5556 as well as second head. */
5557 e0 = single_succ_edge (cond_bb);
5558 e0->flags &= ~EDGE_FALLTHRU;
5559 e0->flags |= EDGE_FALSE_VALUE;
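/* Editor's note, illustrative and not part of the original file: the
   statement built above has the GIMPLE form

     if (COND_EXPR) goto <label of FIRST_HEAD>; else goto <label of SECOND_HEAD>;

   E0, the pre-existing fallthrough successor edge of COND_BB, is re-marked
   as the FALSE edge; creating the TRUE edge to FIRST_HEAD is left to the
   loop-versioning caller.  */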
5562 struct cfg_hooks tree_cfg_hooks = {
5563 "tree",
5564 tree_verify_flow_info,
5565 tree_dump_bb, /* dump_bb */
5566 create_bb, /* create_basic_block */
5567 tree_redirect_edge_and_branch,/* redirect_edge_and_branch */
5568 tree_redirect_edge_and_branch_force,/* redirect_edge_and_branch_force */
5569 tree_can_remove_branch_p, /* can_remove_branch_p */
5570 remove_bb, /* delete_basic_block */
5571 tree_split_block, /* split_block */
5572 tree_move_block_after, /* move_block_after */
5573 tree_can_merge_blocks_p, /* can_merge_blocks_p */
5574 tree_merge_blocks, /* merge_blocks */
5575 tree_predict_edge, /* predict_edge */
5576 tree_predicted_by_p, /* predicted_by_p */
5577 tree_can_duplicate_bb_p, /* can_duplicate_block_p */
5578 tree_duplicate_bb, /* duplicate_block */
5579 tree_split_edge, /* split_edge */
5580 tree_make_forwarder_block, /* make_forwarder_block */
5581 NULL, /* tidy_fallthru_edge */
5582 tree_block_ends_with_call_p, /* block_ends_with_call_p */
5583 tree_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
5584 tree_flow_call_edges_add, /* flow_call_edges_add */
5585 tree_execute_on_growing_pred, /* execute_on_growing_pred */
5586 tree_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
5587 tree_duplicate_loop_to_header_edge, /* duplicate loop for trees */
5588 tree_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
5589 tree_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
5590 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
5591 flush_pending_stmts /* flush_pending_stmts */
5595 /* Split all critical edges. */
5597 static unsigned int
5598 split_critical_edges (void)
5600 basic_block bb;
5601 edge e;
5602 edge_iterator ei;
5604 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
5605 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
5606 mappings around the calls to split_edge. */
5607 start_recording_case_labels ();
5608 FOR_ALL_BB (bb)
5610 FOR_EACH_EDGE (e, ei, bb->succs)
5611 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
5613 split_edge (e);
5616 end_recording_case_labels ();
5617 return 0;
5620 struct tree_opt_pass pass_split_crit_edges =
5622 "crited", /* name */
5623 NULL, /* gate */
5624 split_critical_edges, /* execute */
5625 NULL, /* sub */
5626 NULL, /* next */
5627 0, /* static_pass_number */
5628 TV_TREE_SPLIT_EDGES, /* tv_id */
5629 PROP_cfg, /* properties required */
5630 PROP_no_crit_edges, /* properties_provided */
5631 0, /* properties_destroyed */
5632 0, /* todo_flags_start */
5633 TODO_dump_func, /* todo_flags_finish */
5634 0 /* letter */
5638 /* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
5639 a temporary, registering it to be renamed if necessary,
5640 and finally return the temporary. Put the statements to compute
5641 EXP before the current statement in BSI. */
5643 tree
5644 gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
5646 tree t, new_stmt, orig_stmt;
5648 if (is_gimple_val (exp))
5649 return exp;
5651 t = make_rename_temp (type, NULL);
5652 new_stmt = build_gimple_modify_stmt (t, exp);
5654 orig_stmt = bsi_stmt (*bsi);
5655 SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
5656 TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
5658 bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
5659 if (gimple_in_ssa_p (cfun))
5660 mark_symbols_for_renaming (new_stmt);
5662 return t;
5665 /* Build a ternary operation and gimplify it. Emit code before BSI.
5666 Return the gimple_val holding the result. */
5668 tree
5669 gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
5670 tree type, tree a, tree b, tree c)
5672 tree ret;
5674 ret = fold_build3 (code, type, a, b, c);
5675 STRIP_NOPS (ret);
5677 return gimplify_val (bsi, type, ret);
5680 /* Build a binary operation and gimplify it. Emit code before BSI.
5681 Return the gimple_val holding the result. */
5683 tree
5684 gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
5685 tree type, tree a, tree b)
5687 tree ret;
5689 ret = fold_build2 (code, type, a, b);
5690 STRIP_NOPS (ret);
5692 return gimplify_val (bsi, type, ret);
5695 /* Build a unary operation and gimplify it. Emit code before BSI.
5696 Return the gimple_val holding the result. */
5698 tree
5699 gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
5700 tree a)
5702 tree ret;
5704 ret = fold_build1 (code, type, a);
5705 STRIP_NOPS (ret);
5707 return gimplify_val (bsi, type, ret);
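/* Editor's illustrative sketch, not part of the original file: using the
   gimplify_build* helpers above to emit A * B + C as GIMPLE statements
   before the statement at *BSI, returning a temporary that holds the
   result.  The helper name is hypothetical; A, B and C are assumed to
   share the same arithmetic TYPE.  */

static tree ATTRIBUTE_UNUSED
example_emit_muladd (block_stmt_iterator *bsi, tree a, tree b, tree c)
{
  tree type = TREE_TYPE (a);
  tree prod = gimplify_build2 (bsi, MULT_EXPR, type, a, b);

  return gimplify_build2 (bsi, PLUS_EXPR, type, prod, c);
}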
5712 /* Emit return warnings. */
5714 static unsigned int
5715 execute_warn_function_return (void)
5717 #ifdef USE_MAPPED_LOCATION
5718 source_location location;
5719 #else
5720 location_t *locus;
5721 #endif
5722 tree last;
5723 edge e;
5724 edge_iterator ei;
5726 /* If we have a path to EXIT, then we do return. */
5727 if (TREE_THIS_VOLATILE (cfun->decl)
5728 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
5730 #ifdef USE_MAPPED_LOCATION
5731 location = UNKNOWN_LOCATION;
5732 #else
5733 locus = NULL;
5734 #endif
5735 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5737 last = last_stmt (e->src);
5738 if (TREE_CODE (last) == RETURN_EXPR
5739 #ifdef USE_MAPPED_LOCATION
5740 && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
5741 #else
5742 && (locus = EXPR_LOCUS (last)) != NULL)
5743 #endif
5744 break;
5746 #ifdef USE_MAPPED_LOCATION
5747 if (location == UNKNOWN_LOCATION)
5748 location = cfun->function_end_locus;
5749 warning (0, "%H%<noreturn%> function does return", &location);
5750 #else
5751 if (!locus)
5752 locus = &cfun->function_end_locus;
5753 warning (0, "%H%<noreturn%> function does return", locus);
5754 #endif
5757 /* If we see "return;" in some basic block, then we do reach the end
5758 without returning a value. */
5759 else if (warn_return_type
5760 && !TREE_NO_WARNING (cfun->decl)
5761 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
5762 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
5764 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5766 tree last = last_stmt (e->src);
5767 if (TREE_CODE (last) == RETURN_EXPR
5768 && TREE_OPERAND (last, 0) == NULL
5769 && !TREE_NO_WARNING (last))
5771 #ifdef USE_MAPPED_LOCATION
5772 location = EXPR_LOCATION (last);
5773 if (location == UNKNOWN_LOCATION)
5774 location = cfun->function_end_locus;
5775 warning (0, "%Hcontrol reaches end of non-void function", &location);
5776 #else
5777 locus = EXPR_LOCUS (last);
5778 if (!locus)
5779 locus = &cfun->function_end_locus;
5780 warning (0, "%Hcontrol reaches end of non-void function", locus);
5781 #endif
5782 TREE_NO_WARNING (cfun->decl) = 1;
5783 break;
5787 return 0;
5791 /* Given a basic block B which ends with a conditional and has
5792 precisely two successors, determine which of the edges is taken if
5793 the conditional is true and which is taken if the conditional is
5794 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
5796 void
5797 extract_true_false_edges_from_block (basic_block b,
5798 edge *true_edge,
5799 edge *false_edge)
5801 edge e = EDGE_SUCC (b, 0);
5803 if (e->flags & EDGE_TRUE_VALUE)
5805 *true_edge = e;
5806 *false_edge = EDGE_SUCC (b, 1);
5808 else
5810 *false_edge = e;
5811 *true_edge = EDGE_SUCC (b, 1);
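/* Editor's illustrative sketch, not part of the original file: typical use
   of extract_true_false_edges_from_block on a block whose last statement
   is a COND_EXPR.  The helper name is hypothetical.  */

static void ATTRIBUTE_UNUSED
example_dump_cond_edges (FILE *file, basic_block bb)
{
  edge true_edge, false_edge;
  tree cond = COND_EXPR_COND (last_stmt (bb));

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

  fprintf (file, "if ");
  print_generic_expr (file, cond, 0);
  fprintf (file, " goto bb_%d; else goto bb_%d;\n",
	   true_edge->dest->index, false_edge->dest->index);
}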
5815 struct tree_opt_pass pass_warn_function_return =
5817 NULL, /* name */
5818 NULL, /* gate */
5819 execute_warn_function_return, /* execute */
5820 NULL, /* sub */
5821 NULL, /* next */
5822 0, /* static_pass_number */
5823 0, /* tv_id */
5824 PROP_cfg, /* properties_required */
5825 0, /* properties_provided */
5826 0, /* properties_destroyed */
5827 0, /* todo_flags_start */
5828 0, /* todo_flags_finish */
5829 0 /* letter */
5832 /* Emit noreturn warnings. */
5834 static unsigned int
5835 execute_warn_function_noreturn (void)
5837 if (warn_missing_noreturn
5838 && !TREE_THIS_VOLATILE (cfun->decl)
5839 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
5840 && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl))
5841 warning (OPT_Wmissing_noreturn, "%Jfunction might be possible candidate "
5842 "for attribute %<noreturn%>",
5843 cfun->decl);
5844 return 0;
5847 struct tree_opt_pass pass_warn_function_noreturn =
5849 NULL, /* name */
5850 NULL, /* gate */
5851 execute_warn_function_noreturn, /* execute */
5852 NULL, /* sub */
5853 NULL, /* next */
5854 0, /* static_pass_number */
5855 0, /* tv_id */
5856 PROP_cfg, /* properties_required */
5857 0, /* properties_provided */
5858 0, /* properties_destroyed */
5859 0, /* todo_flags_start */
5860 0, /* todo_flags_finish */
5861 0 /* letter */