1 /* Control flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006
3 Free Software Foundation, Inc.
4 Contributed by Diego Novillo <dnovillo@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to
20 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
21 Boston, MA 02110-1301, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "rtl.h"
29 #include "tm_p.h"
30 #include "hard-reg-set.h"
31 #include "basic-block.h"
32 #include "output.h"
33 #include "flags.h"
34 #include "function.h"
35 #include "expr.h"
36 #include "ggc.h"
37 #include "langhooks.h"
38 #include "diagnostic.h"
39 #include "tree-flow.h"
40 #include "timevar.h"
41 #include "tree-dump.h"
42 #include "tree-pass.h"
43 #include "toplev.h"
44 #include "except.h"
45 #include "cfgloop.h"
46 #include "cfglayout.h"
47 #include "hashtab.h"
48 #include "tree-ssa-propagate.h"
50 /* This file contains functions for building the Control Flow Graph (CFG)
51 for a function tree. */
53 /* Local declarations. */
55 /* Initial capacity for the basic block array. */
56 static const int initial_cfg_capacity = 20;
58 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
59 which use a particular edge. The CASE_LABEL_EXPRs are chained together
60 via their TREE_CHAIN field, which we clear after we're done with the
61 hash table to prevent problems with duplication of SWITCH_EXPRs.
63 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
64 update the case vector in response to edge redirections.
66 Right now this table is set up and torn down at key points in the
67 compilation process. It would be nice if we could make the table
68 more persistent. The key is getting notification of changes to
69 the CFG (particularly edge removal, creation and redirection). */
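/* As a small sketch of the mapping: for a lowered

     switch (x) { case 1: goto L; case 2: goto L; default: goto M; }

   the CASE_LABEL_EXPRs for 1 and 2 both reference the single edge to
   L's block, so they end up chained together on that edge's entry; a
   later redirection of that edge can then update both cases with one
   lookup instead of rescanning the whole case vector.  */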
71 struct edge_to_cases_elt
73 /* The edge itself. Necessary for hashing and equality tests. */
74 edge e;
76 /* The case labels associated with this edge. We link these up via
77 their TREE_CHAIN field, then we wipe out the TREE_CHAIN fields
78 when we destroy the hash table. This prevents problems when copying
79 SWITCH_EXPRs. */
80 tree case_labels;
83 static htab_t edge_to_cases;
85 /* CFG statistics. */
86 struct cfg_stats_d
88 long num_merged_labels;
91 static struct cfg_stats_d cfg_stats;
93 /* Nonzero if we found a computed goto while building basic blocks. */
94 static bool found_computed_goto;
96 /* Basic blocks and flowgraphs. */
97 static basic_block create_bb (void *, void *, basic_block);
98 static void make_blocks (tree);
99 static void factor_computed_gotos (void);
101 /* Edges. */
102 static void make_edges (void);
103 static void make_cond_expr_edges (basic_block);
104 static void make_switch_expr_edges (basic_block);
105 static void make_goto_expr_edges (basic_block);
106 static edge tree_redirect_edge_and_branch (edge, basic_block);
107 static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
108 static unsigned int split_critical_edges (void);
110 /* Various helpers. */
111 static inline bool stmt_starts_bb_p (tree, tree);
112 static int tree_verify_flow_info (void);
113 static void tree_make_forwarder_block (edge);
114 static void tree_cfg2vcg (FILE *);
115 static inline void change_bb_for_stmt (tree t, basic_block bb);
117 /* Flowgraph optimization and cleanup. */
118 static void tree_merge_blocks (basic_block, basic_block);
119 static bool tree_can_merge_blocks_p (basic_block, basic_block);
120 static void remove_bb (basic_block);
121 static edge find_taken_edge_computed_goto (basic_block, tree);
122 static edge find_taken_edge_cond_expr (basic_block, tree);
123 static edge find_taken_edge_switch_expr (basic_block, tree);
124 static tree find_case_label_for_value (tree, tree);
126 void
127 init_empty_tree_cfg (void)
129 /* Initialize the basic block array. */
130 init_flow ();
131 profile_status = PROFILE_ABSENT;
132 n_basic_blocks = NUM_FIXED_BLOCKS;
133 last_basic_block = NUM_FIXED_BLOCKS;
134 basic_block_info = VEC_alloc (basic_block, gc, initial_cfg_capacity);
135 VEC_safe_grow (basic_block, gc, basic_block_info, initial_cfg_capacity);
136 memset (VEC_address (basic_block, basic_block_info), 0,
137 sizeof (basic_block) * initial_cfg_capacity);
139 /* Build a mapping of labels to their associated blocks. */
140 label_to_block_map = VEC_alloc (basic_block, gc, initial_cfg_capacity);
141 VEC_safe_grow (basic_block, gc, label_to_block_map, initial_cfg_capacity);
142 memset (VEC_address (basic_block, label_to_block_map),
143 0, sizeof (basic_block) * initial_cfg_capacity);
145 SET_BASIC_BLOCK (ENTRY_BLOCK, ENTRY_BLOCK_PTR);
146 SET_BASIC_BLOCK (EXIT_BLOCK, EXIT_BLOCK_PTR);
147 ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
148 EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
151 /*---------------------------------------------------------------------------
152 Create basic blocks
153 ---------------------------------------------------------------------------*/
155 /* Entry point to the CFG builder for trees. TP points to the list of
156 statements to be added to the flowgraph. */
158 static void
159 build_tree_cfg (tree *tp)
161 /* Register specific tree functions. */
162 tree_register_cfg_hooks ();
164 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
166 init_empty_tree_cfg ();
168 found_computed_goto = 0;
169 make_blocks (*tp);
171 /* Computed gotos are hell to deal with, especially if there are
172 lots of them with a large number of destinations. So we factor
173 them to a common computed goto location before we build the
174 edge list. After we convert back to normal form, we will un-factor
175 the computed gotos since factoring introduces an unwanted jump. */
176 if (found_computed_goto)
177 factor_computed_gotos ();
179 /* Make sure there is always at least one block, even if it's empty. */
180 if (n_basic_blocks == NUM_FIXED_BLOCKS)
181 create_empty_bb (ENTRY_BLOCK_PTR);
183 /* Adjust the size of the array. */
184 if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
186 size_t old_size = VEC_length (basic_block, basic_block_info);
187 basic_block *p;
188 VEC_safe_grow (basic_block, gc, basic_block_info, n_basic_blocks);
189 p = VEC_address (basic_block, basic_block_info);
190 memset (&p[old_size], 0,
191 sizeof (basic_block) * (n_basic_blocks - old_size));
194 /* To speed up statement iterator walks, we first purge dead labels. */
195 cleanup_dead_labels ();
197 /* Group case nodes to reduce the number of edges.
198 We do this after cleaning up dead labels because otherwise we miss
199 a lot of obvious case merging opportunities. */
200 group_case_labels ();
202 /* Create the edges of the flowgraph. */
203 make_edges ();
205 /* Debugging dumps. */
207 /* Write the flowgraph to a VCG file. */
209 int local_dump_flags;
210 FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
211 if (vcg_file)
213 tree_cfg2vcg (vcg_file);
214 dump_end (TDI_vcg, vcg_file);
218 #ifdef ENABLE_CHECKING
219 verify_stmts ();
220 #endif
222 /* Dump a textual representation of the flowgraph. */
223 if (dump_file)
224 dump_tree_cfg (dump_file, dump_flags);
227 static unsigned int
228 execute_build_cfg (void)
230 build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
231 return 0;
234 struct tree_opt_pass pass_build_cfg =
236 "cfg", /* name */
237 NULL, /* gate */
238 execute_build_cfg, /* execute */
239 NULL, /* sub */
240 NULL, /* next */
241 0, /* static_pass_number */
242 TV_TREE_CFG, /* tv_id */
243 PROP_gimple_leh, /* properties_required */
244 PROP_cfg, /* properties_provided */
245 0, /* properties_destroyed */
246 0, /* todo_flags_start */
247 TODO_verify_stmts, /* todo_flags_finish */
248 0 /* letter */
251 /* Search the CFG for any computed gotos. If found, factor them to a
252 common computed goto site. Also record the location of that site so
253 that we can un-factor the gotos after we have converted back to
254 normal form. */
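/* Schematically (label and temporary names are only illustrative):

       goto *p_1;                    gotovar = p_1;  goto <factored>;
       ...              becomes      ...
       goto *p_2;                    gotovar = p_2;  goto <factored>;
                                     <factored>:  goto *gotovar;

   so only the single factored goto needs abnormal edges to all the
   potential destinations, not every original computed goto.  */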
256 static void
257 factor_computed_gotos (void)
259 basic_block bb;
260 tree factored_label_decl = NULL;
261 tree var = NULL;
262 tree factored_computed_goto_label = NULL;
263 tree factored_computed_goto = NULL;
265 /* We know there are one or more computed gotos in this function.
266 Examine the last statement in each basic block to see if the block
267 ends with a computed goto. */
269 FOR_EACH_BB (bb)
271 block_stmt_iterator bsi = bsi_last (bb);
272 tree last;
274 if (bsi_end_p (bsi))
275 continue;
276 last = bsi_stmt (bsi);
278 /* Ignore the computed goto we create when we factor the original
279 computed gotos. */
280 if (last == factored_computed_goto)
281 continue;
283 /* If the last statement is a computed goto, factor it. */
284 if (computed_goto_p (last))
286 tree assignment;
288 /* The first time we find a computed goto we need to create
289 the factored goto block and the variable each original
 290          computed goto will use for its goto destination.  */
291 if (! factored_computed_goto)
293 basic_block new_bb = create_empty_bb (bb);
294 block_stmt_iterator new_bsi = bsi_start (new_bb);
296 /* Create the destination of the factored goto. Each original
297 computed goto will put its desired destination into this
298 variable and jump to the label we create immediately
299 below. */
300 var = create_tmp_var (ptr_type_node, "gotovar");
302 /* Build a label for the new block which will contain the
303 factored computed goto. */
304 factored_label_decl = create_artificial_label ();
305 factored_computed_goto_label
306 = build1 (LABEL_EXPR, void_type_node, factored_label_decl);
307 bsi_insert_after (&new_bsi, factored_computed_goto_label,
308 BSI_NEW_STMT);
310 /* Build our new computed goto. */
311 factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
312 bsi_insert_after (&new_bsi, factored_computed_goto,
313 BSI_NEW_STMT);
316 /* Copy the original computed goto's destination into VAR. */
317 assignment = build2 (MODIFY_EXPR, ptr_type_node,
318 var, GOTO_DESTINATION (last));
319 bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);
321 /* And re-vector the computed goto to the new destination. */
322 GOTO_DESTINATION (last) = factored_label_decl;
328 /* Build a flowgraph for the statement_list STMT_LIST. */
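/* For example (schematically), for the lowered sequence

       a = b + 1;  if (a) goto L1; else goto L2;  L1: c = 2;  L2: return;

   the COND_EXPR ends the first block (stmt_ends_bb_p) and each label
   starts a new one (stmt_starts_bb_p), giving three basic blocks.  */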
330 static void
331 make_blocks (tree stmt_list)
333 tree_stmt_iterator i = tsi_start (stmt_list);
334 tree stmt = NULL;
335 bool start_new_block = true;
336 bool first_stmt_of_list = true;
337 basic_block bb = ENTRY_BLOCK_PTR;
339 while (!tsi_end_p (i))
341 tree prev_stmt;
343 prev_stmt = stmt;
344 stmt = tsi_stmt (i);
346 /* If the statement starts a new basic block or if we have determined
347 in a previous pass that we need to create a new block for STMT, do
348 so now. */
349 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
351 if (!first_stmt_of_list)
352 stmt_list = tsi_split_statement_list_before (&i);
353 bb = create_basic_block (stmt_list, NULL, bb);
354 start_new_block = false;
357 /* Now add STMT to BB and create the subgraphs for special statement
358 codes. */
359 set_bb_for_stmt (stmt, bb);
361 if (computed_goto_p (stmt))
362 found_computed_goto = true;
364 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
365 next iteration. */
366 if (stmt_ends_bb_p (stmt))
367 start_new_block = true;
369 tsi_next (&i);
370 first_stmt_of_list = false;
375 /* Create and return a new empty basic block after bb AFTER. */
377 static basic_block
378 create_bb (void *h, void *e, basic_block after)
380 basic_block bb;
382 gcc_assert (!e);
384 /* Create and initialize a new basic block. Since alloc_block uses
385 ggc_alloc_cleared to allocate a basic block, we do not have to
386 clear the newly allocated basic block here. */
387 bb = alloc_block ();
389 bb->index = last_basic_block;
390 bb->flags = BB_NEW;
391 bb->stmt_list = h ? (tree) h : alloc_stmt_list ();
393 /* Add the new block to the linked list of blocks. */
394 link_block (bb, after);
396 /* Grow the basic block array if needed. */
397 if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
399 size_t old_size = VEC_length (basic_block, basic_block_info);
400 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
401 basic_block *p;
402 VEC_safe_grow (basic_block, gc, basic_block_info, new_size);
403 p = VEC_address (basic_block, basic_block_info);
404 memset (&p[old_size], 0, sizeof (basic_block) * (new_size - old_size));
407 /* Add the newly created block to the array. */
408 SET_BASIC_BLOCK (last_basic_block, bb);
410 n_basic_blocks++;
411 last_basic_block++;
413 return bb;
417 /*---------------------------------------------------------------------------
418 Edge creation
419 ---------------------------------------------------------------------------*/
421 /* Fold COND_EXPR_COND of each COND_EXPR. */
423 void
424 fold_cond_expr_cond (void)
426 basic_block bb;
428 FOR_EACH_BB (bb)
430 tree stmt = last_stmt (bb);
432 if (stmt
433 && TREE_CODE (stmt) == COND_EXPR)
435 tree cond = fold (COND_EXPR_COND (stmt));
436 if (integer_zerop (cond))
437 COND_EXPR_COND (stmt) = boolean_false_node;
438 else if (integer_onep (cond))
439 COND_EXPR_COND (stmt) = boolean_true_node;
444 /* Join all the blocks in the flowgraph. */
446 static void
447 make_edges (void)
449 basic_block bb;
450 struct omp_region *cur_region = NULL;
452 /* Create an edge from entry to the first block with executable
453 statements in it. */
454 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);
456 /* Traverse the basic block array placing edges. */
457 FOR_EACH_BB (bb)
459 tree last = last_stmt (bb);
460 bool fallthru;
462 if (last)
464 enum tree_code code = TREE_CODE (last);
465 switch (code)
467 case GOTO_EXPR:
468 make_goto_expr_edges (bb);
469 fallthru = false;
470 break;
471 case RETURN_EXPR:
472 make_edge (bb, EXIT_BLOCK_PTR, 0);
473 fallthru = false;
474 break;
475 case COND_EXPR:
476 make_cond_expr_edges (bb);
477 fallthru = false;
478 break;
479 case SWITCH_EXPR:
480 make_switch_expr_edges (bb);
481 fallthru = false;
482 break;
483 case RESX_EXPR:
484 make_eh_edges (last);
485 fallthru = false;
486 break;
488 case CALL_EXPR:
489 /* If this function receives a nonlocal goto, then we need to
490 make edges from this call site to all the nonlocal goto
491 handlers. */
492 if (tree_can_make_abnormal_goto (last))
493 make_abnormal_goto_edges (bb, true);
495 /* If this statement has reachable exception handlers, then
496 create abnormal edges to them. */
497 make_eh_edges (last);
499 /* Some calls are known not to return. */
500 fallthru = !(call_expr_flags (last) & ECF_NORETURN);
501 break;
503 case MODIFY_EXPR:
504 if (is_ctrl_altering_stmt (last))
506 /* A MODIFY_EXPR may have a CALL_EXPR on its RHS and the
507 CALL_EXPR may have an abnormal edge. Search the RHS for
508 this case and create any required edges. */
509 if (tree_can_make_abnormal_goto (last))
510 make_abnormal_goto_edges (bb, true);
512 make_eh_edges (last);
514 fallthru = true;
515 break;
517 case OMP_PARALLEL:
518 case OMP_FOR:
519 case OMP_SINGLE:
520 case OMP_MASTER:
521 case OMP_ORDERED:
522 case OMP_CRITICAL:
523 case OMP_SECTION:
524 cur_region = new_omp_region (bb, code, cur_region);
525 fallthru = true;
526 break;
528 case OMP_SECTIONS:
529 cur_region = new_omp_region (bb, code, cur_region);
530 fallthru = false;
531 break;
533 case OMP_RETURN:
534 /* In the case of an OMP_SECTION, the edge will go somewhere
535 other than the next block. This will be created later. */
536 cur_region->exit = bb;
537 fallthru = cur_region->type != OMP_SECTION;
538 cur_region = cur_region->outer;
539 break;
541 case OMP_CONTINUE:
542 cur_region->cont = bb;
543 switch (cur_region->type)
545 case OMP_FOR:
 546            /* ??? Technically there should be some sort of loopback
547 edge here, but it goes to a block that doesn't exist yet,
548 and without it, updating the ssa form would be a real
549 bear. Fortunately, we don't yet do ssa before expanding
550 these nodes. */
551 break;
553 case OMP_SECTIONS:
554 /* Wire up the edges into and out of the nested sections. */
555 /* ??? Similarly wrt loopback. */
557 struct omp_region *i;
558 for (i = cur_region->inner; i ; i = i->next)
560 gcc_assert (i->type == OMP_SECTION);
561 make_edge (cur_region->entry, i->entry, 0);
562 make_edge (i->exit, bb, EDGE_FALLTHRU);
565 break;
567 default:
568 gcc_unreachable ();
570 fallthru = true;
571 break;
573 default:
574 gcc_assert (!stmt_ends_bb_p (last));
575 fallthru = true;
578 else
579 fallthru = true;
581 if (fallthru)
582 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
585 if (root_omp_region)
586 free_omp_regions ();
588 /* Fold COND_EXPR_COND of each COND_EXPR. */
589 fold_cond_expr_cond ();
591 /* Clean up the graph and warn for unreachable code. */
592 cleanup_tree_cfg ();
596 /* Create the edges for a COND_EXPR starting at block BB.
597 At this point, both clauses must contain only simple gotos. */
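/* E.g. (a sketch): after lowering, a conditional has the form

       if (a_1 > 0) goto L_then; else goto L_else;

   so the EDGE_TRUE_VALUE edge goes to the block holding L_then and the
   EDGE_FALSE_VALUE edge to the block holding L_else; GOTO_DESTINATION
   of each arm supplies the label looked up via label_to_block below.  */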
599 static void
600 make_cond_expr_edges (basic_block bb)
602 tree entry = last_stmt (bb);
603 basic_block then_bb, else_bb;
604 tree then_label, else_label;
605 edge e;
607 gcc_assert (entry);
608 gcc_assert (TREE_CODE (entry) == COND_EXPR);
610 /* Entry basic blocks for each component. */
611 then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
612 else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
613 then_bb = label_to_block (then_label);
614 else_bb = label_to_block (else_label);
616 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
617 #ifdef USE_MAPPED_LOCATION
618 e->goto_locus = EXPR_LOCATION (COND_EXPR_THEN (entry));
619 #else
620 e->goto_locus = EXPR_LOCUS (COND_EXPR_THEN (entry));
621 #endif
622 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
623 if (e)
625 #ifdef USE_MAPPED_LOCATION
626 e->goto_locus = EXPR_LOCATION (COND_EXPR_ELSE (entry));
627 #else
628 e->goto_locus = EXPR_LOCUS (COND_EXPR_ELSE (entry));
629 #endif
633 /* Hashing routine for EDGE_TO_CASES. */
635 static hashval_t
636 edge_to_cases_hash (const void *p)
638 edge e = ((struct edge_to_cases_elt *)p)->e;
640 /* Hash on the edge itself (which is a pointer). */
641 return htab_hash_pointer (e);
644 /* Equality routine for EDGE_TO_CASES, edges are unique, so testing
645 for equality is just a pointer comparison. */
647 static int
648 edge_to_cases_eq (const void *p1, const void *p2)
650 edge e1 = ((struct edge_to_cases_elt *)p1)->e;
651 edge e2 = ((struct edge_to_cases_elt *)p2)->e;
653 return e1 == e2;
656 /* Called for each element in the hash table (P) as we delete the
657 edge to cases hash table.
659 Clear all the TREE_CHAINs to prevent problems with copying of
660 SWITCH_EXPRs and structure sharing rules, then free the hash table
661 element. */
663 static void
664 edge_to_cases_cleanup (void *p)
666 struct edge_to_cases_elt *elt = (struct edge_to_cases_elt *) p;
667 tree t, next;
669 for (t = elt->case_labels; t; t = next)
671 next = TREE_CHAIN (t);
672 TREE_CHAIN (t) = NULL;
674 free (p);
677 /* Start recording information mapping edges to case labels. */
679 void
680 start_recording_case_labels (void)
682 gcc_assert (edge_to_cases == NULL);
684 edge_to_cases = htab_create (37,
685 edge_to_cases_hash,
686 edge_to_cases_eq,
687 edge_to_cases_cleanup);
690 /* Return nonzero if we are recording information for case labels. */
692 static bool
693 recording_case_labels_p (void)
695 return (edge_to_cases != NULL);
698 /* Stop recording information mapping edges to case labels and
699 remove any information we have recorded. */
700 void
701 end_recording_case_labels (void)
703 htab_delete (edge_to_cases);
704 edge_to_cases = NULL;
707 /* Record that CASE_LABEL (a CASE_LABEL_EXPR) references edge E. */
709 static void
710 record_switch_edge (edge e, tree case_label)
712 struct edge_to_cases_elt *elt;
713 void **slot;
715 /* Build a hash table element so we can see if E is already
716 in the table. */
717 elt = XNEW (struct edge_to_cases_elt);
718 elt->e = e;
719 elt->case_labels = case_label;
721 slot = htab_find_slot (edge_to_cases, elt, INSERT);
723 if (*slot == NULL)
725 /* E was not in the hash table. Install E into the hash table. */
726 *slot = (void *)elt;
728 else
730 /* E was already in the hash table. Free ELT as we do not need it
731 anymore. */
732 free (elt);
734 /* Get the entry stored in the hash table. */
735 elt = (struct edge_to_cases_elt *) *slot;
737 /* Add it to the chain of CASE_LABEL_EXPRs referencing E. */
738 TREE_CHAIN (case_label) = elt->case_labels;
739 elt->case_labels = case_label;
743 /* If we are inside a {start,end}_recording_cases block, then return
744 a chain of CASE_LABEL_EXPRs from T which reference E.
746 Otherwise return NULL. */
748 static tree
749 get_cases_for_edge (edge e, tree t)
751 struct edge_to_cases_elt elt, *elt_p;
752 void **slot;
753 size_t i, n;
754 tree vec;
756 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
757 chains available. Return NULL so the caller can detect this case. */
758 if (!recording_case_labels_p ())
759 return NULL;
761 restart:
762 elt.e = e;
763 elt.case_labels = NULL;
764 slot = htab_find_slot (edge_to_cases, &elt, NO_INSERT);
766 if (slot)
768 elt_p = (struct edge_to_cases_elt *)*slot;
769 return elt_p->case_labels;
772 /* If we did not find E in the hash table, then this must be the first
773 time we have been queried for information about E & T. Add all the
774 elements from T to the hash table then perform the query again. */
776 vec = SWITCH_LABELS (t);
777 n = TREE_VEC_LENGTH (vec);
778 for (i = 0; i < n; i++)
780 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
781 basic_block label_bb = label_to_block (lab);
782 record_switch_edge (find_edge (e->src, label_bb), TREE_VEC_ELT (vec, i));
784 goto restart;
787 /* Create the edges for a SWITCH_EXPR starting at block BB.
788 At this point, the switch body has been lowered and the
789 SWITCH_LABELS filled in, so this is in effect a multi-way branch. */
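/* E.g. (a sketch): for a SWITCH_EXPR whose SWITCH_LABELS vector holds
   "case 0: L0", "case 3 ... 5: L1" and "default: L2", we simply add one
   edge from BB to the block of each of L0, L1 and L2.  */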
791 static void
792 make_switch_expr_edges (basic_block bb)
794 tree entry = last_stmt (bb);
795 size_t i, n;
796 tree vec;
798 vec = SWITCH_LABELS (entry);
799 n = TREE_VEC_LENGTH (vec);
801 for (i = 0; i < n; ++i)
803 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
804 basic_block label_bb = label_to_block (lab);
805 make_edge (bb, label_bb, 0);
810 /* Return the basic block holding label DEST. */
812 basic_block
813 label_to_block_fn (struct function *ifun, tree dest)
815 int uid = LABEL_DECL_UID (dest);
 817   /* We would die hard when faced with an undefined label.  Emit a label to
818 the very first basic block. This will hopefully make even the dataflow
819 and undefined variable warnings quite right. */
820 if ((errorcount || sorrycount) && uid < 0)
822 block_stmt_iterator bsi =
823 bsi_start (BASIC_BLOCK (NUM_FIXED_BLOCKS));
824 tree stmt;
826 stmt = build1 (LABEL_EXPR, void_type_node, dest);
827 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
828 uid = LABEL_DECL_UID (dest);
830 if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
831 <= (unsigned int) uid)
832 return NULL;
833 return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
836 /* Create edges for an abnormal goto statement at block BB. If FOR_CALL
837 is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */
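/* A sketch of when this applies: a label whose address has been taken,

       void *p = &&lab;  ...  goto *p;       (FORCED_LABEL, computed goto)

   or a label in this function that a nested function reaches with a
   non-local goto (DECL_NONLOCAL, via a call).  In both situations we
   conservatively add an abnormal edge from BB to every such label's
   block, since the real destination is not known statically.  */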
839 void
840 make_abnormal_goto_edges (basic_block bb, bool for_call)
842 basic_block target_bb;
843 block_stmt_iterator bsi;
845 FOR_EACH_BB (target_bb)
846 for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
848 tree target = bsi_stmt (bsi);
850 if (TREE_CODE (target) != LABEL_EXPR)
851 break;
853 target = LABEL_EXPR_LABEL (target);
855 /* Make an edge to every label block that has been marked as a
856 potential target for a computed goto or a non-local goto. */
857 if ((FORCED_LABEL (target) && !for_call)
858 || (DECL_NONLOCAL (target) && for_call))
860 make_edge (bb, target_bb, EDGE_ABNORMAL);
861 break;
866 /* Create edges for a goto statement at block BB. */
868 static void
869 make_goto_expr_edges (basic_block bb)
871 block_stmt_iterator last = bsi_last (bb);
872 tree goto_t = bsi_stmt (last);
874 /* A simple GOTO creates normal edges. */
875 if (simple_goto_p (goto_t))
877 tree dest = GOTO_DESTINATION (goto_t);
878 edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
879 #ifdef USE_MAPPED_LOCATION
880 e->goto_locus = EXPR_LOCATION (goto_t);
881 #else
882 e->goto_locus = EXPR_LOCUS (goto_t);
883 #endif
884 bsi_remove (&last, true);
885 return;
888 /* A computed GOTO creates abnormal edges. */
889 make_abnormal_goto_edges (bb, false);
893 /*---------------------------------------------------------------------------
894 Flowgraph analysis
895 ---------------------------------------------------------------------------*/
897 /* Cleanup useless labels in basic blocks. This is something we wish
898 to do early because it allows us to group case labels before creating
899 the edges for the CFG, and it speeds up block statement iterators in
900 all passes later on.
 901    We only run this pass once; running it more than once is probably not
902 profitable. */
904 /* A map from basic block index to the leading label of that block. */
905 static tree *label_for_bb;
907 /* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
908 static void
909 update_eh_label (struct eh_region *region)
911 tree old_label = get_eh_region_tree_label (region);
912 if (old_label)
914 tree new_label;
915 basic_block bb = label_to_block (old_label);
917 /* ??? After optimizing, there may be EH regions with labels
918 that have already been removed from the function body, so
919 there is no basic block for them. */
920 if (! bb)
921 return;
923 new_label = label_for_bb[bb->index];
924 set_eh_region_tree_label (region, new_label);
928 /* Given LABEL return the first label in the same basic block. */
929 static tree
930 main_block_label (tree label)
932 basic_block bb = label_to_block (label);
934 /* label_to_block possibly inserted undefined label into the chain. */
935 if (!label_for_bb[bb->index])
936 label_for_bb[bb->index] = label;
937 return label_for_bb[bb->index];
940 /* Cleanup redundant labels. This is a three-step process:
941 1) Find the leading label for each block.
942 2) Redirect all references to labels to the leading labels.
943 3) Cleanup all useless labels. */
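/* A small sketch of the effect: if a block starts with

       <artificial L1>:
       user_label:
       x_1 = ...;

   then user_label is chosen as the leading label, every reference to L1
   is redirected to user_label, and L1 is deleted (user-defined labels,
   non-local labels and labels whose address is taken are kept).  */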
945 void
946 cleanup_dead_labels (void)
948 basic_block bb;
949 label_for_bb = XCNEWVEC (tree, last_basic_block);
951 /* Find a suitable label for each block. We use the first user-defined
952 label if there is one, or otherwise just the first label we see. */
953 FOR_EACH_BB (bb)
955 block_stmt_iterator i;
957 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
959 tree label, stmt = bsi_stmt (i);
961 if (TREE_CODE (stmt) != LABEL_EXPR)
962 break;
964 label = LABEL_EXPR_LABEL (stmt);
966 /* If we have not yet seen a label for the current block,
967 remember this one and see if there are more labels. */
968 if (! label_for_bb[bb->index])
970 label_for_bb[bb->index] = label;
971 continue;
974 /* If we did see a label for the current block already, but it
975 is an artificially created label, replace it if the current
976 label is a user defined label. */
977 if (! DECL_ARTIFICIAL (label)
978 && DECL_ARTIFICIAL (label_for_bb[bb->index]))
980 label_for_bb[bb->index] = label;
981 break;
986 /* Now redirect all jumps/branches to the selected label.
987 First do so for each block ending in a control statement. */
988 FOR_EACH_BB (bb)
990 tree stmt = last_stmt (bb);
991 if (!stmt)
992 continue;
994 switch (TREE_CODE (stmt))
996 case COND_EXPR:
998 tree true_branch, false_branch;
1000 true_branch = COND_EXPR_THEN (stmt);
1001 false_branch = COND_EXPR_ELSE (stmt);
1003 GOTO_DESTINATION (true_branch)
1004 = main_block_label (GOTO_DESTINATION (true_branch));
1005 GOTO_DESTINATION (false_branch)
1006 = main_block_label (GOTO_DESTINATION (false_branch));
1008 break;
1011 case SWITCH_EXPR:
1013 size_t i;
1014 tree vec = SWITCH_LABELS (stmt);
1015 size_t n = TREE_VEC_LENGTH (vec);
1017 /* Replace all destination labels. */
1018 for (i = 0; i < n; ++i)
1020 tree elt = TREE_VEC_ELT (vec, i);
1021 tree label = main_block_label (CASE_LABEL (elt));
1022 CASE_LABEL (elt) = label;
1024 break;
1027 /* We have to handle GOTO_EXPRs until they're removed, and we don't
1028 remove them until after we've created the CFG edges. */
1029 case GOTO_EXPR:
1030 if (! computed_goto_p (stmt))
1032 GOTO_DESTINATION (stmt)
1033 = main_block_label (GOTO_DESTINATION (stmt));
1034 break;
1037 default:
1038 break;
1042 for_each_eh_region (update_eh_label);
1044 /* Finally, purge dead labels. All user-defined labels and labels that
1045 can be the target of non-local gotos and labels which have their
1046 address taken are preserved. */
1047 FOR_EACH_BB (bb)
1049 block_stmt_iterator i;
1050 tree label_for_this_bb = label_for_bb[bb->index];
1052 if (! label_for_this_bb)
1053 continue;
1055 for (i = bsi_start (bb); !bsi_end_p (i); )
1057 tree label, stmt = bsi_stmt (i);
1059 if (TREE_CODE (stmt) != LABEL_EXPR)
1060 break;
1062 label = LABEL_EXPR_LABEL (stmt);
1064 if (label == label_for_this_bb
1065 || ! DECL_ARTIFICIAL (label)
1066 || DECL_NONLOCAL (label)
1067 || FORCED_LABEL (label))
1068 bsi_next (&i);
1069 else
1070 bsi_remove (&i, true);
1074 free (label_for_bb);
1077 /* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1078 and scan the sorted vector of cases. Combine the ones jumping to the
1079 same label.
1080 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1082 void
1083 group_case_labels (void)
1085 basic_block bb;
1087 FOR_EACH_BB (bb)
1089 tree stmt = last_stmt (bb);
1090 if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
1092 tree labels = SWITCH_LABELS (stmt);
1093 int old_size = TREE_VEC_LENGTH (labels);
1094 int i, j, new_size = old_size;
1095 tree default_case = TREE_VEC_ELT (labels, old_size - 1);
1096 tree default_label;
1098 /* The default label is always the last case in a switch
1099 statement after gimplification. */
1100 default_label = CASE_LABEL (default_case);
1102 /* Look for possible opportunities to merge cases.
1103 Ignore the last element of the label vector because it
1104 must be the default case. */
1105 i = 0;
1106 while (i < old_size - 1)
1108 tree base_case, base_label, base_high;
1109 base_case = TREE_VEC_ELT (labels, i);
1111 gcc_assert (base_case);
1112 base_label = CASE_LABEL (base_case);
1114 /* Discard cases that have the same destination as the
1115 default case. */
1116 if (base_label == default_label)
1118 TREE_VEC_ELT (labels, i) = NULL_TREE;
1119 i++;
1120 new_size--;
1121 continue;
1124 base_high = CASE_HIGH (base_case) ?
1125 CASE_HIGH (base_case) : CASE_LOW (base_case);
1126 i++;
1127 /* Try to merge case labels. Break out when we reach the end
1128 of the label vector or when we cannot merge the next case
1129 label with the current one. */
1130 while (i < old_size - 1)
1132 tree merge_case = TREE_VEC_ELT (labels, i);
1133 tree merge_label = CASE_LABEL (merge_case);
1134 tree t = int_const_binop (PLUS_EXPR, base_high,
1135 integer_one_node, 1);
1137 /* Merge the cases if they jump to the same place,
1138 and their ranges are consecutive. */
1139 if (merge_label == base_label
1140 && tree_int_cst_equal (CASE_LOW (merge_case), t))
1142 base_high = CASE_HIGH (merge_case) ?
1143 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1144 CASE_HIGH (base_case) = base_high;
1145 TREE_VEC_ELT (labels, i) = NULL_TREE;
1146 new_size--;
1147 i++;
1149 else
1150 break;
1154 /* Compress the case labels in the label vector, and adjust the
1155 length of the vector. */
1156 for (i = 0, j = 0; i < new_size; i++)
1158 while (! TREE_VEC_ELT (labels, j))
1159 j++;
1160 TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
1162 TREE_VEC_LENGTH (labels) = new_size;
1167 /* Checks whether we can merge block B into block A. */
1169 static bool
1170 tree_can_merge_blocks_p (basic_block a, basic_block b)
1172 tree stmt;
1173 block_stmt_iterator bsi;
1174 tree phi;
1176 if (!single_succ_p (a))
1177 return false;
1179 if (single_succ_edge (a)->flags & EDGE_ABNORMAL)
1180 return false;
1182 if (single_succ (a) != b)
1183 return false;
1185 if (!single_pred_p (b))
1186 return false;
1188 if (b == EXIT_BLOCK_PTR)
1189 return false;
1191 /* If A ends by a statement causing exceptions or something similar, we
1192 cannot merge the blocks. */
1193 stmt = last_stmt (a);
1194 if (stmt && stmt_ends_bb_p (stmt))
1195 return false;
1197 /* Do not allow a block with only a non-local label to be merged. */
1198 if (stmt && TREE_CODE (stmt) == LABEL_EXPR
1199 && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
1200 return false;
1202 /* It must be possible to eliminate all phi nodes in B. If ssa form
1203 is not up-to-date, we cannot eliminate any phis. */
1204 phi = phi_nodes (b);
1205 if (phi)
1207 if (need_ssa_update_p ())
1208 return false;
1210 for (; phi; phi = PHI_CHAIN (phi))
1211 if (!is_gimple_reg (PHI_RESULT (phi))
1212 && !may_propagate_copy (PHI_RESULT (phi), PHI_ARG_DEF (phi, 0)))
1213 return false;
1216 /* Do not remove user labels. */
1217 for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
1219 stmt = bsi_stmt (bsi);
1220 if (TREE_CODE (stmt) != LABEL_EXPR)
1221 break;
1222 if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
1223 return false;
1226 /* Protect the loop latches. */
1227 if (current_loops
1228 && b->loop_father->latch == b)
1229 return false;
1231 return true;
1234 /* Replaces all uses of NAME by VAL. */
1236 void
1237 replace_uses_by (tree name, tree val)
1239 imm_use_iterator imm_iter;
1240 use_operand_p use;
1241 tree stmt;
1242 edge e;
1243 unsigned i;
1246 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1248 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1250 replace_exp (use, val);
1252 if (TREE_CODE (stmt) == PHI_NODE)
1254 e = PHI_ARG_EDGE (stmt, PHI_ARG_INDEX_FROM_USE (use));
1255 if (e->flags & EDGE_ABNORMAL)
1257 /* This can only occur for virtual operands, since
1258 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1259 would prevent replacement. */
1260 gcc_assert (!is_gimple_reg (name));
1261 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1265 if (TREE_CODE (stmt) != PHI_NODE)
1267 tree rhs;
1269 fold_stmt_inplace (stmt);
1270 rhs = get_rhs (stmt);
1271 if (TREE_CODE (rhs) == ADDR_EXPR)
1272 recompute_tree_invariant_for_addr_expr (rhs);
1274 maybe_clean_or_replace_eh_stmt (stmt, stmt);
1275 mark_new_vars_to_rename (stmt);
1279 gcc_assert (num_imm_uses (name) == 0);
1281 /* Also update the trees stored in loop structures. */
1282 if (current_loops)
1284 struct loop *loop;
1286 for (i = 0; i < current_loops->num; i++)
1288 loop = current_loops->parray[i];
1289 if (loop)
1290 substitute_in_loop_info (loop, name, val);
1295 /* Merge block B into block A. */
1297 static void
1298 tree_merge_blocks (basic_block a, basic_block b)
1300 block_stmt_iterator bsi;
1301 tree_stmt_iterator last;
1302 tree phi;
1304 if (dump_file)
1305 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1307 /* Remove all single-valued PHI nodes from block B of the form
1308 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1309 bsi = bsi_last (a);
1310 for (phi = phi_nodes (b); phi; phi = phi_nodes (b))
1312 tree def = PHI_RESULT (phi), use = PHI_ARG_DEF (phi, 0);
1313 tree copy;
1314 bool may_replace_uses = may_propagate_copy (def, use);
1316 /* In case we have loops to care about, do not propagate arguments of
1317 loop closed ssa phi nodes. */
1318 if (current_loops
1319 && is_gimple_reg (def)
1320 && TREE_CODE (use) == SSA_NAME
1321 && a->loop_father != b->loop_father)
1322 may_replace_uses = false;
1324 if (!may_replace_uses)
1326 gcc_assert (is_gimple_reg (def));
1328 /* Note that just emitting the copies is fine -- there is no problem
1329 with ordering of phi nodes. This is because A is the single
1330 predecessor of B, therefore results of the phi nodes cannot
1331 appear as arguments of the phi nodes. */
1332 copy = build2 (MODIFY_EXPR, void_type_node, def, use);
1333 bsi_insert_after (&bsi, copy, BSI_NEW_STMT);
1334 SET_PHI_RESULT (phi, NULL_TREE);
1335 SSA_NAME_DEF_STMT (def) = copy;
1337 else
1338 replace_uses_by (def, use);
1340 remove_phi_node (phi, NULL);
1343 /* Ensure that B follows A. */
1344 move_block_after (b, a);
1346 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1347 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1349 /* Remove labels from B and set bb_for_stmt to A for other statements. */
1350 for (bsi = bsi_start (b); !bsi_end_p (bsi);)
1352 if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
1354 tree label = bsi_stmt (bsi);
1356 bsi_remove (&bsi, false);
1357 /* Now that we can thread computed gotos, we might have
1358 a situation where we have a forced label in block B
1359 However, the label at the start of block B might still be
1360 used in other ways (think about the runtime checking for
1361 Fortran assigned gotos). So we can not just delete the
1362 label. Instead we move the label to the start of block A. */
1363 if (FORCED_LABEL (LABEL_EXPR_LABEL (label)))
1365 block_stmt_iterator dest_bsi = bsi_start (a);
1366 bsi_insert_before (&dest_bsi, label, BSI_NEW_STMT);
1369 else
1371 change_bb_for_stmt (bsi_stmt (bsi), a);
1372 bsi_next (&bsi);
1376 /* Merge the chains. */
1377 last = tsi_last (a->stmt_list);
1378 tsi_link_after (&last, b->stmt_list, TSI_NEW_STMT);
1379 b->stmt_list = NULL;
1383 /* Return the one of the two successors of BB that is not reached
1384    by a complex edge, if there is one.  Else, return BB.  We use
1385 this in optimizations that use post-dominators for their heuristics,
1386 to catch the cases in C++ where function calls are involved. */
1388 basic_block
1389 single_noncomplex_succ (basic_block bb)
1391 edge e0, e1;
1392 if (EDGE_COUNT (bb->succs) != 2)
1393 return bb;
1395 e0 = EDGE_SUCC (bb, 0);
1396 e1 = EDGE_SUCC (bb, 1);
1397 if (e0->flags & EDGE_COMPLEX)
1398 return e1->dest;
1399 if (e1->flags & EDGE_COMPLEX)
1400 return e0->dest;
1402 return bb;
1406 /* Walk the function tree removing unnecessary statements.
1408 * Empty statement nodes are removed
1410 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1412 * Unnecessary COND_EXPRs are removed
1414 * Some unnecessary BIND_EXPRs are removed
1416 Clearly more work could be done. The trick is doing the analysis
1417 and removal fast enough to be a net improvement in compile times.
1419 Note that when we remove a control structure such as a COND_EXPR
1420 BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1421 to ensure we eliminate all the useless code. */
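/* Two sketches of the kind of simplification performed here:

       if (cond) { } else { }             -->  (empty statement)
       try { } finally { cleanup (); }    -->  cleanup ();

   each such rewrite sets the REPEAT flag below so that the whole walk
   is re-run until no further opportunities are exposed.  */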
1423 struct rus_data
1425 tree *last_goto;
1426 bool repeat;
1427 bool may_throw;
1428 bool may_branch;
1429 bool has_label;
1432 static void remove_useless_stmts_1 (tree *, struct rus_data *);
1434 static bool
1435 remove_useless_stmts_warn_notreached (tree stmt)
1437 if (EXPR_HAS_LOCATION (stmt))
1439 location_t loc = EXPR_LOCATION (stmt);
1440 if (LOCATION_LINE (loc) > 0)
1442 warning (0, "%Hwill never be executed", &loc);
1443 return true;
1447 switch (TREE_CODE (stmt))
1449 case STATEMENT_LIST:
1451 tree_stmt_iterator i;
1452 for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
1453 if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
1454 return true;
1456 break;
1458 case COND_EXPR:
1459 if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
1460 return true;
1461 if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
1462 return true;
1463 if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
1464 return true;
1465 break;
1467 case TRY_FINALLY_EXPR:
1468 case TRY_CATCH_EXPR:
1469 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
1470 return true;
1471 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
1472 return true;
1473 break;
1475 case CATCH_EXPR:
1476 return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
1477 case EH_FILTER_EXPR:
1478 return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
1479 case BIND_EXPR:
1480 return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));
1482 default:
1483 /* Not a live container. */
1484 break;
1487 return false;
1490 static void
1491 remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
1493 tree then_clause, else_clause, cond;
1494 bool save_has_label, then_has_label, else_has_label;
1496 save_has_label = data->has_label;
1497 data->has_label = false;
1498 data->last_goto = NULL;
1500 remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);
1502 then_has_label = data->has_label;
1503 data->has_label = false;
1504 data->last_goto = NULL;
1506 remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);
1508 else_has_label = data->has_label;
1509 data->has_label = save_has_label | then_has_label | else_has_label;
1511 then_clause = COND_EXPR_THEN (*stmt_p);
1512 else_clause = COND_EXPR_ELSE (*stmt_p);
1513 cond = fold (COND_EXPR_COND (*stmt_p));
1515 /* If neither arm does anything at all, we can remove the whole IF. */
1516 if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
1518 *stmt_p = build_empty_stmt ();
1519 data->repeat = true;
1522 /* If there are no reachable statements in an arm, then we can
1523 zap the entire conditional. */
1524 else if (integer_nonzerop (cond) && !else_has_label)
1526 if (warn_notreached)
1527 remove_useless_stmts_warn_notreached (else_clause);
1528 *stmt_p = then_clause;
1529 data->repeat = true;
1531 else if (integer_zerop (cond) && !then_has_label)
1533 if (warn_notreached)
1534 remove_useless_stmts_warn_notreached (then_clause);
1535 *stmt_p = else_clause;
1536 data->repeat = true;
1539 /* Check a couple of simple things on then/else with single stmts. */
1540 else
1542 tree then_stmt = expr_only (then_clause);
1543 tree else_stmt = expr_only (else_clause);
1545 /* Notice branches to a common destination. */
1546 if (then_stmt && else_stmt
1547 && TREE_CODE (then_stmt) == GOTO_EXPR
1548 && TREE_CODE (else_stmt) == GOTO_EXPR
1549 && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
1551 *stmt_p = then_stmt;
1552 data->repeat = true;
1555 /* If the THEN/ELSE clause merely assigns a value to a variable or
1556 parameter which is already known to contain that value, then
1557 remove the useless THEN/ELSE clause. */
1558 else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1560 if (else_stmt
1561 && TREE_CODE (else_stmt) == MODIFY_EXPR
1562 && TREE_OPERAND (else_stmt, 0) == cond
1563 && integer_zerop (TREE_OPERAND (else_stmt, 1)))
1564 COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
1566 else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
1567 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1568 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1569 && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
1571 tree stmt = (TREE_CODE (cond) == EQ_EXPR
1572 ? then_stmt : else_stmt);
1573 tree *location = (TREE_CODE (cond) == EQ_EXPR
1574 ? &COND_EXPR_THEN (*stmt_p)
1575 : &COND_EXPR_ELSE (*stmt_p));
1577 if (stmt
1578 && TREE_CODE (stmt) == MODIFY_EXPR
1579 && TREE_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
1580 && TREE_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
1581 *location = alloc_stmt_list ();
1585 /* Protect GOTOs in the arm of COND_EXPRs from being removed. They
1586 would be re-introduced during lowering. */
1587 data->last_goto = NULL;
1591 static void
1592 remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
1594 bool save_may_branch, save_may_throw;
1595 bool this_may_branch, this_may_throw;
1597 /* Collect may_branch and may_throw information for the body only. */
1598 save_may_branch = data->may_branch;
1599 save_may_throw = data->may_throw;
1600 data->may_branch = false;
1601 data->may_throw = false;
1602 data->last_goto = NULL;
1604 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1606 this_may_branch = data->may_branch;
1607 this_may_throw = data->may_throw;
1608 data->may_branch |= save_may_branch;
1609 data->may_throw |= save_may_throw;
1610 data->last_goto = NULL;
1612 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1614 /* If the body is empty, then we can emit the FINALLY block without
1615 the enclosing TRY_FINALLY_EXPR. */
1616 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
1618 *stmt_p = TREE_OPERAND (*stmt_p, 1);
1619 data->repeat = true;
1622 /* If the handler is empty, then we can emit the TRY block without
1623 the enclosing TRY_FINALLY_EXPR. */
1624 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1626 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1627 data->repeat = true;
1630 /* If the body neither throws, nor branches, then we can safely
1631 string the TRY and FINALLY blocks together. */
1632 else if (!this_may_branch && !this_may_throw)
1634 tree stmt = *stmt_p;
1635 *stmt_p = TREE_OPERAND (stmt, 0);
1636 append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
1637 data->repeat = true;
1642 static void
1643 remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
1645 bool save_may_throw, this_may_throw;
1646 tree_stmt_iterator i;
1647 tree stmt;
1649 /* Collect may_throw information for the body only. */
1650 save_may_throw = data->may_throw;
1651 data->may_throw = false;
1652 data->last_goto = NULL;
1654 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1656 this_may_throw = data->may_throw;
1657 data->may_throw = save_may_throw;
1659 /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
1660 if (!this_may_throw)
1662 if (warn_notreached)
1663 remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
1664 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1665 data->repeat = true;
1666 return;
1669 /* Process the catch clause specially. We may be able to tell that
1670 no exceptions propagate past this point. */
1672 this_may_throw = true;
1673 i = tsi_start (TREE_OPERAND (*stmt_p, 1));
1674 stmt = tsi_stmt (i);
1675 data->last_goto = NULL;
1677 switch (TREE_CODE (stmt))
1679 case CATCH_EXPR:
1680 for (; !tsi_end_p (i); tsi_next (&i))
1682 stmt = tsi_stmt (i);
1683 /* If we catch all exceptions, then the body does not
1684 propagate exceptions past this point. */
1685 if (CATCH_TYPES (stmt) == NULL)
1686 this_may_throw = false;
1687 data->last_goto = NULL;
1688 remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
1690 break;
1692 case EH_FILTER_EXPR:
1693 if (EH_FILTER_MUST_NOT_THROW (stmt))
1694 this_may_throw = false;
1695 else if (EH_FILTER_TYPES (stmt) == NULL)
1696 this_may_throw = false;
1697 remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
1698 break;
1700 default:
1701 /* Otherwise this is a cleanup. */
1702 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1704 /* If the cleanup is empty, then we can emit the TRY block without
1705 the enclosing TRY_CATCH_EXPR. */
1706 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1708 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1709 data->repeat = true;
1711 break;
1713 data->may_throw |= this_may_throw;
1717 static void
1718 remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
1720 tree block;
1722 /* First remove anything underneath the BIND_EXPR. */
1723 remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);
1725 /* If the BIND_EXPR has no variables, then we can pull everything
1726 up one level and remove the BIND_EXPR, unless this is the toplevel
1727 BIND_EXPR for the current function or an inlined function.
1729 When this situation occurs we will want to apply this
1730 optimization again. */
1731 block = BIND_EXPR_BLOCK (*stmt_p);
1732 if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
1733 && *stmt_p != DECL_SAVED_TREE (current_function_decl)
1734 && (! block
1735 || ! BLOCK_ABSTRACT_ORIGIN (block)
1736 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1737 != FUNCTION_DECL)))
1739 *stmt_p = BIND_EXPR_BODY (*stmt_p);
1740 data->repeat = true;
1745 static void
1746 remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
1748 tree dest = GOTO_DESTINATION (*stmt_p);
1750 data->may_branch = true;
1751 data->last_goto = NULL;
1753 /* Record the last goto expr, so that we can delete it if unnecessary. */
1754 if (TREE_CODE (dest) == LABEL_DECL)
1755 data->last_goto = stmt_p;
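/* A sketch of how LAST_GOTO pays off: for

       goto L;
       L: ...

   the GOTO_EXPR is recorded here, and remove_useless_stmts_label below
   then sees the matching LABEL_EXPR with nothing in between that clears
   last_goto, so the goto is replaced by an empty statement.  */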
1759 static void
1760 remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
1762 tree label = LABEL_EXPR_LABEL (*stmt_p);
1764 data->has_label = true;
1766 /* We do want to jump across non-local label receiver code. */
1767 if (DECL_NONLOCAL (label))
1768 data->last_goto = NULL;
1770 else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
1772 *data->last_goto = build_empty_stmt ();
1773 data->repeat = true;
1776 /* ??? Add something here to delete unused labels. */
1780 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
1781 decl. This allows us to eliminate redundant or useless
1782 calls to "const" functions.
1784    The gimplifier already does the same operation, but we may notice functions
1785    becoming const or pure only after their calls have been gimplified, so we need
1786 to update the flag. */
1788 static void
1789 update_call_expr_flags (tree call)
1791 tree decl = get_callee_fndecl (call);
1792 if (!decl)
1793 return;
1794 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1795 TREE_SIDE_EFFECTS (call) = 0;
1796 if (TREE_NOTHROW (decl))
1797 TREE_NOTHROW (call) = 1;
1801 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1803 void
1804 notice_special_calls (tree t)
1806 int flags = call_expr_flags (t);
1808 if (flags & ECF_MAY_BE_ALLOCA)
1809 current_function_calls_alloca = true;
1810 if (flags & ECF_RETURNS_TWICE)
1811 current_function_calls_setjmp = true;
1815 /* Clear flags set by notice_special_calls. Used by dead code removal
1816 to update the flags. */
1818 void
1819 clear_special_calls (void)
1821 current_function_calls_alloca = false;
1822 current_function_calls_setjmp = false;
1826 static void
1827 remove_useless_stmts_1 (tree *tp, struct rus_data *data)
1829 tree t = *tp, op;
1831 switch (TREE_CODE (t))
1833 case COND_EXPR:
1834 remove_useless_stmts_cond (tp, data);
1835 break;
1837 case TRY_FINALLY_EXPR:
1838 remove_useless_stmts_tf (tp, data);
1839 break;
1841 case TRY_CATCH_EXPR:
1842 remove_useless_stmts_tc (tp, data);
1843 break;
1845 case BIND_EXPR:
1846 remove_useless_stmts_bind (tp, data);
1847 break;
1849 case GOTO_EXPR:
1850 remove_useless_stmts_goto (tp, data);
1851 break;
1853 case LABEL_EXPR:
1854 remove_useless_stmts_label (tp, data);
1855 break;
1857 case RETURN_EXPR:
1858 fold_stmt (tp);
1859 data->last_goto = NULL;
1860 data->may_branch = true;
1861 break;
1863 case CALL_EXPR:
1864 fold_stmt (tp);
1865 data->last_goto = NULL;
1866 notice_special_calls (t);
1867 update_call_expr_flags (t);
1868 if (tree_could_throw_p (t))
1869 data->may_throw = true;
1870 break;
1872 case MODIFY_EXPR:
1873 data->last_goto = NULL;
1874 fold_stmt (tp);
1875 op = get_call_expr_in (t);
1876 if (op)
1878 update_call_expr_flags (op);
1879 notice_special_calls (op);
1881 if (tree_could_throw_p (t))
1882 data->may_throw = true;
1883 break;
1885 case STATEMENT_LIST:
1887 tree_stmt_iterator i = tsi_start (t);
1888 while (!tsi_end_p (i))
1890 t = tsi_stmt (i);
1891 if (IS_EMPTY_STMT (t))
1893 tsi_delink (&i);
1894 continue;
1897 remove_useless_stmts_1 (tsi_stmt_ptr (i), data);
1899 t = tsi_stmt (i);
1900 if (TREE_CODE (t) == STATEMENT_LIST)
1902 tsi_link_before (&i, t, TSI_SAME_STMT);
1903 tsi_delink (&i);
1905 else
1906 tsi_next (&i);
1909 break;
1910 case ASM_EXPR:
1911 fold_stmt (tp);
1912 data->last_goto = NULL;
1913 break;
1915 default:
1916 data->last_goto = NULL;
1917 break;
1921 static unsigned int
1922 remove_useless_stmts (void)
1924 struct rus_data data;
1926 clear_special_calls ();
1930 memset (&data, 0, sizeof (data));
1931 remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
1933 while (data.repeat);
1934 return 0;
1938 struct tree_opt_pass pass_remove_useless_stmts =
1940 "useless", /* name */
1941 NULL, /* gate */
1942 remove_useless_stmts, /* execute */
1943 NULL, /* sub */
1944 NULL, /* next */
1945 0, /* static_pass_number */
1946 0, /* tv_id */
1947 PROP_gimple_any, /* properties_required */
1948 0, /* properties_provided */
1949 0, /* properties_destroyed */
1950 0, /* todo_flags_start */
1951 TODO_dump_func, /* todo_flags_finish */
1952 0 /* letter */
1955 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1957 static void
1958 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1960 tree phi;
1962 /* Since this block is no longer reachable, we can just delete all
1963 of its PHI nodes. */
1964 phi = phi_nodes (bb);
1965 while (phi)
1967 tree next = PHI_CHAIN (phi);
1968 remove_phi_node (phi, NULL_TREE);
1969 phi = next;
1972 /* Remove edges to BB's successors. */
1973 while (EDGE_COUNT (bb->succs) > 0)
1974 remove_edge (EDGE_SUCC (bb, 0));
1978 /* Remove statements of basic block BB. */
1980 static void
1981 remove_bb (basic_block bb)
1983 block_stmt_iterator i;
1984 #ifdef USE_MAPPED_LOCATION
1985 source_location loc = UNKNOWN_LOCATION;
1986 #else
1987 source_locus loc = 0;
1988 #endif
1990 if (dump_file)
1992 fprintf (dump_file, "Removing basic block %d\n", bb->index);
1993 if (dump_flags & TDF_DETAILS)
1995 dump_bb (bb, dump_file, 0);
1996 fprintf (dump_file, "\n");
2000 /* If we remove the header or the latch of a loop, mark the loop for
2001 removal by setting its header and latch to NULL. */
2002 if (current_loops)
2004 struct loop *loop = bb->loop_father;
2006 if (loop->latch == bb
2007 || loop->header == bb)
2009 loop->latch = NULL;
2010 loop->header = NULL;
2012 /* Also clean up the information associated with the loop. Updating
2013 it would waste time. More importantly, it may refer to ssa
2014 names that were defined in other removed basic block -- these
2015 ssa names are now removed and invalid. */
2016 free_numbers_of_iterations_estimates_loop (loop);
2020 /* Remove all the instructions in the block. */
2021 for (i = bsi_start (bb); !bsi_end_p (i);)
2023 tree stmt = bsi_stmt (i);
2024 if (TREE_CODE (stmt) == LABEL_EXPR
2025 && (FORCED_LABEL (LABEL_EXPR_LABEL (stmt))
2026 || DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt))))
2028 basic_block new_bb;
2029 block_stmt_iterator new_bsi;
2031 /* A non-reachable non-local label may still be referenced.
2032 But it no longer needs to carry the extra semantics of
2033 non-locality. */
2034 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
2036 DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)) = 0;
2037 FORCED_LABEL (LABEL_EXPR_LABEL (stmt)) = 1;
2040 new_bb = bb->prev_bb;
2041 new_bsi = bsi_start (new_bb);
2042 bsi_remove (&i, false);
2043 bsi_insert_before (&new_bsi, stmt, BSI_NEW_STMT);
2045 else
2047 /* Release SSA definitions if we are in SSA. Note that we
2048 may be called when not in SSA. For example,
2049 final_cleanup calls this function via
2050 cleanup_tree_cfg. */
2051 if (in_ssa_p)
2052 release_defs (stmt);
2054 bsi_remove (&i, true);
2057 /* Don't warn for removed gotos. Gotos are often removed due to
2058 jump threading, thus resulting in bogus warnings. Not great,
2059 since this way we lose warnings for gotos in the original
2060 program that are indeed unreachable. */
2061 if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
2063 #ifdef USE_MAPPED_LOCATION
2064 if (EXPR_HAS_LOCATION (stmt))
2065 loc = EXPR_LOCATION (stmt);
2066 #else
2067 source_locus t;
2068 t = EXPR_LOCUS (stmt);
2069 if (t && LOCATION_LINE (*t) > 0)
2070 loc = t;
2071 #endif
2075 /* If requested, give a warning that the first statement in the
2076      block is unreachable.  We record only the first location we see in
2077      the loop above, so LOC refers to the first such statement
2078 in the block. */
2079 #ifdef USE_MAPPED_LOCATION
2080 if (loc > BUILTINS_LOCATION)
2081 warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
2082 #else
2083 if (loc)
2084 warning (OPT_Wunreachable_code, "%Hwill never be executed", loc);
2085 #endif
2087 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2091 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2092 predicate VAL, return the edge that will be taken out of the block.
2093 If VAL does not match a unique edge, NULL is returned. */
2095 edge
2096 find_taken_edge (basic_block bb, tree val)
2098 tree stmt;
2100 stmt = last_stmt (bb);
2102 gcc_assert (stmt);
2103 gcc_assert (is_ctrl_stmt (stmt));
2104 gcc_assert (val);
2106 if (! is_gimple_min_invariant (val))
2107 return NULL;
2109 if (TREE_CODE (stmt) == COND_EXPR)
2110 return find_taken_edge_cond_expr (bb, val);
2112 if (TREE_CODE (stmt) == SWITCH_EXPR)
2113 return find_taken_edge_switch_expr (bb, val);
2115 if (computed_goto_p (stmt))
2116 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2118 gcc_unreachable ();
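/* Illustrative sketch, not part of GCC: a pass that has proven the
   controlling predicate of BB to be the constant VAL could use
   find_taken_edge to prune the successors that can no longer be
   reached.  The function name example_prune_succs is hypothetical;
   real callers (e.g. the constant propagators) also fold or remove
   the control statement itself afterwards.  */

static void
example_prune_succs (basic_block bb, tree val)
{
  edge taken = find_taken_edge (bb, val);
  edge e;
  edge_iterator ei;

  if (!taken)
    return;

  /* Remove every outgoing edge other than the one known to be taken.
     Do not advance the iterator after a removal.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (e != taken)
	remove_edge (e);
      else
	ei_next (&ei);
    }
}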
2121 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2122 statement, determine which of the outgoing edges will be taken out of the
2123 block. Return NULL if any edge may be taken. */
2125 static edge
2126 find_taken_edge_computed_goto (basic_block bb, tree val)
2128 basic_block dest;
2129 edge e = NULL;
2131 dest = label_to_block (val);
2132 if (dest)
2134 e = find_edge (bb, dest);
2135 gcc_assert (e != NULL);
2138 return e;
2141 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2142 statement, determine which of the two edges will be taken out of the
2143 block. Return NULL if either edge may be taken. */
2145 static edge
2146 find_taken_edge_cond_expr (basic_block bb, tree val)
2148 edge true_edge, false_edge;
2150 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2152 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2153 return (zero_p (val) ? false_edge : true_edge);
2156 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2157 statement, determine which edge will be taken out of the block. Return
2158 NULL if any edge may be taken. */
2160 static edge
2161 find_taken_edge_switch_expr (basic_block bb, tree val)
2163 tree switch_expr, taken_case;
2164 basic_block dest_bb;
2165 edge e;
2167 switch_expr = last_stmt (bb);
2168 taken_case = find_case_label_for_value (switch_expr, val);
2169 dest_bb = label_to_block (CASE_LABEL (taken_case));
2171 e = find_edge (bb, dest_bb);
2172 gcc_assert (e);
2173 return e;
2177 /* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
2178 We can make optimal use here of the fact that the case labels are
2179 sorted: We can do a binary search for a case matching VAL. */
2181 static tree
2182 find_case_label_for_value (tree switch_expr, tree val)
2184 tree vec = SWITCH_LABELS (switch_expr);
2185 size_t low, high, n = TREE_VEC_LENGTH (vec);
2186 tree default_case = TREE_VEC_ELT (vec, n - 1);
2188 for (low = -1, high = n - 1; high - low > 1; )
2190 size_t i = (high + low) / 2;
2191 tree t = TREE_VEC_ELT (vec, i);
2192 int cmp;
2194 /* Cache the result of comparing CASE_LOW and val. */
2195 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2197 if (cmp > 0)
2198 high = i;
2199 else
2200 low = i;
2202 if (CASE_HIGH (t) == NULL)
2204 /* A single-valued case label. */
2205 if (cmp == 0)
2206 return t;
2208 else
2210 /* A case range. We can only handle integer ranges. */
2211 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2212 return t;
2216 return default_case;
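/* Worked example for the search above (illustrative only): with a
   label vector { case 1:, case 5:, case 10 ... 20:, default: } and
   VAL == 12, the bisection narrows the window until it inspects the
   range label, whose CASE_LOW (10) <= 12 and CASE_HIGH (20) >= 12,
   so that label is returned.  For VAL == 7 no label matches and the
   trailing default_case is returned instead.  */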
2222 /*---------------------------------------------------------------------------
2223 Debugging functions
2224 ---------------------------------------------------------------------------*/
2226 /* Dump tree-specific information of block BB to file OUTF. */
2228 void
2229 tree_dump_bb (basic_block bb, FILE *outf, int indent)
2231 dump_generic_bb (outf, bb, indent, TDF_VOPS);
2235 /* Dump a basic block on stderr. */
2237 void
2238 debug_tree_bb (basic_block bb)
2240 dump_bb (bb, stderr, 0);
2244 /* Dump basic block with index N on stderr. */
2246 basic_block
2247 debug_tree_bb_n (int n)
2249 debug_tree_bb (BASIC_BLOCK (n));
2250 return BASIC_BLOCK (n);
2254 /* Dump the CFG on stderr.
2256 FLAGS are the same as those used by the tree dumping functions
2257 (see TDF_* in tree-pass.h). */
2259 void
2260 debug_tree_cfg (int flags)
2262 dump_tree_cfg (stderr, flags);
2266 /* Dump the program showing basic block boundaries on the given FILE.
2268 FLAGS are the same as those used by the tree dumping functions (see TDF_* in
2269 tree.h). */
2271 void
2272 dump_tree_cfg (FILE *file, int flags)
2274 if (flags & TDF_DETAILS)
2276 const char *funcname
2277 = lang_hooks.decl_printable_name (current_function_decl, 2);
2279 fputc ('\n', file);
2280 fprintf (file, ";; Function %s\n\n", funcname);
2281 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2282 n_basic_blocks, n_edges, last_basic_block);
2284 brief_dump_cfg (file);
2285 fprintf (file, "\n");
2288 if (flags & TDF_STATS)
2289 dump_cfg_stats (file);
2291 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2295 /* Dump CFG statistics on FILE. */
2297 void
2298 dump_cfg_stats (FILE *file)
2300 static long max_num_merged_labels = 0;
2301 unsigned long size, total = 0;
2302 long num_edges;
2303 basic_block bb;
2304 const char * const fmt_str = "%-30s%-13s%12s\n";
2305 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2306 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2307 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2308 const char *funcname
2309 = lang_hooks.decl_printable_name (current_function_decl, 2);
2312 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2314 fprintf (file, "---------------------------------------------------------\n");
2315 fprintf (file, fmt_str, "", " Number of ", "Memory");
2316 fprintf (file, fmt_str, "", " instances ", "used ");
2317 fprintf (file, "---------------------------------------------------------\n");
2319 size = n_basic_blocks * sizeof (struct basic_block_def);
2320 total += size;
2321 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2322 SCALE (size), LABEL (size));
2324 num_edges = 0;
2325 FOR_EACH_BB (bb)
2326 num_edges += EDGE_COUNT (bb->succs);
2327 size = num_edges * sizeof (struct edge_def);
2328 total += size;
2329 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2331 fprintf (file, "---------------------------------------------------------\n");
2332 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2333 LABEL (total));
2334 fprintf (file, "---------------------------------------------------------\n");
2335 fprintf (file, "\n");
2337 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2338 max_num_merged_labels = cfg_stats.num_merged_labels;
2340 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2341 cfg_stats.num_merged_labels, max_num_merged_labels);
2343 fprintf (file, "\n");
2347 /* Dump CFG statistics on stderr. Keep extern so that it's always
2348 linked in the final executable. */
2350 void
2351 debug_cfg_stats (void)
2353 dump_cfg_stats (stderr);
2357 /* Dump the flowgraph to a .vcg FILE. */
2359 static void
2360 tree_cfg2vcg (FILE *file)
2362 edge e;
2363 edge_iterator ei;
2364 basic_block bb;
2365 const char *funcname
2366 = lang_hooks.decl_printable_name (current_function_decl, 2);
2368 /* Write the file header. */
2369 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2370 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2371 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2373 /* Write blocks and edges. */
2374 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2376 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2377 e->dest->index);
2379 if (e->flags & EDGE_FAKE)
2380 fprintf (file, " linestyle: dotted priority: 10");
2381 else
2382 fprintf (file, " linestyle: solid priority: 100");
2384 fprintf (file, " }\n");
2386 fputc ('\n', file);
2388 FOR_EACH_BB (bb)
2390 enum tree_code head_code, end_code;
2391 const char *head_name, *end_name;
2392 int head_line = 0;
2393 int end_line = 0;
2394 tree first = first_stmt (bb);
2395 tree last = last_stmt (bb);
2397 if (first)
2399 head_code = TREE_CODE (first);
2400 head_name = tree_code_name[head_code];
2401 head_line = get_lineno (first);
2403 else
2404 head_name = "no-statement";
2406 if (last)
2408 end_code = TREE_CODE (last);
2409 end_name = tree_code_name[end_code];
2410 end_line = get_lineno (last);
2412 else
2413 end_name = "no-statement";
2415 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2416 bb->index, bb->index, head_name, head_line, end_name,
2417 end_line);
2419 FOR_EACH_EDGE (e, ei, bb->succs)
2421 if (e->dest == EXIT_BLOCK_PTR)
2422 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2423 else
2424 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2426 if (e->flags & EDGE_FAKE)
2427 fprintf (file, " priority: 10 linestyle: dotted");
2428 else
2429 fprintf (file, " priority: 100 linestyle: solid");
2431 fprintf (file, " }\n");
2434 if (bb->next_bb != EXIT_BLOCK_PTR)
2435 fputc ('\n', file);
2438 fputs ("}\n\n", file);
2443 /*---------------------------------------------------------------------------
2444 Miscellaneous helpers
2445 ---------------------------------------------------------------------------*/
2447 /* Return true if T represents a stmt that always transfers control. */
2449 bool
2450 is_ctrl_stmt (tree t)
2452 return (TREE_CODE (t) == COND_EXPR
2453 || TREE_CODE (t) == SWITCH_EXPR
2454 || TREE_CODE (t) == GOTO_EXPR
2455 || TREE_CODE (t) == RETURN_EXPR
2456 || TREE_CODE (t) == RESX_EXPR);
2460 /* Return true if T is a statement that may alter the flow of control
2461 (e.g., a call to a non-returning function). */
2463 bool
2464 is_ctrl_altering_stmt (tree t)
2466 tree call;
2468 gcc_assert (t);
2469 call = get_call_expr_in (t);
2470 if (call)
2472 /* A non-pure/const CALL_EXPR alters flow control if the current
2473 function has nonlocal labels. */
2474 if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
2475 return true;
2477 /* A CALL_EXPR also alters control flow if it does not return. */
2478 if (call_expr_flags (call) & ECF_NORETURN)
2479 return true;
2482 /* OpenMP directives alter control flow. */
2483 if (OMP_DIRECTIVE_P (t))
2484 return true;
2486 /* If a statement can throw, it alters control flow. */
2487 return tree_can_throw_internal (t);
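/* For example, a call to abort () alters control flow because abort
   is declared noreturn (ECF_NORETURN), and in a function that uses
   nonlocal labels any call with side effects is treated as
   control-altering, since the callee may transfer control back
   through one of those labels.  */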
2491 /* Return true if T is a computed goto. */
2493 bool
2494 computed_goto_p (tree t)
2496 return (TREE_CODE (t) == GOTO_EXPR
2497 && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
2501 /* Return true if T is a simple local goto. */
2503 bool
2504 simple_goto_p (tree t)
2506 return (TREE_CODE (t) == GOTO_EXPR
2507 && TREE_CODE (GOTO_DESTINATION (t)) == LABEL_DECL);
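/* For example, "goto lab;" is a simple goto: its GOTO_DESTINATION is
   the LABEL_DECL for lab.  "goto *p;" is a computed goto: its
   destination is a pointer value, so it is matched by computed_goto_p
   above instead.  */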
2511 /* Return true if T can make an abnormal transfer of control flow.
2512 Transfers of control flow associated with EH are excluded. */
2514 bool
2515 tree_can_make_abnormal_goto (tree t)
2517 if (computed_goto_p (t))
2518 return true;
2519 if (TREE_CODE (t) == MODIFY_EXPR)
2520 t = TREE_OPERAND (t, 1);
2521 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2522 t = TREE_OPERAND (t, 0);
2523 if (TREE_CODE (t) == CALL_EXPR)
2524 return TREE_SIDE_EFFECTS (t) && current_function_has_nonlocal_label;
2525 return false;
2529 /* Return true if T should start a new basic block. PREV_T is the
2530 statement preceding T. It is used when T is a label or a case label.
2531 Labels should only start a new basic block if their previous statement
2532 wasn't a label. Otherwise, a sequence of labels would generate
2533 unnecessary basic blocks that only contain a single label. */
2535 static inline bool
2536 stmt_starts_bb_p (tree t, tree prev_t)
2538 if (t == NULL_TREE)
2539 return false;
2541 /* LABEL_EXPRs start a new basic block only if the preceding
2542 statement wasn't a label of the same type. This prevents the
2543 creation of consecutive blocks that have nothing but a single
2544 label. */
2545 if (TREE_CODE (t) == LABEL_EXPR)
2547 /* Nonlocal and computed GOTO targets always start a new block. */
2548 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
2549 || FORCED_LABEL (LABEL_EXPR_LABEL (t)))
2550 return true;
2552 if (prev_t && TREE_CODE (prev_t) == LABEL_EXPR)
2554 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
2555 return true;
2557 cfg_stats.num_merged_labels++;
2558 return false;
2560 else
2561 return true;
2564 return false;
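/* For example, in the GIMPLE sequence

     L1:
     L2:
       x = 1;

   L2 does not start a new basic block; it is merged into the block
   that starts at L1 and counted in cfg_stats.num_merged_labels.  A
   nonlocal or FORCED_LABEL label in the same position would start a
   new block.  */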
2568 /* Return true if T should end a basic block. */
2570 bool
2571 stmt_ends_bb_p (tree t)
2573 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2577 /* Add gotos that used to be represented implicitly in the CFG. */
2579 void
2580 disband_implicit_edges (void)
2582 basic_block bb;
2583 block_stmt_iterator last;
2584 edge e;
2585 edge_iterator ei;
2586 tree stmt, label;
2588 FOR_EACH_BB (bb)
2590 last = bsi_last (bb);
2591 stmt = last_stmt (bb);
2593 if (stmt && TREE_CODE (stmt) == COND_EXPR)
2595 /* Remove superfluous gotos from COND_EXPR branches. Moved
2596 from cfg_remove_useless_stmts here since it violates the
2597 invariants for tree--cfg correspondence and thus fits better
2598 here where we do it anyway. */
2599 e = find_edge (bb, bb->next_bb);
2600 if (e)
2602 if (e->flags & EDGE_TRUE_VALUE)
2603 COND_EXPR_THEN (stmt) = build_empty_stmt ();
2604 else if (e->flags & EDGE_FALSE_VALUE)
2605 COND_EXPR_ELSE (stmt) = build_empty_stmt ();
2606 else
2607 gcc_unreachable ();
2608 e->flags |= EDGE_FALLTHRU;
2611 continue;
2614 if (stmt && TREE_CODE (stmt) == RETURN_EXPR)
2616 /* Remove the RETURN_EXPR if we may fall through to the exit
2617 instead. */
2618 gcc_assert (single_succ_p (bb));
2619 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
2621 if (bb->next_bb == EXIT_BLOCK_PTR
2622 && !TREE_OPERAND (stmt, 0))
2624 bsi_remove (&last, true);
2625 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
2627 continue;
2630 /* There can be no fallthru edge if the last statement is a control
2631 one. */
2632 if (stmt && is_ctrl_stmt (stmt))
2633 continue;
2635 /* Find a fallthru edge and emit the goto if necessary. */
2636 FOR_EACH_EDGE (e, ei, bb->succs)
2637 if (e->flags & EDGE_FALLTHRU)
2638 break;
2640 if (!e || e->dest == bb->next_bb)
2641 continue;
2643 gcc_assert (e->dest != EXIT_BLOCK_PTR);
2644 label = tree_block_label (e->dest);
2646 stmt = build1 (GOTO_EXPR, void_type_node, label);
2647 #ifdef USE_MAPPED_LOCATION
2648 SET_EXPR_LOCATION (stmt, e->goto_locus);
2649 #else
2650 SET_EXPR_LOCUS (stmt, e->goto_locus);
2651 #endif
2652 bsi_insert_after (&last, stmt, BSI_NEW_STMT);
2653 e->flags &= ~EDGE_FALLTHRU;
2657 /* Remove block annotations and other data structures. */
2659 void
2660 delete_tree_cfg_annotations (void)
2662 label_to_block_map = NULL;
2666 /* Return the first statement in basic block BB. */
2668 tree
2669 first_stmt (basic_block bb)
2671 block_stmt_iterator i = bsi_start (bb);
2672 return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
2676 /* Return the last statement in basic block BB. */
2678 tree
2679 last_stmt (basic_block bb)
2681 block_stmt_iterator b = bsi_last (bb);
2682 return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
2686 /* Return a pointer to the last statement in block BB. */
2688 tree *
2689 last_stmt_ptr (basic_block bb)
2691 block_stmt_iterator last = bsi_last (bb);
2692 return !bsi_end_p (last) ? bsi_stmt_ptr (last) : NULL;
2696 /* Return the last statement of an otherwise empty block. Return NULL
2697 if the block is totally empty, or if it contains more than one
2698 statement. */
2700 tree
2701 last_and_only_stmt (basic_block bb)
2703 block_stmt_iterator i = bsi_last (bb);
2704 tree last, prev;
2706 if (bsi_end_p (i))
2707 return NULL_TREE;
2709 last = bsi_stmt (i);
2710 bsi_prev (&i);
2711 if (bsi_end_p (i))
2712 return last;
2714 /* Empty statements should no longer appear in the instruction stream.
2715 Everything that might have appeared before should be deleted by
2716 remove_useless_stmts, and the optimizers should just bsi_remove
2717 instead of smashing with build_empty_stmt.
2719 Thus the only thing that should appear here in a block containing
2720 one executable statement is a label. */
2721 prev = bsi_stmt (i);
2722 if (TREE_CODE (prev) == LABEL_EXPR)
2723 return last;
2724 else
2725 return NULL_TREE;
2729 /* Mark BB as the basic block holding statement T. */
2731 void
2732 set_bb_for_stmt (tree t, basic_block bb)
2734 if (TREE_CODE (t) == PHI_NODE)
2735 PHI_BB (t) = bb;
2736 else if (TREE_CODE (t) == STATEMENT_LIST)
2738 tree_stmt_iterator i;
2739 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2740 set_bb_for_stmt (tsi_stmt (i), bb);
2742 else
2744 stmt_ann_t ann = get_stmt_ann (t);
2745 ann->bb = bb;
2747 /* If the statement is a label, add the label to block-to-labels map
2748 so that we can speed up edge creation for GOTO_EXPRs. */
2749 if (TREE_CODE (t) == LABEL_EXPR)
2751 int uid;
2753 t = LABEL_EXPR_LABEL (t);
2754 uid = LABEL_DECL_UID (t);
2755 if (uid == -1)
2757 unsigned old_len = VEC_length (basic_block, label_to_block_map);
2758 LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
2759 if (old_len <= (unsigned) uid)
2761 basic_block *addr;
2762 unsigned new_len = 3 * uid / 2;
2764 VEC_safe_grow (basic_block, gc, label_to_block_map,
2765 new_len);
2766 addr = VEC_address (basic_block, label_to_block_map);
2767 memset (&addr[old_len],
2768 0, sizeof (basic_block) * (new_len - old_len));
2771 else
2772 /* We're moving an existing label. Make sure that we've
2773 removed it from the old block. */
2774 gcc_assert (!bb
2775 || !VEC_index (basic_block, label_to_block_map, uid));
2776 VEC_replace (basic_block, label_to_block_map, uid, bb);
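/* Worked example of the growth policy above (illustrative only): if a
   label is assigned uid 10 while label_to_block_map currently holds 8
   entries, the vector is grown to 3 * 10 / 2 == 15 slots, the new tail
   [8, 15) is zeroed, and only then is slot 10 set to BB.  */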
2781 /* Faster version of set_bb_for_stmt that assumes the statement is being moved
2782 from one basic block to another.
2783 For BB splitting we can run into a quadratic case, so performance is quite
2784 important; since the tables are known to be big enough, change_bb_for_stmt
2785 can be inlined as a leaf function. */
2786 static inline void
2787 change_bb_for_stmt (tree t, basic_block bb)
2789 get_stmt_ann (t)->bb = bb;
2790 if (TREE_CODE (t) == LABEL_EXPR)
2791 VEC_replace (basic_block, label_to_block_map,
2792 LABEL_DECL_UID (LABEL_EXPR_LABEL (t)), bb);
2795 /* Find the block statement iterator for STMT. */
2797 extern block_stmt_iterator
2798 bsi_for_stmt (tree stmt)
2800 block_stmt_iterator bsi;
2802 for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
2803 if (bsi_stmt (bsi) == stmt)
2804 return bsi;
2806 gcc_unreachable ();
2809 /* Mark statement T as modified, and update it. */
2810 static inline void
2811 update_modified_stmts (tree t)
2813 if (TREE_CODE (t) == STATEMENT_LIST)
2815 tree_stmt_iterator i;
2816 tree stmt;
2817 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2819 stmt = tsi_stmt (i);
2820 update_stmt_if_modified (stmt);
2823 else
2824 update_stmt_if_modified (t);
2827 /* Insert statement (or statement list) T before the statement
2828 pointed-to by iterator I. M specifies how to update iterator I
2829 after insertion (see enum bsi_iterator_update). */
2831 void
2832 bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2834 set_bb_for_stmt (t, i->bb);
2835 update_modified_stmts (t);
2836 tsi_link_before (&i->tsi, t, m);
2840 /* Insert statement (or statement list) T after the statement
2841 pointed-to by iterator I. M specifies how to update iterator I
2842 after insertion (see enum bsi_iterator_update). */
2844 void
2845 bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2847 set_bb_for_stmt (t, i->bb);
2848 update_modified_stmts (t);
2849 tsi_link_after (&i->tsi, t, m);
2853 /* Remove the statement pointed to by iterator I. The iterator is updated
2854 to the next statement.
2856 When REMOVE_EH_INFO is true we remove the statement pointed to by
2857 iterator I from the EH tables. Otherwise we do not modify the EH
2858 tables.
2860 Generally, REMOVE_EH_INFO should be true when the statement is going to
2861 be removed from the IL and not reinserted elsewhere. */
2863 void
2864 bsi_remove (block_stmt_iterator *i, bool remove_eh_info)
2866 tree t = bsi_stmt (*i);
2867 set_bb_for_stmt (t, NULL);
2868 delink_stmt_imm_use (t);
2869 tsi_delink (&i->tsi);
2870 mark_stmt_modified (t);
2871 if (remove_eh_info)
2872 remove_stmt_from_eh_region (t);
2876 /* Move the statement at FROM so it comes right after the statement at TO. */
2878 void
2879 bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
2881 tree stmt = bsi_stmt (*from);
2882 bsi_remove (from, false);
2883 bsi_insert_after (to, stmt, BSI_SAME_STMT);
2887 /* Move the statement at FROM so it comes right before the statement at TO. */
2889 void
2890 bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
2892 tree stmt = bsi_stmt (*from);
2893 bsi_remove (from, false);
2894 bsi_insert_before (to, stmt, BSI_SAME_STMT);
2898 /* Move the statement at FROM to the end of basic block BB. */
2900 void
2901 bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
2903 block_stmt_iterator last = bsi_last (bb);
2905 /* Have to check bsi_end_p because it could be an empty block. */
2906 if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
2907 bsi_move_before (from, &last);
2908 else
2909 bsi_move_after (from, &last);
2913 /* Replace the contents of the statement pointed to by iterator BSI
2914 with STMT. If UPDATE_EH_INFO is true, the exception handling
2915 information of the original statement is moved to the new statement. */
2917 void
2918 bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool update_eh_info)
2920 int eh_region;
2921 tree orig_stmt = bsi_stmt (*bsi);
2923 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
2924 set_bb_for_stmt (stmt, bsi->bb);
2926 /* Preserve EH region information from the original statement, if
2927 requested by the caller. */
2928 if (update_eh_info)
2930 eh_region = lookup_stmt_eh_region (orig_stmt);
2931 if (eh_region >= 0)
2933 remove_stmt_from_eh_region (orig_stmt);
2934 add_stmt_to_eh_region (stmt, eh_region);
2938 delink_stmt_imm_use (orig_stmt);
2939 *bsi_stmt_ptr (*bsi) = stmt;
2940 mark_stmt_modified (stmt);
2941 update_modified_stmts (stmt);
2945 /* Insert the statement pointed-to by BSI into edge E. Every attempt
2946 is made to place the statement in an existing basic block, but
2947 sometimes that isn't possible. When it isn't possible, the edge is
2948 split and the statement is added to the new block.
2950 In all cases, the returned *BSI points to the correct location. The
2951 return value is true if insertion should be done after the location,
2952 or false if it should be done before the location. If a new basic block
2953 has to be created, it is stored in *NEW_BB. */
2955 static bool
2956 tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
2957 basic_block *new_bb)
2959 basic_block dest, src;
2960 tree tmp;
2962 dest = e->dest;
2963 restart:
2965 /* If the destination has one predecessor which has no PHI nodes,
2966 insert there. Except for the exit block.
2968 The requirement for no PHI nodes could be relaxed. Basically we
2969 would have to examine the PHIs to prove that none of them used
2970 the value set by the statement we want to insert on E. That
2971 hardly seems worth the effort. */
2972 if (single_pred_p (dest)
2973 && ! phi_nodes (dest)
2974 && dest != EXIT_BLOCK_PTR)
2976 *bsi = bsi_start (dest);
2977 if (bsi_end_p (*bsi))
2978 return true;
2980 /* Make sure we insert after any leading labels. */
2981 tmp = bsi_stmt (*bsi);
2982 while (TREE_CODE (tmp) == LABEL_EXPR)
2984 bsi_next (bsi);
2985 if (bsi_end_p (*bsi))
2986 break;
2987 tmp = bsi_stmt (*bsi);
2990 if (bsi_end_p (*bsi))
2992 *bsi = bsi_last (dest);
2993 return true;
2995 else
2996 return false;
2999 /* If the source has one successor, the edge is not abnormal and
3000 the last statement does not end a basic block, insert there.
3001 Except for the entry block. */
3002 src = e->src;
3003 if ((e->flags & EDGE_ABNORMAL) == 0
3004 && single_succ_p (src)
3005 && src != ENTRY_BLOCK_PTR)
3007 *bsi = bsi_last (src);
3008 if (bsi_end_p (*bsi))
3009 return true;
3011 tmp = bsi_stmt (*bsi);
3012 if (!stmt_ends_bb_p (tmp))
3013 return true;
3015 /* Insert code just before returning the value. We may need to decompose
3016 the return in case it contains a non-trivial operand. */
3017 if (TREE_CODE (tmp) == RETURN_EXPR)
3019 tree op = TREE_OPERAND (tmp, 0);
3020 if (op && !is_gimple_val (op))
3022 gcc_assert (TREE_CODE (op) == MODIFY_EXPR);
3023 bsi_insert_before (bsi, op, BSI_NEW_STMT);
3024 TREE_OPERAND (tmp, 0) = TREE_OPERAND (op, 0);
3026 bsi_prev (bsi);
3027 return true;
3031 /* Otherwise, create a new basic block, and split this edge. */
3032 dest = split_edge (e);
3033 if (new_bb)
3034 *new_bb = dest;
3035 e = single_pred_edge (dest);
3036 goto restart;
3040 /* This routine will commit all pending edge insertions, creating any new
3041 basic blocks which are necessary. */
3043 void
3044 bsi_commit_edge_inserts (void)
3046 basic_block bb;
3047 edge e;
3048 edge_iterator ei;
3050 bsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR), NULL);
3052 FOR_EACH_BB (bb)
3053 FOR_EACH_EDGE (e, ei, bb->succs)
3054 bsi_commit_one_edge_insert (e, NULL);
3058 /* Commit insertions pending at edge E. If a new block is created, set NEW_BB
3059 to this block, otherwise set it to NULL. */
3061 void
3062 bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
3064 if (new_bb)
3065 *new_bb = NULL;
3066 if (PENDING_STMT (e))
3068 block_stmt_iterator bsi;
3069 tree stmt = PENDING_STMT (e);
3071 PENDING_STMT (e) = NULL_TREE;
3073 if (tree_find_edge_insert_loc (e, &bsi, new_bb))
3074 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3075 else
3076 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3081 /* Add STMT to the pending list of edge E. No actual insertion is
3082 made until a call to bsi_commit_edge_inserts () is made. */
3084 void
3085 bsi_insert_on_edge (edge e, tree stmt)
3087 append_to_statement_list (stmt, &PENDING_STMT (e));
3090 /* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts. If a new
3091 block has to be created, it is returned. */
3093 basic_block
3094 bsi_insert_on_edge_immediate (edge e, tree stmt)
3096 block_stmt_iterator bsi;
3097 basic_block new_bb = NULL;
3099 gcc_assert (!PENDING_STMT (e));
3101 if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
3102 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3103 else
3104 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3106 return new_bb;
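/* Illustrative sketch, not part of GCC: queue a GIMPLE copy statement
   on edge E and then commit it.  The function name and the LHS/RHS
   parameters are hypothetical; they are assumed to already be valid
   GIMPLE operands.  */

static void
example_insert_copy_on_edge (edge e, tree lhs, tree rhs)
{
  tree stmt = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

  /* Nothing changes in the IL yet; the statement only goes on the
     pending list of E.  */
  bsi_insert_on_edge (e, stmt);

  /* Materialize every pending insertion in the function.  This may
     split E and create a new basic block if no existing block can
     hold the statement.  */
  bsi_commit_edge_inserts ();
}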
3109 /*---------------------------------------------------------------------------
3110 Tree specific functions for CFG manipulation
3111 ---------------------------------------------------------------------------*/
3113 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
3115 static void
3116 reinstall_phi_args (edge new_edge, edge old_edge)
3118 tree var, phi;
3120 if (!PENDING_STMT (old_edge))
3121 return;
3123 for (var = PENDING_STMT (old_edge), phi = phi_nodes (new_edge->dest);
3124 var && phi;
3125 var = TREE_CHAIN (var), phi = PHI_CHAIN (phi))
3127 tree result = TREE_PURPOSE (var);
3128 tree arg = TREE_VALUE (var);
3130 gcc_assert (result == PHI_RESULT (phi));
3132 add_phi_arg (phi, arg, new_edge);
3135 PENDING_STMT (old_edge) = NULL;
3138 /* Returns the basic block after which the new basic block created
3139 by splitting edge EDGE_IN should be placed. Tries to keep the new block
3140 near its "logical" location. This is of most help to humans looking
3141 at debugging dumps. */
3143 static basic_block
3144 split_edge_bb_loc (edge edge_in)
3146 basic_block dest = edge_in->dest;
3148 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
3149 return edge_in->src;
3150 else
3151 return dest->prev_bb;
3154 /* Split a (typically critical) edge EDGE_IN. Return the new block.
3155 Abort on abnormal edges. */
3157 static basic_block
3158 tree_split_edge (edge edge_in)
3160 basic_block new_bb, after_bb, dest;
3161 edge new_edge, e;
3163 /* Abnormal edges cannot be split. */
3164 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
3166 dest = edge_in->dest;
3168 after_bb = split_edge_bb_loc (edge_in);
3170 new_bb = create_empty_bb (after_bb);
3171 new_bb->frequency = EDGE_FREQUENCY (edge_in);
3172 new_bb->count = edge_in->count;
3173 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
3174 new_edge->probability = REG_BR_PROB_BASE;
3175 new_edge->count = edge_in->count;
3177 e = redirect_edge_and_branch (edge_in, new_bb);
3178 gcc_assert (e);
3179 reinstall_phi_args (new_edge, e);
3181 return new_bb;
3185 /* Return true when BB has label LABEL in it. */
3187 static bool
3188 has_label_p (basic_block bb, tree label)
3190 block_stmt_iterator bsi;
3192 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3194 tree stmt = bsi_stmt (bsi);
3196 if (TREE_CODE (stmt) != LABEL_EXPR)
3197 return false;
3198 if (LABEL_EXPR_LABEL (stmt) == label)
3199 return true;
3201 return false;
3205 /* Callback for walk_tree, check that all elements with address taken are
3206 properly noticed as such. DATA is nonzero if TP was seen
3207 inside a PHI node. */
3209 static tree
3210 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3212 tree t = *tp, x;
3213 bool in_phi = (data != NULL);
3215 if (TYPE_P (t))
3216 *walk_subtrees = 0;
3218 /* Check operand N for being valid GIMPLE and give error MSG if not. */
3219 #define CHECK_OP(N, MSG) \
3220 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
3221 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
3223 switch (TREE_CODE (t))
3225 case SSA_NAME:
3226 if (SSA_NAME_IN_FREE_LIST (t))
3228 error ("SSA name in freelist but still referenced");
3229 return *tp;
3231 break;
3233 case ASSERT_EXPR:
3234 x = fold (ASSERT_EXPR_COND (t));
3235 if (x == boolean_false_node)
3237 error ("ASSERT_EXPR with an always-false condition");
3238 return *tp;
3240 break;
3242 case MODIFY_EXPR:
3243 x = TREE_OPERAND (t, 0);
3244 if (TREE_CODE (x) == BIT_FIELD_REF
3245 && is_gimple_reg (TREE_OPERAND (x, 0)))
3247 error ("GIMPLE register modified with BIT_FIELD_REF");
3248 return t;
3250 break;
3252 case ADDR_EXPR:
3254 bool old_invariant;
3255 bool old_constant;
3256 bool old_side_effects;
3257 bool new_invariant;
3258 bool new_constant;
3259 bool new_side_effects;
3261 /* ??? tree-ssa-alias.c may have overlooked dead PHI nodes, missing
3262 dead PHIs that take the address of something. But if the PHI
3263 result is dead, the fact that it takes the address of anything
3264 is irrelevant. Because we can not tell from here if a PHI result
3265 is dead, we just skip this check for PHIs altogether. This means
3266 we may be missing "valid" checks, but what can you do?
3267 This was PR19217. */
3268 if (in_phi)
3269 break;
3271 old_invariant = TREE_INVARIANT (t);
3272 old_constant = TREE_CONSTANT (t);
3273 old_side_effects = TREE_SIDE_EFFECTS (t);
3275 recompute_tree_invariant_for_addr_expr (t);
3276 new_invariant = TREE_INVARIANT (t);
3277 new_side_effects = TREE_SIDE_EFFECTS (t);
3278 new_constant = TREE_CONSTANT (t);
3280 if (old_invariant != new_invariant)
3282 error ("invariant not recomputed when ADDR_EXPR changed");
3283 return t;
3286 if (old_constant != new_constant)
3288 error ("constant not recomputed when ADDR_EXPR changed");
3289 return t;
3291 if (old_side_effects != new_side_effects)
3293 error ("side effects not recomputed when ADDR_EXPR changed");
3294 return t;
3297 /* Skip any references (they will be checked when we recurse down the
3298 tree) and ensure that any variable used as a prefix is marked
3299 addressable. */
3300 for (x = TREE_OPERAND (t, 0);
3301 handled_component_p (x);
3302 x = TREE_OPERAND (x, 0))
3305 if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
3306 return NULL;
3307 if (!TREE_ADDRESSABLE (x))
3309 error ("address taken, but ADDRESSABLE bit not set");
3310 return x;
3312 break;
3315 case COND_EXPR:
3316 x = COND_EXPR_COND (t);
3317 if (TREE_CODE (TREE_TYPE (x)) != BOOLEAN_TYPE)
3319 error ("non-boolean used in condition");
3320 return x;
3322 if (!is_gimple_condexpr (x))
3324 error ("invalid conditional operand");
3325 return x;
3327 break;
3329 case NOP_EXPR:
3330 case CONVERT_EXPR:
3331 case FIX_TRUNC_EXPR:
3332 case FIX_CEIL_EXPR:
3333 case FIX_FLOOR_EXPR:
3334 case FIX_ROUND_EXPR:
3335 case FLOAT_EXPR:
3336 case NEGATE_EXPR:
3337 case ABS_EXPR:
3338 case BIT_NOT_EXPR:
3339 case NON_LVALUE_EXPR:
3340 case TRUTH_NOT_EXPR:
3341 CHECK_OP (0, "invalid operand to unary operator");
3342 break;
3344 case REALPART_EXPR:
3345 case IMAGPART_EXPR:
3346 case COMPONENT_REF:
3347 case ARRAY_REF:
3348 case ARRAY_RANGE_REF:
3349 case BIT_FIELD_REF:
3350 case VIEW_CONVERT_EXPR:
3351 /* We have a nest of references. Verify that each of the operands
3352 that determine where to reference is either a constant or a variable,
3353 verify that the base is valid, and then show we've already checked
3354 the subtrees. */
3355 while (handled_component_p (t))
3357 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3358 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3359 else if (TREE_CODE (t) == ARRAY_REF
3360 || TREE_CODE (t) == ARRAY_RANGE_REF)
3362 CHECK_OP (1, "invalid array index");
3363 if (TREE_OPERAND (t, 2))
3364 CHECK_OP (2, "invalid array lower bound");
3365 if (TREE_OPERAND (t, 3))
3366 CHECK_OP (3, "invalid array stride");
3368 else if (TREE_CODE (t) == BIT_FIELD_REF)
3370 CHECK_OP (1, "invalid operand to BIT_FIELD_REF");
3371 CHECK_OP (2, "invalid operand to BIT_FIELD_REF");
3374 t = TREE_OPERAND (t, 0);
3377 if (!CONSTANT_CLASS_P (t) && !is_gimple_lvalue (t))
3379 error ("invalid reference prefix");
3380 return t;
3382 *walk_subtrees = 0;
3383 break;
3385 case LT_EXPR:
3386 case LE_EXPR:
3387 case GT_EXPR:
3388 case GE_EXPR:
3389 case EQ_EXPR:
3390 case NE_EXPR:
3391 case UNORDERED_EXPR:
3392 case ORDERED_EXPR:
3393 case UNLT_EXPR:
3394 case UNLE_EXPR:
3395 case UNGT_EXPR:
3396 case UNGE_EXPR:
3397 case UNEQ_EXPR:
3398 case LTGT_EXPR:
3399 case PLUS_EXPR:
3400 case MINUS_EXPR:
3401 case MULT_EXPR:
3402 case TRUNC_DIV_EXPR:
3403 case CEIL_DIV_EXPR:
3404 case FLOOR_DIV_EXPR:
3405 case ROUND_DIV_EXPR:
3406 case TRUNC_MOD_EXPR:
3407 case CEIL_MOD_EXPR:
3408 case FLOOR_MOD_EXPR:
3409 case ROUND_MOD_EXPR:
3410 case RDIV_EXPR:
3411 case EXACT_DIV_EXPR:
3412 case MIN_EXPR:
3413 case MAX_EXPR:
3414 case LSHIFT_EXPR:
3415 case RSHIFT_EXPR:
3416 case LROTATE_EXPR:
3417 case RROTATE_EXPR:
3418 case BIT_IOR_EXPR:
3419 case BIT_XOR_EXPR:
3420 case BIT_AND_EXPR:
3421 CHECK_OP (0, "invalid operand to binary operator");
3422 CHECK_OP (1, "invalid operand to binary operator");
3423 break;
3425 case CONSTRUCTOR:
3426 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3427 *walk_subtrees = 0;
3428 break;
3430 default:
3431 break;
3433 return NULL;
3435 #undef CHECK_OP
3439 /* Verify STMT, return true if STMT is not in GIMPLE form.
3440 TODO: Implement type checking. */
3442 static bool
3443 verify_stmt (tree stmt, bool last_in_block)
3445 tree addr;
3447 if (OMP_DIRECTIVE_P (stmt))
3449 /* OpenMP directives are validated by the FE and never operated
3450 on by the optimizers. Furthermore, OMP_FOR may contain
3451 non-gimple expressions when the main index variable has had
3452 its address taken. This does not affect the loop itself
3453 because the header of an OMP_FOR is merely used to determine
3454 how to setup the parallel iteration. */
3455 return false;
3458 if (!is_gimple_stmt (stmt))
3460 error ("is not a valid GIMPLE statement");
3461 goto fail;
3464 addr = walk_tree (&stmt, verify_expr, NULL, NULL);
3465 if (addr)
3467 debug_generic_stmt (addr);
3468 return true;
3471 /* If the statement is marked as part of an EH region, then it is
3472 expected that the statement could throw. Verify that when we
3473 have optimizations that simplify statements such that we prove
3474 that they cannot throw, that we update other data structures
3475 to match. */
3476 if (lookup_stmt_eh_region (stmt) >= 0)
3478 if (!tree_could_throw_p (stmt))
3480 error ("statement marked for throw, but doesn%'t");
3481 goto fail;
3483 if (!last_in_block && tree_can_throw_internal (stmt))
3485 error ("statement marked for throw in middle of block");
3486 goto fail;
3490 return false;
3492 fail:
3493 debug_generic_stmt (stmt);
3494 return true;
3498 /* Return true when T can be shared. */
3500 static bool
3501 tree_node_can_be_shared (tree t)
3503 if (IS_TYPE_OR_DECL_P (t)
3504 || is_gimple_min_invariant (t)
3505 || TREE_CODE (t) == SSA_NAME
3506 || t == error_mark_node
3507 || TREE_CODE (t) == IDENTIFIER_NODE)
3508 return true;
3510 if (TREE_CODE (t) == CASE_LABEL_EXPR)
3511 return true;
3513 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3514 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
3515 || TREE_CODE (t) == COMPONENT_REF
3516 || TREE_CODE (t) == REALPART_EXPR
3517 || TREE_CODE (t) == IMAGPART_EXPR)
3518 t = TREE_OPERAND (t, 0);
3520 if (DECL_P (t))
3521 return true;
3523 return false;
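/* For example, the same VAR_DECL or INTEGER_CST node may legitimately
   appear in many statements, but an expression node such as a
   PLUS_EXPR must be unshared (see unshare_expr) before being reused;
   verify_node_sharing below reports a violation otherwise.  */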
3527 /* Called via walk_tree. Verify tree sharing. */
3529 static tree
3530 verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
3532 htab_t htab = (htab_t) data;
3533 void **slot;
3535 if (tree_node_can_be_shared (*tp))
3537 *walk_subtrees = false;
3538 return NULL;
3541 slot = htab_find_slot (htab, *tp, INSERT);
3542 if (*slot)
3543 return (tree) *slot;
3544 *slot = *tp;
3546 return NULL;
3550 /* Verify the GIMPLE statement chain. */
3552 void
3553 verify_stmts (void)
3555 basic_block bb;
3556 block_stmt_iterator bsi;
3557 bool err = false;
3558 htab_t htab;
3559 tree addr;
3561 timevar_push (TV_TREE_STMT_VERIFY);
3562 htab = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
3564 FOR_EACH_BB (bb)
3566 tree phi;
3567 int i;
3569 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
3571 int phi_num_args = PHI_NUM_ARGS (phi);
3573 if (bb_for_stmt (phi) != bb)
3575 error ("bb_for_stmt (phi) is set to a wrong basic block");
3576 err |= true;
3579 for (i = 0; i < phi_num_args; i++)
3581 tree t = PHI_ARG_DEF (phi, i);
3582 tree addr;
3584 /* Addressable variables do have SSA_NAMEs but they
3585 are not considered gimple values. */
3586 if (TREE_CODE (t) != SSA_NAME
3587 && TREE_CODE (t) != FUNCTION_DECL
3588 && !is_gimple_val (t))
3590 error ("PHI def is not a GIMPLE value");
3591 debug_generic_stmt (phi);
3592 debug_generic_stmt (t);
3593 err |= true;
3596 addr = walk_tree (&t, verify_expr, (void *) 1, NULL);
3597 if (addr)
3599 debug_generic_stmt (addr);
3600 err |= true;
3603 addr = walk_tree (&t, verify_node_sharing, htab, NULL);
3604 if (addr)
3606 error ("incorrect sharing of tree nodes");
3607 debug_generic_stmt (phi);
3608 debug_generic_stmt (addr);
3609 err |= true;
3614 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
3616 tree stmt = bsi_stmt (bsi);
3618 if (bb_for_stmt (stmt) != bb)
3620 error ("bb_for_stmt (stmt) is set to a wrong basic block");
3621 err |= true;
3624 bsi_next (&bsi);
3625 err |= verify_stmt (stmt, bsi_end_p (bsi));
3626 addr = walk_tree (&stmt, verify_node_sharing, htab, NULL);
3627 if (addr)
3629 error ("incorrect sharing of tree nodes");
3630 debug_generic_stmt (stmt);
3631 debug_generic_stmt (addr);
3632 err |= true;
3637 if (err)
3638 internal_error ("verify_stmts failed");
3640 htab_delete (htab);
3641 timevar_pop (TV_TREE_STMT_VERIFY);
3645 /* Verifies that the flow information is OK. */
3647 static int
3648 tree_verify_flow_info (void)
3650 int err = 0;
3651 basic_block bb;
3652 block_stmt_iterator bsi;
3653 tree stmt;
3654 edge e;
3655 edge_iterator ei;
3657 if (ENTRY_BLOCK_PTR->stmt_list)
3659 error ("ENTRY_BLOCK has a statement list associated with it");
3660 err = 1;
3663 if (EXIT_BLOCK_PTR->stmt_list)
3665 error ("EXIT_BLOCK has a statement list associated with it");
3666 err = 1;
3669 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
3670 if (e->flags & EDGE_FALLTHRU)
3672 error ("fallthru to exit from bb %d", e->src->index);
3673 err = 1;
3676 FOR_EACH_BB (bb)
3678 bool found_ctrl_stmt = false;
3680 stmt = NULL_TREE;
3682 /* Skip labels on the start of basic block. */
3683 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3685 tree prev_stmt = stmt;
3687 stmt = bsi_stmt (bsi);
3689 if (TREE_CODE (stmt) != LABEL_EXPR)
3690 break;
3692 if (prev_stmt && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
3694 error ("nonlocal label ");
3695 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3696 fprintf (stderr, " is not first in a sequence of labels in bb %d",
3697 bb->index);
3698 err = 1;
3701 if (label_to_block (LABEL_EXPR_LABEL (stmt)) != bb)
3703 error ("label ");
3704 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3705 fprintf (stderr, " to block does not match in bb %d",
3706 bb->index);
3707 err = 1;
3710 if (decl_function_context (LABEL_EXPR_LABEL (stmt))
3711 != current_function_decl)
3713 error ("label ");
3714 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3715 fprintf (stderr, " has incorrect context in bb %d",
3716 bb->index);
3717 err = 1;
3721 /* Verify that the body of basic block BB is free of control flow. */
3722 for (; !bsi_end_p (bsi); bsi_next (&bsi))
3724 tree stmt = bsi_stmt (bsi);
3726 if (found_ctrl_stmt)
3728 error ("control flow in the middle of basic block %d",
3729 bb->index);
3730 err = 1;
3733 if (stmt_ends_bb_p (stmt))
3734 found_ctrl_stmt = true;
3736 if (TREE_CODE (stmt) == LABEL_EXPR)
3738 error ("label ");
3739 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3740 fprintf (stderr, " in the middle of basic block %d", bb->index);
3741 err = 1;
3745 bsi = bsi_last (bb);
3746 if (bsi_end_p (bsi))
3747 continue;
3749 stmt = bsi_stmt (bsi);
3751 err |= verify_eh_edges (stmt);
3753 if (is_ctrl_stmt (stmt))
3755 FOR_EACH_EDGE (e, ei, bb->succs)
3756 if (e->flags & EDGE_FALLTHRU)
3758 error ("fallthru edge after a control statement in bb %d",
3759 bb->index);
3760 err = 1;
3764 if (TREE_CODE (stmt) != COND_EXPR)
3766 /* Verify that there are no edges with EDGE_TRUE_VALUE or
3767 EDGE_FALSE_VALUE set after anything other than a COND_EXPR. */
3768 FOR_EACH_EDGE (e, ei, bb->succs)
3769 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
3771 error ("true/false edge after a non-COND_EXPR in bb %d",
3772 bb->index);
3773 err = 1;
3777 switch (TREE_CODE (stmt))
3779 case COND_EXPR:
3781 edge true_edge;
3782 edge false_edge;
3783 if (TREE_CODE (COND_EXPR_THEN (stmt)) != GOTO_EXPR
3784 || TREE_CODE (COND_EXPR_ELSE (stmt)) != GOTO_EXPR)
3786 error ("structured COND_EXPR at the end of bb %d", bb->index);
3787 err = 1;
3790 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
3792 if (!true_edge || !false_edge
3793 || !(true_edge->flags & EDGE_TRUE_VALUE)
3794 || !(false_edge->flags & EDGE_FALSE_VALUE)
3795 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3796 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3797 || EDGE_COUNT (bb->succs) >= 3)
3799 error ("wrong outgoing edge flags at end of bb %d",
3800 bb->index);
3801 err = 1;
3804 if (!has_label_p (true_edge->dest,
3805 GOTO_DESTINATION (COND_EXPR_THEN (stmt))))
3807 error ("%<then%> label does not match edge at end of bb %d",
3808 bb->index);
3809 err = 1;
3812 if (!has_label_p (false_edge->dest,
3813 GOTO_DESTINATION (COND_EXPR_ELSE (stmt))))
3815 error ("%<else%> label does not match edge at end of bb %d",
3816 bb->index);
3817 err = 1;
3820 break;
3822 case GOTO_EXPR:
3823 if (simple_goto_p (stmt))
3825 error ("explicit goto at end of bb %d", bb->index);
3826 err = 1;
3828 else
3830 /* FIXME. We should double check that the labels in the
3831 destination blocks have their address taken. */
3832 FOR_EACH_EDGE (e, ei, bb->succs)
3833 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
3834 | EDGE_FALSE_VALUE))
3835 || !(e->flags & EDGE_ABNORMAL))
3837 error ("wrong outgoing edge flags at end of bb %d",
3838 bb->index);
3839 err = 1;
3842 break;
3844 case RETURN_EXPR:
3845 if (!single_succ_p (bb)
3846 || (single_succ_edge (bb)->flags
3847 & (EDGE_FALLTHRU | EDGE_ABNORMAL
3848 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3850 error ("wrong outgoing edge flags at end of bb %d", bb->index);
3851 err = 1;
3853 if (single_succ (bb) != EXIT_BLOCK_PTR)
3855 error ("return edge does not point to exit in bb %d",
3856 bb->index);
3857 err = 1;
3859 break;
3861 case SWITCH_EXPR:
3863 tree prev;
3864 edge e;
3865 size_t i, n;
3866 tree vec;
3868 vec = SWITCH_LABELS (stmt);
3869 n = TREE_VEC_LENGTH (vec);
3871 /* Mark all the destination basic blocks. */
3872 for (i = 0; i < n; ++i)
3874 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3875 basic_block label_bb = label_to_block (lab);
3877 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
3878 label_bb->aux = (void *)1;
3881 /* Verify that the case labels are sorted. */
3882 prev = TREE_VEC_ELT (vec, 0);
3883 for (i = 1; i < n - 1; ++i)
3885 tree c = TREE_VEC_ELT (vec, i);
3886 if (! CASE_LOW (c))
3888 error ("found default case not at end of case vector");
3889 err = 1;
3890 continue;
3892 if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
3894 error ("case labels not sorted: ");
3895 print_generic_expr (stderr, prev, 0);
3896 fprintf (stderr," is greater than ");
3897 print_generic_expr (stderr, c, 0);
3898 fprintf (stderr," but comes before it.\n");
3899 err = 1;
3901 prev = c;
3903 if (CASE_LOW (TREE_VEC_ELT (vec, n - 1)))
3905 error ("no default case found at end of case vector");
3906 err = 1;
3909 FOR_EACH_EDGE (e, ei, bb->succs)
3911 if (!e->dest->aux)
3913 error ("extra outgoing edge %d->%d",
3914 bb->index, e->dest->index);
3915 err = 1;
3917 e->dest->aux = (void *)2;
3918 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
3919 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3921 error ("wrong outgoing edge flags at end of bb %d",
3922 bb->index);
3923 err = 1;
3927 /* Check that we have all of them. */
3928 for (i = 0; i < n; ++i)
3930 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3931 basic_block label_bb = label_to_block (lab);
3933 if (label_bb->aux != (void *)2)
3935 error ("missing edge %i->%i",
3936 bb->index, label_bb->index);
3937 err = 1;
3941 FOR_EACH_EDGE (e, ei, bb->succs)
3942 e->dest->aux = (void *)0;
3945 default: ;
3949 if (dom_computed[CDI_DOMINATORS] >= DOM_NO_FAST_QUERY)
3950 verify_dominators (CDI_DOMINATORS);
3952 return err;
3956 /* Updates phi nodes after creating a forwarder block joined
3957 by edge FALLTHRU. */
3959 static void
3960 tree_make_forwarder_block (edge fallthru)
3962 edge e;
3963 edge_iterator ei;
3964 basic_block dummy, bb;
3965 tree phi, new_phi, var;
3967 dummy = fallthru->src;
3968 bb = fallthru->dest;
3970 if (single_pred_p (bb))
3971 return;
3973 /* If we redirected a branch we must create new phi nodes at the
3974 start of BB. */
3975 for (phi = phi_nodes (dummy); phi; phi = PHI_CHAIN (phi))
3977 var = PHI_RESULT (phi);
3978 new_phi = create_phi_node (var, bb);
3979 SSA_NAME_DEF_STMT (var) = new_phi;
3980 SET_PHI_RESULT (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
3981 add_phi_arg (new_phi, PHI_RESULT (phi), fallthru);
3984 /* Ensure that the PHI node chain is in the same order. */
3985 set_phi_nodes (bb, phi_reverse (phi_nodes (bb)));
3987 /* Add the arguments we have stored on edges. */
3988 FOR_EACH_EDGE (e, ei, bb->preds)
3990 if (e == fallthru)
3991 continue;
3993 flush_pending_stmts (e);
3998 /* Return a non-special label in the head of basic block BB.
3999 Create one if it doesn't exist. */
4001 tree
4002 tree_block_label (basic_block bb)
4004 block_stmt_iterator i, s = bsi_start (bb);
4005 bool first = true;
4006 tree label, stmt;
4008 for (i = s; !bsi_end_p (i); first = false, bsi_next (&i))
4010 stmt = bsi_stmt (i);
4011 if (TREE_CODE (stmt) != LABEL_EXPR)
4012 break;
4013 label = LABEL_EXPR_LABEL (stmt);
4014 if (!DECL_NONLOCAL (label))
4016 if (!first)
4017 bsi_move_before (&i, &s);
4018 return label;
4022 label = create_artificial_label ();
4023 stmt = build1 (LABEL_EXPR, void_type_node, label);
4024 bsi_insert_before (&s, stmt, BSI_NEW_STMT);
4025 return label;
4029 /* Attempt to perform edge redirection by replacing a possibly complex
4030 jump instruction by a goto or by removing the jump completely.
4031 This can apply only if all edges now point to the same block. The
4032 parameters and return values are equivalent to
4033 redirect_edge_and_branch. */
4035 static edge
4036 tree_try_redirect_by_replacing_jump (edge e, basic_block target)
4038 basic_block src = e->src;
4039 block_stmt_iterator b;
4040 tree stmt;
4042 /* We can replace or remove a complex jump only when we have exactly
4043 two edges. */
4044 if (EDGE_COUNT (src->succs) != 2
4045 /* Verify that all targets will be TARGET. Specifically, the
4046 edge that is not E must also go to TARGET. */
4047 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4048 return NULL;
4050 b = bsi_last (src);
4051 if (bsi_end_p (b))
4052 return NULL;
4053 stmt = bsi_stmt (b);
4055 if (TREE_CODE (stmt) == COND_EXPR
4056 || TREE_CODE (stmt) == SWITCH_EXPR)
4058 bsi_remove (&b, true);
4059 e = ssa_redirect_edge (e, target);
4060 e->flags = EDGE_FALLTHRU;
4061 return e;
4064 return NULL;
4068 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4069 edge representing the redirected branch. */
4071 static edge
4072 tree_redirect_edge_and_branch (edge e, basic_block dest)
4074 basic_block bb = e->src;
4075 block_stmt_iterator bsi;
4076 edge ret;
4077 tree label, stmt;
4079 if (e->flags & EDGE_ABNORMAL)
4080 return NULL;
4082 if (e->src != ENTRY_BLOCK_PTR
4083 && (ret = tree_try_redirect_by_replacing_jump (e, dest)))
4084 return ret;
4086 if (e->dest == dest)
4087 return NULL;
4089 label = tree_block_label (dest);
4091 bsi = bsi_last (bb);
4092 stmt = bsi_end_p (bsi) ? NULL : bsi_stmt (bsi);
4094 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
4096 case COND_EXPR:
4097 stmt = (e->flags & EDGE_TRUE_VALUE
4098 ? COND_EXPR_THEN (stmt)
4099 : COND_EXPR_ELSE (stmt));
4100 GOTO_DESTINATION (stmt) = label;
4101 break;
4103 case GOTO_EXPR:
4104 /* No non-abnormal edges should lead from a non-simple goto, and
4105 simple ones should be represented implicitly. */
4106 gcc_unreachable ();
4108 case SWITCH_EXPR:
4110 tree cases = get_cases_for_edge (e, stmt);
4112 /* If we have a list of cases associated with E, then use it
4113 as it's a lot faster than walking the entire case vector. */
4114 if (cases)
4116 edge e2 = find_edge (e->src, dest);
4117 tree last, first;
4119 first = cases;
4120 while (cases)
4122 last = cases;
4123 CASE_LABEL (cases) = label;
4124 cases = TREE_CHAIN (cases);
4127 /* If there was already an edge in the CFG, then we need
4128 to move all the cases associated with E to E2. */
4129 if (e2)
4131 tree cases2 = get_cases_for_edge (e2, stmt);
4133 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4134 TREE_CHAIN (cases2) = first;
4137 else
4139 tree vec = SWITCH_LABELS (stmt);
4140 size_t i, n = TREE_VEC_LENGTH (vec);
4142 for (i = 0; i < n; i++)
4144 tree elt = TREE_VEC_ELT (vec, i);
4146 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4147 CASE_LABEL (elt) = label;
4151 break;
4154 case RETURN_EXPR:
4155 bsi_remove (&bsi, true);
4156 e->flags |= EDGE_FALLTHRU;
4157 break;
4159 default:
4160 /* Otherwise it must be a fallthru edge, and we don't need to
4161 do anything besides redirecting it. */
4162 gcc_assert (e->flags & EDGE_FALLTHRU);
4163 break;
4166 /* Update/insert PHI nodes as necessary. */
4168 /* Now update the edges in the CFG. */
4169 e = ssa_redirect_edge (e, dest);
4171 return e;
4175 /* Simple wrapper, as we can always redirect fallthru edges. */
4177 static basic_block
4178 tree_redirect_edge_and_branch_force (edge e, basic_block dest)
4180 e = tree_redirect_edge_and_branch (e, dest);
4181 gcc_assert (e);
4183 return NULL;
4187 /* Splits basic block BB after statement STMT (but at least after the
4188 labels). If STMT is NULL, BB is split just after the labels. */
4190 static basic_block
4191 tree_split_block (basic_block bb, void *stmt)
4193 block_stmt_iterator bsi;
4194 tree_stmt_iterator tsi_tgt;
4195 tree act;
4196 basic_block new_bb;
4197 edge e;
4198 edge_iterator ei;
4200 new_bb = create_empty_bb (bb);
4202 /* Redirect the outgoing edges. */
4203 new_bb->succs = bb->succs;
4204 bb->succs = NULL;
4205 FOR_EACH_EDGE (e, ei, new_bb->succs)
4206 e->src = new_bb;
4208 if (stmt && TREE_CODE ((tree) stmt) == LABEL_EXPR)
4209 stmt = NULL;
4211 /* Move everything from BSI to the new basic block. */
4212 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4214 act = bsi_stmt (bsi);
4215 if (TREE_CODE (act) == LABEL_EXPR)
4216 continue;
4218 if (!stmt)
4219 break;
4221 if (stmt == act)
4223 bsi_next (&bsi);
4224 break;
4228 if (bsi_end_p (bsi))
4229 return new_bb;
4231 /* Split the statement list - avoid re-creating new containers as this
4232 brings ugly quadratic memory consumption in the inliner.
4233 (We are still quadratic since we need to update stmt BB pointers,
4234 sadly.) */
4235 new_bb->stmt_list = tsi_split_statement_list_before (&bsi.tsi);
4236 for (tsi_tgt = tsi_start (new_bb->stmt_list);
4237 !tsi_end_p (tsi_tgt); tsi_next (&tsi_tgt))
4238 change_bb_for_stmt (tsi_stmt (tsi_tgt), new_bb);
4240 return new_bb;
4244 /* Moves basic block BB after block AFTER. */
4246 static bool
4247 tree_move_block_after (basic_block bb, basic_block after)
4249 if (bb->prev_bb == after)
4250 return true;
4252 unlink_block (bb);
4253 link_block (bb, after);
4255 return true;
4259 /* Return true if basic_block can be duplicated. */
4261 static bool
4262 tree_can_duplicate_bb_p (basic_block bb ATTRIBUTE_UNUSED)
4264 return true;
4268 /* Create a duplicate of the basic block BB. NOTE: This does not
4269 preserve SSA form. */
4271 static basic_block
4272 tree_duplicate_bb (basic_block bb)
4274 basic_block new_bb;
4275 block_stmt_iterator bsi, bsi_tgt;
4276 tree phi;
4278 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
4280 /* Copy the PHI nodes. We ignore PHI node arguments here because
4281 the incoming edges have not been setup yet. */
4282 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4284 tree copy = create_phi_node (PHI_RESULT (phi), new_bb);
4285 create_new_def_for (PHI_RESULT (copy), copy, PHI_RESULT_PTR (copy));
4288 /* Keep the chain of PHI nodes in the same order so that they can be
4289 updated by ssa_redirect_edge. */
4290 set_phi_nodes (new_bb, phi_reverse (phi_nodes (new_bb)));
4292 bsi_tgt = bsi_start (new_bb);
4293 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4295 def_operand_p def_p;
4296 ssa_op_iter op_iter;
4297 tree stmt, copy;
4298 int region;
4300 stmt = bsi_stmt (bsi);
4301 if (TREE_CODE (stmt) == LABEL_EXPR)
4302 continue;
4304 /* Create a new copy of STMT and duplicate STMT's virtual
4305 operands. */
4306 copy = unshare_expr (stmt);
4307 bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
4308 copy_virtual_operands (copy, stmt);
4309 region = lookup_stmt_eh_region (stmt);
4310 if (region >= 0)
4311 add_stmt_to_eh_region (copy, region);
4313 /* Create new names for all the definitions created by COPY and
4314 add replacement mappings for each new name. */
4315 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
4316 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
4319 return new_bb;
4323 /* Basic block BB_COPY was created by code duplication. Add phi node
4324 arguments for edges going out of BB_COPY. The blocks that were
4325 duplicated have BB_DUPLICATED set. */
4327 void
4328 add_phi_args_after_copy_bb (basic_block bb_copy)
4330 basic_block bb, dest;
4331 edge e, e_copy;
4332 edge_iterator ei;
4333 tree phi, phi_copy, phi_next, def;
4335 bb = get_bb_original (bb_copy);
4337 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
4339 if (!phi_nodes (e_copy->dest))
4340 continue;
4342 if (e_copy->dest->flags & BB_DUPLICATED)
4343 dest = get_bb_original (e_copy->dest);
4344 else
4345 dest = e_copy->dest;
4347 e = find_edge (bb, dest);
4348 if (!e)
4350 /* During loop unrolling the target of the latch edge is copied.
4351 In this case we are not looking for the edge to dest, but for the
4352 edge to the duplicated block whose original was dest. */
4353 FOR_EACH_EDGE (e, ei, bb->succs)
4354 if ((e->dest->flags & BB_DUPLICATED)
4355 && get_bb_original (e->dest) == dest)
4356 break;
4358 gcc_assert (e != NULL);
4361 for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
4362 phi;
4363 phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
4365 phi_next = PHI_CHAIN (phi);
4366 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4367 add_phi_arg (phi_copy, def, e_copy);
4372 /* Blocks in REGION_COPY array of length N_REGION were created by
4373 duplication of basic blocks. Add phi node arguments for edges
4374 going from these blocks. */
4376 void
4377 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region)
4379 unsigned i;
4381 for (i = 0; i < n_region; i++)
4382 region_copy[i]->flags |= BB_DUPLICATED;
4384 for (i = 0; i < n_region; i++)
4385 add_phi_args_after_copy_bb (region_copy[i]);
4387 for (i = 0; i < n_region; i++)
4388 region_copy[i]->flags &= ~BB_DUPLICATED;
4391 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
4392 important exit edge EXIT. By important we mean that no SSA name defined
4393 inside region is live over the other exit edges of the region. All entry
4394 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
4395 to the duplicate of the region. SSA form, dominance and loop information
4396 are updated. The new basic blocks are stored to REGION_COPY in the same
4397 order as they had in REGION, provided that REGION_COPY is not NULL.
4398 The function returns false if it is unable to copy the region,
4399 true otherwise. */
4401 bool
4402 tree_duplicate_sese_region (edge entry, edge exit,
4403 basic_block *region, unsigned n_region,
4404 basic_block *region_copy)
4406 unsigned i, n_doms;
4407 bool free_region_copy = false, copying_header = false;
4408 struct loop *loop = entry->dest->loop_father;
4409 edge exit_copy;
4410 basic_block *doms;
4411 edge redirected;
4412 int total_freq = 0, entry_freq = 0;
4413 gcov_type total_count = 0, entry_count = 0;
4415 if (!can_copy_bbs_p (region, n_region))
4416 return false;
4418 /* Some sanity checking. Note that we do not check for all possible
4419 misuses of this function, i.e. if you ask to copy something weird,
4420 it will work, but the state of structures probably will not be
4421 correct. */
4422 for (i = 0; i < n_region; i++)
4424 /* We do not handle subloops, i.e. all the blocks must belong to the
4425 same loop. */
4426 if (region[i]->loop_father != loop)
4427 return false;
4429 if (region[i] != entry->dest
4430 && region[i] == loop->header)
4431 return false;
4434 loop->copy = loop;
4436 /* In case the function is used for loop header copying (which is the primary
4437 use), ensure that EXIT and its copy will be the new latch and entry edges. */
4438 if (loop->header == entry->dest)
4440 copying_header = true;
4441 loop->copy = loop->outer;
4443 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
4444 return false;
4446 for (i = 0; i < n_region; i++)
4447 if (region[i] != exit->src
4448 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
4449 return false;
4452 if (!region_copy)
4454 region_copy = XNEWVEC (basic_block, n_region);
4455 free_region_copy = true;
4458 gcc_assert (!need_ssa_update_p ());
4460 /* Record blocks outside the region that are dominated by something
4461 inside. */
4462 doms = XNEWVEC (basic_block, n_basic_blocks);
4463 initialize_original_copy_tables ();
4465 n_doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region, doms);
4467 if (entry->dest->count)
4469 total_count = entry->dest->count;
4470 entry_count = entry->count;
4471 /* Fix up corner cases, to avoid division by zero or creation of negative
4472 frequencies. */
4473 if (entry_count > total_count)
4474 entry_count = total_count;
4476 else
4478 total_freq = entry->dest->frequency;
4479 entry_freq = EDGE_FREQUENCY (entry);
4480 /* Fix up corner cases, to avoid division by zero or creation of negative
4481 frequencies. */
4482 if (total_freq == 0)
4483 total_freq = 1;
4484 else if (entry_freq > total_freq)
4485 entry_freq = total_freq;
4488 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
4489 split_edge_bb_loc (entry));
4490 if (total_count)
4492 scale_bbs_frequencies_gcov_type (region, n_region,
4493 total_count - entry_count,
4494 total_count);
4495 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
4496 total_count);
4498 else
4500 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
4501 total_freq);
4502 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
4505 if (copying_header)
4507 loop->header = exit->dest;
4508 loop->latch = exit->src;
4511 /* Redirect the entry and add the phi node arguments. */
4512 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
4513 gcc_assert (redirected != NULL);
4514 flush_pending_stmts (entry);
4516 /* Concerning updating of dominators: We must recount dominators
4517 for entry block and its copy. Anything that is outside of the
4518 region, but was dominated by something inside needs recounting as
4519 well. */
4520 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
4521 doms[n_doms++] = get_bb_original (entry->dest);
4522 iterate_fix_dominators (CDI_DOMINATORS, doms, n_doms);
4523 free (doms);
4525 /* Add the other PHI node arguments. */
4526 add_phi_args_after_copy (region_copy, n_region);
4528 /* Update the SSA web. */
4529 update_ssa (TODO_update_ssa);
4531 if (free_region_copy)
4532 free (region_copy);
4534 free_original_copy_tables ();
4535 return true;
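/* A minimal caller sketch (illustrative only; it is not referenced anywhere
   else in this file).  It duplicates just the header of LOOP, roughly the
   way loop-header copying does, with EXIT being the edge that leaves the
   header.  loop_preheader_edge is assumed to be available from cfgloop.h.  */

static bool
duplicate_loop_header_sketch (struct loop *loop, edge exit)
{
  basic_block region[1];
  basic_block region_copy[1];
  edge entry = loop_preheader_edge (loop);

  /* The region consists of the single block ENTRY->dest, i.e. the loop
     header itself.  */
  region[0] = loop->header;

  if (!tree_duplicate_sese_region (entry, exit, region, 1, region_copy))
    return false;

  /* On success ENTRY now enters REGION_COPY[0], and SSA form, dominators
     and loop structures have already been updated.  */
  return true;
}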
4539 DEF_VEC_P(basic_block);
4540 DEF_VEC_ALLOC_P(basic_block,heap);
4543 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
4544 adding blocks when the dominator traversal reaches EXIT. This
4545 function silently assumes that ENTRY strictly dominates EXIT. */
4547 static void
4548 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
4549 VEC(basic_block,heap) **bbs_p)
4551 basic_block son;
4553 for (son = first_dom_son (CDI_DOMINATORS, entry);
4554 son;
4555 son = next_dom_son (CDI_DOMINATORS, son))
4557 VEC_safe_push (basic_block, heap, *bbs_p, son);
4558 if (son != exit)
4559 gather_blocks_in_sese_region (son, exit, bbs_p);
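/* A minimal usage sketch (illustrative only): collect ENTRY and every block
   it dominates, stopping at EXIT, into a heap-allocated vector, the same way
   move_sese_region_to_fn does further below.  */

static VEC(basic_block,heap) *
collect_sese_region_sketch (basic_block entry, basic_block exit)
{
  VEC(basic_block,heap) *bbs = NULL;

  /* ENTRY itself is not pushed by the dominator traversal, so add it by
     hand before walking its dominated sons.  */
  VEC_safe_push (basic_block, heap, bbs, entry);
  gather_blocks_in_sese_region (entry, exit, &bbs);
  return bbs;
}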
4564 struct move_stmt_d
4566 tree block;
4567 tree from_context;
4568 tree to_context;
4569 bitmap vars_to_remove;
4570 htab_t new_label_map;
4571 bool remap_decls_p;
4574 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
4575 contained in *TP and change the DECL_CONTEXT of every local
4576 variable referenced in *TP. */
4578 static tree
4579 move_stmt_r (tree *tp, int *walk_subtrees, void *data)
4581 struct move_stmt_d *p = (struct move_stmt_d *) data;
4582 tree t = *tp;
4584 if (p->block && IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (TREE_CODE (t))))
4585 TREE_BLOCK (t) = p->block;
4587 if (OMP_DIRECTIVE_P (t)
4588 && TREE_CODE (t) != OMP_RETURN
4589 && TREE_CODE (t) != OMP_CONTINUE)
4591 /* Do not remap variables inside OMP directives. Variables
4592 referenced in clauses and in the directive header belong to the
4593 parent function and should not be moved into the child
4594 function. */
4595 bool save_remap_decls_p = p->remap_decls_p;
4596 p->remap_decls_p = false;
4597 *walk_subtrees = 0;
4599 walk_tree (&OMP_BODY (t), move_stmt_r, p, NULL);
4601 p->remap_decls_p = save_remap_decls_p;
4603 else if (DECL_P (t) && DECL_CONTEXT (t) == p->from_context)
4605 if (TREE_CODE (t) == LABEL_DECL)
4607 if (p->new_label_map)
4609 struct tree_map in, *out;
4610 in.from = t;
4611 out = htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
4612 if (out)
4613 *tp = t = out->to;
4616 DECL_CONTEXT (t) = p->to_context;
4618 else if (p->remap_decls_p)
4620 DECL_CONTEXT (t) = p->to_context;
4622 if (TREE_CODE (t) == VAR_DECL)
4624 struct function *f = DECL_STRUCT_FUNCTION (p->to_context);
4625 f->unexpanded_var_list
4626 = tree_cons (0, t, f->unexpanded_var_list);
4628 /* Mark T to be removed from the original function,
4629 otherwise it will be given a DECL_RTL when the
4630 original function is expanded. */
4631 bitmap_set_bit (p->vars_to_remove, DECL_UID (t));
4635 else if (TYPE_P (t))
4636 *walk_subtrees = 0;
4638 return NULL_TREE;
4642 /* Move basic block BB from function CFUN to function DEST_FN. The
4643 block is moved out of the original linked list and placed after
4644 block AFTER in the new list. Also, the block is removed from the
4645 original array of blocks and placed in DEST_FN's array of blocks.
4646 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
4647 updated to reflect the moved edges.
4649 On exit, local variables that need to be removed from
4650 CFUN->UNEXPANDED_VAR_LIST will have been added to VARS_TO_REMOVE. */
4652 static void
4653 move_block_to_fn (struct function *dest_cfun, basic_block bb,
4654 basic_block after, bool update_edge_count_p,
4655 bitmap vars_to_remove, htab_t new_label_map, int eh_offset)
4657 struct control_flow_graph *cfg;
4658 edge_iterator ei;
4659 edge e;
4660 block_stmt_iterator si;
4661 struct move_stmt_d d;
4662 unsigned old_len, new_len;
4663 basic_block *addr;
4665 /* Link BB to the new linked list. */
4666 move_block_after (bb, after);
4668 /* Update the edge count in the corresponding flowgraphs. */
4669 if (update_edge_count_p)
4670 FOR_EACH_EDGE (e, ei, bb->succs)
4672 cfun->cfg->x_n_edges--;
4673 dest_cfun->cfg->x_n_edges++;
4676 /* Remove BB from the original basic block array. */
4677 VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
4678 cfun->cfg->x_n_basic_blocks--;
4680 /* Grow DEST_CFUN's basic block array if needed. */
4681 cfg = dest_cfun->cfg;
4682 cfg->x_n_basic_blocks++;
4683 if (bb->index > cfg->x_last_basic_block)
4684 cfg->x_last_basic_block = bb->index;
4686 old_len = VEC_length (basic_block, cfg->x_basic_block_info);
4687 if ((unsigned) cfg->x_last_basic_block >= old_len)
4689 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
4690 VEC_safe_grow (basic_block, gc, cfg->x_basic_block_info, new_len);
4691 addr = VEC_address (basic_block, cfg->x_basic_block_info);
4692 memset (&addr[old_len], 0, sizeof (basic_block) * (new_len - old_len));
4695 VEC_replace (basic_block, cfg->x_basic_block_info,
4696 cfg->x_last_basic_block, bb);
4698 /* The statements in BB need to be associated with a new TREE_BLOCK.
4699 Labels need to be associated with a new label-to-block map. */
4700 memset (&d, 0, sizeof (d));
4701 d.vars_to_remove = vars_to_remove;
4703 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
4705 tree stmt = bsi_stmt (si);
4706 int region;
4708 d.from_context = cfun->decl;
4709 d.to_context = dest_cfun->decl;
4710 d.remap_decls_p = true;
4711 d.new_label_map = new_label_map;
4712 if (TREE_BLOCK (stmt))
4713 d.block = DECL_INITIAL (dest_cfun->decl);
4715 walk_tree (&stmt, move_stmt_r, &d, NULL);
4717 if (TREE_CODE (stmt) == LABEL_EXPR)
4719 tree label = LABEL_EXPR_LABEL (stmt);
4720 int uid = LABEL_DECL_UID (label);
4722 gcc_assert (uid > -1);
4724 old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
4725 if (old_len <= (unsigned) uid)
4727 new_len = 3 * uid / 2;
4728 VEC_safe_grow (basic_block, gc, cfg->x_label_to_block_map,
4729 new_len);
4730 addr = VEC_address (basic_block, cfg->x_label_to_block_map);
4731 memset (&addr[old_len], 0,
4732 sizeof (basic_block) * (new_len - old_len));
4735 VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
4736 VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
4738 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
4740 if (uid >= dest_cfun->last_label_uid)
4741 dest_cfun->last_label_uid = uid + 1;
4743 else if (TREE_CODE (stmt) == RESX_EXPR && eh_offset != 0)
4744 TREE_OPERAND (stmt, 0) =
4745 build_int_cst (NULL_TREE,
4746 TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0))
4747 + eh_offset);
4749 region = lookup_stmt_eh_region (stmt);
4750 if (region >= 0)
4752 add_stmt_to_eh_region_fn (dest_cfun, stmt, region + eh_offset);
4753 remove_stmt_from_eh_region (stmt);
4758 /* Examine the statements in BB (which is in SRC_CFUN); find and return
4759 the outermost EH region. Use REGION as the incoming base EH region. */
4761 static int
4762 find_outermost_region_in_block (struct function *src_cfun,
4763 basic_block bb, int region)
4765 block_stmt_iterator si;
4767 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
4769 tree stmt = bsi_stmt (si);
4770 int stmt_region;
4772 if (TREE_CODE (stmt) == RESX_EXPR)
4773 stmt_region = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
4774 else
4775 stmt_region = lookup_stmt_eh_region_fn (src_cfun, stmt);
4776 if (stmt_region > 0)
4778 if (region < 0)
4779 region = stmt_region;
4780 else if (stmt_region != region)
4782 region = eh_region_outermost (src_cfun, stmt_region, region);
4783 gcc_assert (region != -1);
4788 return region;
4791 static tree
4792 new_label_mapper (tree decl, void *data)
4794 htab_t hash = (htab_t) data;
4795 struct tree_map *m;
4796 void **slot;
4798 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
4800 m = xmalloc (sizeof (struct tree_map));
4801 m->hash = DECL_UID (decl);
4802 m->from = decl;
4803 m->to = create_artificial_label ();
4804 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
4806 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
4807 gcc_assert (*slot == NULL);
4809 *slot = m;
4811 return m->to;
4814 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
4815 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
4816 single basic block in the original CFG and the new basic block is
4817 returned. DEST_CFUN must not have a CFG yet.
4819 Note that the region need not be a pure SESE region. Blocks inside
4820 the region may contain calls to abort/exit. The only restriction
4821 is that ENTRY_BB should be the only entry point and it must
4822 dominate EXIT_BB.
4824 All local variables referenced in the region are assumed to be in
4825 the corresponding BLOCK_VARS and unexpanded variable lists
4826 associated with DEST_CFUN. */
4828 basic_block
4829 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
4830 basic_block exit_bb)
4832 VEC(basic_block,heap) *bbs;
4833 basic_block after, bb, *entry_pred, *exit_succ;
4834 struct function *saved_cfun;
4835 int *entry_flag, *exit_flag, eh_offset;
4836 unsigned i, num_entry_edges, num_exit_edges;
4837 edge e;
4838 edge_iterator ei;
4839 bitmap vars_to_remove;
4840 htab_t new_label_map;
4842 saved_cfun = cfun;
4844 /* Collect all the blocks in the region. Manually add ENTRY_BB
4845 because it won't be added by gather_blocks_in_sese_region. */
4846 calculate_dominance_info (CDI_DOMINATORS);
4848 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
4849 region. */
4850 gcc_assert (entry_bb != exit_bb
4851 && (!exit_bb
4852 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
4854 bbs = NULL;
4855 VEC_safe_push (basic_block, heap, bbs, entry_bb);
4856 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
4858 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
4859 the predecessor edges to ENTRY_BB and the successor edges to
4860 EXIT_BB so that we can re-attach them to the new basic block that
4861 will replace the region. */
4862 num_entry_edges = EDGE_COUNT (entry_bb->preds);
4863 entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
4864 entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
4865 i = 0;
4866 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
4868 entry_flag[i] = e->flags;
4869 entry_pred[i++] = e->src;
4870 remove_edge (e);
4873 if (exit_bb)
4875 num_exit_edges = EDGE_COUNT (exit_bb->succs);
4876 exit_succ = (basic_block *) xcalloc (num_exit_edges,
4877 sizeof (basic_block));
4878 exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
4879 i = 0;
4880 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
4882 exit_flag[i] = e->flags;
4883 exit_succ[i++] = e->dest;
4884 remove_edge (e);
4887 else
4889 num_exit_edges = 0;
4890 exit_succ = NULL;
4891 exit_flag = NULL;
4894 /* Switch context to the child function to initialize DEST_FN's CFG. */
4895 gcc_assert (dest_cfun->cfg == NULL);
4896 cfun = dest_cfun;
4898 init_empty_tree_cfg ();
4900 /* Initialize EH information for the new function. */
4901 eh_offset = 0;
4902 new_label_map = NULL;
4903 if (saved_cfun->eh)
4905 int region = -1;
4907 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
4908 region = find_outermost_region_in_block (saved_cfun, bb, region);
4910 init_eh_for_function ();
4911 if (region != -1)
4913 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
4914 eh_offset = duplicate_eh_regions (saved_cfun, new_label_mapper,
4915 new_label_map, region, 0);
4919 cfun = saved_cfun;
4921 /* Move blocks from BBS into DEST_CFUN. */
4922 gcc_assert (VEC_length (basic_block, bbs) >= 2);
4923 after = dest_cfun->cfg->x_entry_block_ptr;
4924 vars_to_remove = BITMAP_ALLOC (NULL);
4925 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
4927 /* No need to update edge counts on the last block. They have
4928 already been updated earlier when we detached the region from
4929 the original CFG. */
4930 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, vars_to_remove,
4931 new_label_map, eh_offset);
4932 after = bb;
4935 if (new_label_map)
4936 htab_delete (new_label_map);
4938 /* Remove the variables marked in VARS_TO_REMOVE from
4939 CFUN->UNEXPANDED_VAR_LIST. Otherwise, they will be given a
4940 DECL_RTL in the context of CFUN. */
4941 if (!bitmap_empty_p (vars_to_remove))
4943 tree *p;
4945 for (p = &cfun->unexpanded_var_list; *p; )
4947 tree var = TREE_VALUE (*p);
4948 if (bitmap_bit_p (vars_to_remove, DECL_UID (var)))
4950 *p = TREE_CHAIN (*p);
4951 continue;
4954 p = &TREE_CHAIN (*p);
4958 BITMAP_FREE (vars_to_remove);
4960 /* Rewire the entry and exit blocks. The successor to the entry
4961 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
4962 the child function. Similarly, the predecessor of DEST_FN's
4963 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
4964 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
4965 various CFG manipulation functions get to the right CFG.
4967 FIXME, this is silly. The CFG ought to become a parameter to
4968 these helpers. */
4969 cfun = dest_cfun;
4970 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
4971 if (exit_bb)
4972 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
4973 cfun = saved_cfun;
4975 /* Back in the original function, the SESE region has disappeared,
4976 create a new basic block in its place. */
4977 bb = create_empty_bb (entry_pred[0]);
4978 for (i = 0; i < num_entry_edges; i++)
4979 make_edge (entry_pred[i], bb, entry_flag[i]);
4981 for (i = 0; i < num_exit_edges; i++)
4982 make_edge (bb, exit_succ[i], exit_flag[i]);
4984 if (exit_bb)
4986 free (exit_flag);
4987 free (exit_succ);
4989 free (entry_flag);
4990 free (entry_pred);
4991 free_dominance_info (CDI_DOMINATORS);
4992 free_dominance_info (CDI_POST_DOMINATORS);
4993 VEC_free (basic_block, heap, bbs);
4995 return bb;
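/* A minimal caller sketch (illustrative only): outline the region between
   ENTRY_BB and EXIT_BB into the still CFG-less function CHILD_FN, roughly
   the way an outlining pass such as OpenMP expansion would.  In the original
   function the region is replaced by the single block that is returned.  */

static basic_block
outline_region_sketch (tree child_fn, basic_block entry_bb, basic_block exit_bb)
{
  struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);

  /* move_sese_region_to_fn requires that CHILD_CFUN has no CFG yet and
     that ENTRY_BB dominates EXIT_BB.  */
  return move_sese_region_to_fn (child_cfun, entry_bb, exit_bb);
}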
4999 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h) */
5001 void
5002 dump_function_to_file (tree fn, FILE *file, int flags)
5004 tree arg, vars, var;
5005 bool ignore_topmost_bind = false, any_var = false;
5006 basic_block bb;
5007 tree chain;
5008 struct function *saved_cfun;
5010 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
5012 arg = DECL_ARGUMENTS (fn);
5013 while (arg)
5015 print_generic_expr (file, arg, dump_flags);
5016 if (TREE_CHAIN (arg))
5017 fprintf (file, ", ");
5018 arg = TREE_CHAIN (arg);
5020 fprintf (file, ")\n");
5022 if (flags & TDF_DETAILS)
5023 dump_eh_tree (file, DECL_STRUCT_FUNCTION (fn));
5024 if (flags & TDF_RAW)
5026 dump_node (fn, TDF_SLIM | flags, file);
5027 return;
5030 /* Switch CFUN to point to FN. */
5031 saved_cfun = cfun;
5032 cfun = DECL_STRUCT_FUNCTION (fn);
5034 /* When GIMPLE is lowered, the variables are no longer available in
5035 BIND_EXPRs, so display them separately. */
5036 if (cfun && cfun->decl == fn && cfun->unexpanded_var_list)
5038 ignore_topmost_bind = true;
5040 fprintf (file, "{\n");
5041 for (vars = cfun->unexpanded_var_list; vars; vars = TREE_CHAIN (vars))
5043 var = TREE_VALUE (vars);
5045 print_generic_decl (file, var, flags);
5046 fprintf (file, "\n");
5048 any_var = true;
5052 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
5054 /* Make a CFG based dump. */
5055 check_bb_profile (ENTRY_BLOCK_PTR, file);
5056 if (!ignore_topmost_bind)
5057 fprintf (file, "{\n");
5059 if (any_var && n_basic_blocks)
5060 fprintf (file, "\n");
5062 FOR_EACH_BB (bb)
5063 dump_generic_bb (file, bb, 2, flags);
5065 fprintf (file, "}\n");
5066 check_bb_profile (EXIT_BLOCK_PTR, file);
5068 else
5070 int indent;
5072 /* Make a tree based dump. */
5073 chain = DECL_SAVED_TREE (fn);
5075 if (chain && TREE_CODE (chain) == BIND_EXPR)
5077 if (ignore_topmost_bind)
5079 chain = BIND_EXPR_BODY (chain);
5080 indent = 2;
5082 else
5083 indent = 0;
5085 else
5087 if (!ignore_topmost_bind)
5088 fprintf (file, "{\n");
5089 indent = 2;
5092 if (any_var)
5093 fprintf (file, "\n");
5095 print_generic_stmt_indented (file, chain, flags, indent);
5096 if (ignore_topmost_bind)
5097 fprintf (file, "}\n");
5100 fprintf (file, "\n\n");
5102 /* Restore CFUN. */
5103 cfun = saved_cfun;
5107 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
5109 void
5110 debug_function (tree fn, int flags)
5112 dump_function_to_file (fn, stderr, flags);
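/* For example, from within a debugger the CFG form of the current function
   can be printed with (current_function_decl is the usual global holding the
   FUNCTION_DECL being compiled):

     (gdb) call debug_function (current_function_decl, TDF_DETAILS)  */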
5116 /* Pretty print of the loops intermediate representation. */
5117 static void print_loop (FILE *, struct loop *, int);
5118 static void print_pred_bbs (FILE *, basic_block bb);
5119 static void print_succ_bbs (FILE *, basic_block bb);
5122 /* Print on FILE the indexes for the predecessors of basic_block BB. */
5124 static void
5125 print_pred_bbs (FILE *file, basic_block bb)
5127 edge e;
5128 edge_iterator ei;
5130 FOR_EACH_EDGE (e, ei, bb->preds)
5131 fprintf (file, "bb_%d ", e->src->index);
5135 /* Print on FILE the indexes for the successors of basic_block BB. */
5137 static void
5138 print_succ_bbs (FILE *file, basic_block bb)
5140 edge e;
5141 edge_iterator ei;
5143 FOR_EACH_EDGE (e, ei, bb->succs)
5144 fprintf (file, "bb_%d ", e->dest->index);
5148 /* Pretty print LOOP on FILE, indented INDENT spaces. */
5150 static void
5151 print_loop (FILE *file, struct loop *loop, int indent)
5153 char *s_indent;
5154 basic_block bb;
5156 if (loop == NULL)
5157 return;
5159 s_indent = (char *) alloca ((size_t) indent + 1);
5160 memset ((void *) s_indent, ' ', (size_t) indent);
5161 s_indent[indent] = '\0';
5163 /* Print the loop's header. */
5164 fprintf (file, "%sloop_%d\n", s_indent, loop->num);
5166 /* Print the loop's body. */
5167 fprintf (file, "%s{\n", s_indent);
5168 FOR_EACH_BB (bb)
5169 if (bb->loop_father == loop)
5171 /* Print the basic_block's header. */
5172 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
5173 print_pred_bbs (file, bb);
5174 fprintf (file, "}, succs = {");
5175 print_succ_bbs (file, bb);
5176 fprintf (file, "})\n");
5178 /* Print the basic_block's body. */
5179 fprintf (file, "%s {\n", s_indent);
5180 tree_dump_bb (bb, file, indent + 4);
5181 fprintf (file, "%s }\n", s_indent);
5184 print_loop (file, loop->inner, indent + 2);
5185 fprintf (file, "%s}\n", s_indent);
5186 print_loop (file, loop->next, indent);
5190 /* Follow a CFG edge from the entry point of the function, and on entry
5191 of a loop, pretty print the loop structure on FILE. */
5193 void
5194 print_loop_ir (FILE *file)
5196 basic_block bb;
5198 bb = BASIC_BLOCK (NUM_FIXED_BLOCKS);
5199 if (bb && bb->loop_father)
5200 print_loop (file, bb->loop_father, 0);
5204 /* Debugging loops structure at tree level. */
5206 void
5207 debug_loop_ir (void)
5209 print_loop_ir (stderr);
5213 /* Return true if BB ends with a call, possibly followed by some
5214 instructions that must stay with the call. Return false,
5215 otherwise. */
5217 static bool
5218 tree_block_ends_with_call_p (basic_block bb)
5220 block_stmt_iterator bsi = bsi_last (bb);
5221 return get_call_expr_in (bsi_stmt (bsi)) != NULL;
5225 /* Return true if BB ends with a conditional branch. Return false,
5226 otherwise. */
5228 static bool
5229 tree_block_ends_with_condjump_p (basic_block bb)
5231 tree stmt = last_stmt (bb);
5232 return (stmt && TREE_CODE (stmt) == COND_EXPR);
5236 /* Return true if we need to add a fake edge to the exit at statement T.
5237 Helper function for tree_flow_call_edges_add. */
5239 static bool
5240 need_fake_edge_p (tree t)
5242 tree call;
5244 /* NORETURN and LONGJMP calls already have an edge to exit.
5245 CONST and PURE calls do not need one.
5246 We don't currently check for CONST and PURE here, although
5247 it would be a good idea, because those attributes are
5248 figured out from the RTL in mark_constant_function, and
5249 the counter incrementation code from -fprofile-arcs
5250 leads to different results from -fbranch-probabilities. */
5251 call = get_call_expr_in (t);
5252 if (call
5253 && !(call_expr_flags (call) & ECF_NORETURN))
5254 return true;
5256 if (TREE_CODE (t) == ASM_EXPR
5257 && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
5258 return true;
5260 return false;
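/* For instance, need_fake_edge_p returns true for a plain call such as

     some_fn ();

   (some_fn being a hypothetical external function that is not marked
   noreturn, so the callee might exit the program) and for a volatile asm
   such as

     __asm__ __volatile__ ("nop");

   but false for a call to a noreturn function like abort (), since such a
   call already has its own edge to EXIT.  */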
5264 /* Add fake edges to the function exit for any non-constant and
5265 non-noreturn calls, and volatile inline assembly, in the bitmap of
5266 blocks specified by BLOCKS, or to the whole CFG if BLOCKS is zero. Return
5267 the number of blocks that were split.
5269 The goal is to expose cases in which entering a basic block does
5270 not imply that all subsequent instructions must be executed. */
5272 static int
5273 tree_flow_call_edges_add (sbitmap blocks)
5275 int i;
5276 int blocks_split = 0;
5277 int last_bb = last_basic_block;
5278 bool check_last_block = false;
5280 if (n_basic_blocks == NUM_FIXED_BLOCKS)
5281 return 0;
5283 if (! blocks)
5284 check_last_block = true;
5285 else
5286 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
5288 /* In the last basic block, before epilogue generation, there will be
5289 a fallthru edge to EXIT. Special care is required if the last insn
5290 of the last basic block is a call because make_edge folds duplicate
5291 edges, which would result in the fallthru edge also being marked
5292 fake, which would result in the fallthru edge being removed by
5293 remove_fake_edges, which would result in an invalid CFG.
5295 Moreover, we can't elide the outgoing fake edge, since the block
5296 profiler needs to take this into account in order to solve the minimal
5297 spanning tree in the case that the call doesn't return.
5299 Handle this by adding a dummy instruction in a new last basic block. */
5300 if (check_last_block)
5302 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
5303 block_stmt_iterator bsi = bsi_last (bb);
5304 tree t = NULL_TREE;
5305 if (!bsi_end_p (bsi))
5306 t = bsi_stmt (bsi);
5308 if (t && need_fake_edge_p (t))
5310 edge e;
5312 e = find_edge (bb, EXIT_BLOCK_PTR);
5313 if (e)
5315 bsi_insert_on_edge (e, build_empty_stmt ());
5316 bsi_commit_edge_inserts ();
5321 /* Now add fake edges to the function exit for any non-constant
5322 calls since there is no way that we can determine if they will
5323 return or not... */
5324 for (i = 0; i < last_bb; i++)
5326 basic_block bb = BASIC_BLOCK (i);
5327 block_stmt_iterator bsi;
5328 tree stmt, last_stmt;
5330 if (!bb)
5331 continue;
5333 if (blocks && !TEST_BIT (blocks, i))
5334 continue;
5336 bsi = bsi_last (bb);
5337 if (!bsi_end_p (bsi))
5339 last_stmt = bsi_stmt (bsi);
5342 stmt = bsi_stmt (bsi);
5343 if (need_fake_edge_p (stmt))
5345 edge e;
5346 /* The handling above of the final block before the
5347 epilogue should be enough to verify that there is
5348 no edge to the exit block in CFG already.
5349 Calling make_edge in such case would cause us to
5350 mark that edge as fake and remove it later. */
5351 #ifdef ENABLE_CHECKING
5352 if (stmt == last_stmt)
5354 e = find_edge (bb, EXIT_BLOCK_PTR);
5355 gcc_assert (e == NULL);
5357 #endif
5359 /* Note that the following may create a new basic block
5360 and renumber the existing basic blocks. */
5361 if (stmt != last_stmt)
5363 e = split_block (bb, stmt);
5364 if (e)
5365 blocks_split++;
5367 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
5369 bsi_prev (&bsi);
5371 while (!bsi_end_p (bsi));
5375 if (blocks_split)
5376 verify_flow_info ();
5378 return blocks_split;
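/* A minimal usage sketch (illustrative only): restrict the fake-edge
   insertion to a single block by passing a one-bit sbitmap instead of a
   NULL bitmap covering the whole CFG.  sbitmap_alloc, sbitmap_zero, SET_BIT
   and sbitmap_free are assumed to come from sbitmap.h.  */

static int
add_fake_exit_edges_for_bb_sketch (basic_block bb)
{
  int split;
  sbitmap blocks = sbitmap_alloc (last_basic_block);

  sbitmap_zero (blocks);
  SET_BIT (blocks, bb->index);
  split = tree_flow_call_edges_add (blocks);
  sbitmap_free (blocks);
  return split;
}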
5381 /* Purge dead abnormal call edges from basic block BB. */
5383 bool
5384 tree_purge_dead_abnormal_call_edges (basic_block bb)
5386 bool changed = tree_purge_dead_eh_edges (bb);
5388 if (current_function_has_nonlocal_label)
5390 tree stmt = last_stmt (bb);
5391 edge_iterator ei;
5392 edge e;
5394 if (!(stmt && tree_can_make_abnormal_goto (stmt)))
5395 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5397 if (e->flags & EDGE_ABNORMAL)
5399 remove_edge (e);
5400 changed = true;
5402 else
5403 ei_next (&ei);
5406 /* See tree_purge_dead_eh_edges below. */
5407 if (changed)
5408 free_dominance_info (CDI_DOMINATORS);
5411 return changed;
5414 /* Purge dead EH edges from basic block BB. */
5416 bool
5417 tree_purge_dead_eh_edges (basic_block bb)
5419 bool changed = false;
5420 edge e;
5421 edge_iterator ei;
5422 tree stmt = last_stmt (bb);
5424 if (stmt && tree_can_throw_internal (stmt))
5425 return false;
5427 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5429 if (e->flags & EDGE_EH)
5431 remove_edge (e);
5432 changed = true;
5434 else
5435 ei_next (&ei);
5438 /* Removal of dead EH edges might change dominators of not
5439 just immediate successors. E.g. when bb1 is changed so that
5440 it no longer can throw and bb1->bb3 and bb1->bb4 are dead
5441 eh edges purged by this function in:
5442       0
5443      / \
5444     v   v
5445     1-->2
5446    / \  |
5447   v   v |
5448   3-->4 |
5449        \ v
5450      --->5
5453 idom(bb5) must be recomputed. For now just free the dominance
5454 info. */
5455 if (changed)
5456 free_dominance_info (CDI_DOMINATORS);
5458 return changed;
5461 bool
5462 tree_purge_all_dead_eh_edges (bitmap blocks)
5464 bool changed = false;
5465 unsigned i;
5466 bitmap_iterator bi;
5468 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
5470 changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
5473 return changed;
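/* A minimal usage sketch (illustrative only): purge dead EH edges from a
   single block through the bitmap interface, the way a pass that collects
   the blocks it has modified in a bitmap would.  */

static bool
purge_dead_eh_edges_for_bb_sketch (basic_block bb)
{
  bool changed;
  bitmap blocks = BITMAP_ALLOC (NULL);

  bitmap_set_bit (blocks, bb->index);
  changed = tree_purge_all_dead_eh_edges (blocks);
  BITMAP_FREE (blocks);
  return changed;
}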
5476 /* This function is called whenever a new edge is created or
5477 redirected. */
5479 static void
5480 tree_execute_on_growing_pred (edge e)
5482 basic_block bb = e->dest;
5484 if (phi_nodes (bb))
5485 reserve_phi_args_for_new_edge (bb);
5488 /* This function is called immediately before edge E is removed from
5489 the edge vector E->dest->preds. */
5491 static void
5492 tree_execute_on_shrinking_pred (edge e)
5494 if (phi_nodes (e->dest))
5495 remove_phi_args (e);
5498 /*---------------------------------------------------------------------------
5499 Helper functions for Loop versioning
5500 ---------------------------------------------------------------------------*/
5502 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
5503 of 'first'. Both of them are dominated by 'new_head' basic block. When
5504 'new_head' was created by splitting 'second's incoming edge, the edge from
5505 'new_head' to 'second' received phi arguments from split_edge(). Later, an
5506 additional edge 'e' was created to connect 'new_head' and 'first'. This
5507 routine now adds to edge 'e' the phi arguments that the edge from 'new_head'
5508 to 'second' received as part of the edge splitting. */
5511 static void
5512 tree_lv_adjust_loop_header_phi (basic_block first, basic_block second,
5513 basic_block new_head, edge e)
5515 tree phi1, phi2;
5516 edge e2 = find_edge (new_head, second);
5518 /* Because NEW_HEAD has been created by splitting SECOND's incoming
5519 edge, we should always have an edge from NEW_HEAD to SECOND. */
5520 gcc_assert (e2 != NULL);
5522 /* Browse all 'second' basic block phi nodes and add phi args to
5523 edge 'e' for 'first' head. PHI args are always in correct order. */
5525 for (phi2 = phi_nodes (second), phi1 = phi_nodes (first);
5526 phi2 && phi1;
5527 phi2 = PHI_CHAIN (phi2), phi1 = PHI_CHAIN (phi1))
5529 tree def = PHI_ARG_DEF (phi2, e2->dest_idx);
5530 add_phi_arg (phi1, def, e);
5534 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
5535 FIRST_HEAD is the destination of the THEN part and SECOND_HEAD is
5536 the destination of the ELSE part. */
5537 static void
5538 tree_lv_add_condition_to_bb (basic_block first_head, basic_block second_head,
5539 basic_block cond_bb, void *cond_e)
5541 block_stmt_iterator bsi;
5542 tree goto1 = NULL_TREE;
5543 tree goto2 = NULL_TREE;
5544 tree new_cond_expr = NULL_TREE;
5545 tree cond_expr = (tree) cond_e;
5546 edge e0;
5548 /* Build new conditional expr */
5549 goto1 = build1 (GOTO_EXPR, void_type_node, tree_block_label (first_head));
5550 goto2 = build1 (GOTO_EXPR, void_type_node, tree_block_label (second_head));
5551 new_cond_expr = build3 (COND_EXPR, void_type_node, cond_expr, goto1, goto2);
5553 /* Add new cond in cond_bb. */
5554 bsi = bsi_start (cond_bb);
5555 bsi_insert_after (&bsi, new_cond_expr, BSI_NEW_STMT);
5556 /* Adjust edges appropriately to connect new head with first head
5557 as well as second head. */
5558 e0 = single_succ_edge (cond_bb);
5559 e0->flags &= ~EDGE_FALLTHRU;
5560 e0->flags |= EDGE_FALSE_VALUE;
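/* The conditional that tree_lv_add_condition_to_bb inserts has the GIMPLE form

     if (COND_EXPR) goto <label of FIRST_HEAD>; else goto <label of SECOND_HEAD>;

   and the preexisting single successor edge of COND_BB is turned from a
   fallthru edge into the false (else) edge.  */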
5563 struct cfg_hooks tree_cfg_hooks = {
5564 "tree",
5565 tree_verify_flow_info,
5566 tree_dump_bb, /* dump_bb */
5567 create_bb, /* create_basic_block */
5568 tree_redirect_edge_and_branch,/* redirect_edge_and_branch */
5569 tree_redirect_edge_and_branch_force,/* redirect_edge_and_branch_force */
5570 remove_bb, /* delete_basic_block */
5571 tree_split_block, /* split_block */
5572 tree_move_block_after, /* move_block_after */
5573 tree_can_merge_blocks_p, /* can_merge_blocks_p */
5574 tree_merge_blocks, /* merge_blocks */
5575 tree_predict_edge, /* predict_edge */
5576 tree_predicted_by_p, /* predicted_by_p */
5577 tree_can_duplicate_bb_p, /* can_duplicate_block_p */
5578 tree_duplicate_bb, /* duplicate_block */
5579 tree_split_edge, /* split_edge */
5580 tree_make_forwarder_block, /* make_forwarder_block */
5581 NULL, /* tidy_fallthru_edge */
5582 tree_block_ends_with_call_p, /* block_ends_with_call_p */
5583 tree_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
5584 tree_flow_call_edges_add, /* flow_call_edges_add */
5585 tree_execute_on_growing_pred, /* execute_on_growing_pred */
5586 tree_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
5587 tree_duplicate_loop_to_header_edge, /* duplicate loop for trees */
5588 tree_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
5589 tree_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
5590 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
5591 flush_pending_stmts /* flush_pending_stmts */
5595 /* Split all critical edges. */
5597 static unsigned int
5598 split_critical_edges (void)
5600 basic_block bb;
5601 edge e;
5602 edge_iterator ei;
5604 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
5605 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
5606 mappings around the calls to split_edge. */
5607 start_recording_case_labels ();
5608 FOR_ALL_BB (bb)
5610 FOR_EACH_EDGE (e, ei, bb->succs)
5611 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
5613 split_edge (e);
5616 end_recording_case_labels ();
5617 return 0;
5620 struct tree_opt_pass pass_split_crit_edges =
5622 "crited", /* name */
5623 NULL, /* gate */
5624 split_critical_edges, /* execute */
5625 NULL, /* sub */
5626 NULL, /* next */
5627 0, /* static_pass_number */
5628 TV_TREE_SPLIT_EDGES, /* tv_id */
5629 PROP_cfg, /* properties required */
5630 PROP_no_crit_edges, /* properties_provided */
5631 0, /* properties_destroyed */
5632 0, /* todo_flags_start */
5633 TODO_dump_func, /* todo_flags_finish */
5634 0 /* letter */
5638 /* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
5639 a temporary, making sure it is registered to be renamed if necessary,
5640 and finally return the temporary. Put the statements to compute
5641 EXP before the current statement in BSI. */
5643 tree
5644 gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
5646 tree t, new_stmt, orig_stmt;
5648 if (is_gimple_val (exp))
5649 return exp;
5651 t = make_rename_temp (type, NULL);
5652 new_stmt = build2 (MODIFY_EXPR, type, t, exp);
5654 orig_stmt = bsi_stmt (*bsi);
5655 SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
5656 TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
5658 bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
5659 if (in_ssa_p)
5660 mark_new_vars_to_rename (new_stmt);
5662 return t;
5665 /* Build a ternary operation and gimplify it. Emit code before BSI.
5666 Return the gimple_val holding the result. */
5668 tree
5669 gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
5670 tree type, tree a, tree b, tree c)
5672 tree ret;
5674 ret = fold_build3 (code, type, a, b, c);
5675 STRIP_NOPS (ret);
5677 return gimplify_val (bsi, type, ret);
5680 /* Build a binary operation and gimplify it. Emit code before BSI.
5681 Return the gimple_val holding the result. */
5683 tree
5684 gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
5685 tree type, tree a, tree b)
5687 tree ret;
5689 ret = fold_build2 (code, type, a, b);
5690 STRIP_NOPS (ret);
5692 return gimplify_val (bsi, type, ret);
5695 /* Build a unary operation and gimplify it. Emit code before BSI.
5696 Return the gimple_val holding the result. */
5698 tree
5699 gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
5700 tree a)
5702 tree ret;
5704 ret = fold_build1 (code, type, a);
5705 STRIP_NOPS (ret);
5707 return gimplify_val (bsi, type, ret);
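/* A minimal usage sketch (illustrative only): compute (A + B) * C as GIMPLE
   temporaries in front of the statement at *BSI using the helpers above.
   A, B and C are assumed to be existing gimple values of type TYPE.  */

static tree
gimplify_madd_sketch (block_stmt_iterator *bsi, tree type, tree a, tree b,
                      tree c)
{
  tree sum = gimplify_build2 (bsi, PLUS_EXPR, type, a, b);
  return gimplify_build2 (bsi, MULT_EXPR, type, sum, c);
}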
5712 /* Emit return warnings. */
5714 static unsigned int
5715 execute_warn_function_return (void)
5717 #ifdef USE_MAPPED_LOCATION
5718 source_location location;
5719 #else
5720 location_t *locus;
5721 #endif
5722 tree last;
5723 edge e;
5724 edge_iterator ei;
5726 /* If we have a path to EXIT, then we do return. */
5727 if (TREE_THIS_VOLATILE (cfun->decl)
5728 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
5730 #ifdef USE_MAPPED_LOCATION
5731 location = UNKNOWN_LOCATION;
5732 #else
5733 locus = NULL;
5734 #endif
5735 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5737 last = last_stmt (e->src);
5738 if (TREE_CODE (last) == RETURN_EXPR
5739 #ifdef USE_MAPPED_LOCATION
5740 && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
5741 #else
5742 && (locus = EXPR_LOCUS (last)) != NULL)
5743 #endif
5744 break;
5746 #ifdef USE_MAPPED_LOCATION
5747 if (location == UNKNOWN_LOCATION)
5748 location = cfun->function_end_locus;
5749 warning (0, "%H%<noreturn%> function does return", &location);
5750 #else
5751 if (!locus)
5752 locus = &cfun->function_end_locus;
5753 warning (0, "%H%<noreturn%> function does return", locus);
5754 #endif
5757 /* If we see "return;" in some basic block, then we do reach the end
5758 without returning a value. */
5759 else if (warn_return_type
5760 && !TREE_NO_WARNING (cfun->decl)
5761 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
5762 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
5764 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5766 tree last = last_stmt (e->src);
5767 if (TREE_CODE (last) == RETURN_EXPR
5768 && TREE_OPERAND (last, 0) == NULL
5769 && !TREE_NO_WARNING (last))
5771 #ifdef USE_MAPPED_LOCATION
5772 location = EXPR_LOCATION (last);
5773 if (location == UNKNOWN_LOCATION)
5774 location = cfun->function_end_locus;
5775 warning (0, "%Hcontrol reaches end of non-void function", &location);
5776 #else
5777 locus = EXPR_LOCUS (last);
5778 if (!locus)
5779 locus = &cfun->function_end_locus;
5780 warning (0, "%Hcontrol reaches end of non-void function", locus);
5781 #endif
5782 TREE_NO_WARNING (cfun->decl) = 1;
5783 break;
5787 return 0;
5791 /* Given a basic block B which ends with a conditional and has
5792 precisely two successors, determine which of the edges is taken if
5793 the conditional is true and which is taken if the conditional is
5794 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
5796 void
5797 extract_true_false_edges_from_block (basic_block b,
5798 edge *true_edge,
5799 edge *false_edge)
5801 edge e = EDGE_SUCC (b, 0);
5803 if (e->flags & EDGE_TRUE_VALUE)
5805 *true_edge = e;
5806 *false_edge = EDGE_SUCC (b, 1);
5808 else
5810 *false_edge = e;
5811 *true_edge = EDGE_SUCC (b, 1);
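/* A minimal usage sketch (illustrative only): given a block ending in a
   COND_EXPR, return the successor block reached when the condition is
   true.  */

static basic_block
true_successor_sketch (basic_block cond_bb)
{
  edge true_edge, false_edge;

  extract_true_false_edges_from_block (cond_bb, &true_edge, &false_edge);
  return true_edge->dest;
}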
5815 struct tree_opt_pass pass_warn_function_return =
5817 NULL, /* name */
5818 NULL, /* gate */
5819 execute_warn_function_return, /* execute */
5820 NULL, /* sub */
5821 NULL, /* next */
5822 0, /* static_pass_number */
5823 0, /* tv_id */
5824 PROP_cfg, /* properties_required */
5825 0, /* properties_provided */
5826 0, /* properties_destroyed */
5827 0, /* todo_flags_start */
5828 0, /* todo_flags_finish */
5829 0 /* letter */
5832 /* Emit noreturn warnings. */
5834 static unsigned int
5835 execute_warn_function_noreturn (void)
5837 if (warn_missing_noreturn
5838 && !TREE_THIS_VOLATILE (cfun->decl)
5839 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
5840 && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl))
5841 warning (OPT_Wmissing_noreturn, "%Jfunction might be possible candidate "
5842 "for attribute %<noreturn%>",
5843 cfun->decl);
5844 return 0;
5847 struct tree_opt_pass pass_warn_function_noreturn =
5849 NULL, /* name */
5850 NULL, /* gate */
5851 execute_warn_function_noreturn, /* execute */
5852 NULL, /* sub */
5853 NULL, /* next */
5854 0, /* static_pass_number */
5855 0, /* tv_id */
5856 PROP_cfg, /* properties_required */
5857 0, /* properties_provided */
5858 0, /* properties_destroyed */
5859 0, /* todo_flags_start */
5860 0, /* todo_flags_finish */
5861 0 /* letter */