gcc/tree-cfg.c
1 /* Control flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006
3 Free Software Foundation, Inc.
4 Contributed by Diego Novillo <dnovillo@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to
20 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
21 Boston, MA 02110-1301, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "rtl.h"
29 #include "tm_p.h"
30 #include "hard-reg-set.h"
31 #include "basic-block.h"
32 #include "output.h"
33 #include "flags.h"
34 #include "function.h"
35 #include "expr.h"
36 #include "ggc.h"
37 #include "langhooks.h"
38 #include "diagnostic.h"
39 #include "tree-flow.h"
40 #include "timevar.h"
41 #include "tree-dump.h"
42 #include "tree-pass.h"
43 #include "toplev.h"
44 #include "except.h"
45 #include "cfgloop.h"
46 #include "cfglayout.h"
47 #include "hashtab.h"
48 #include "tree-ssa-propagate.h"
50 /* This file contains functions for building the Control Flow Graph (CFG)
51 for a function tree. */
53 /* Local declarations. */
55 /* Initial capacity for the basic block array. */
56 static const int initial_cfg_capacity = 20;
58 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
59 which use a particular edge. The CASE_LABEL_EXPRs are chained together
60 via their TREE_CHAIN field, which we clear after we're done with the
61 hash table to prevent problems with duplication of SWITCH_EXPRs.
63 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
64 update the case vector in response to edge redirections.
66 Right now this table is set up and torn down at key points in the
67 compilation process. It would be nice if we could make the table
68 more persistent. The key is getting notification of changes to
69 the CFG (particularly edge removal, creation and redirection). */
71 struct edge_to_cases_elt
73 /* The edge itself. Necessary for hashing and equality tests. */
74 edge e;
76 /* The case labels associated with this edge. We link these up via
77 their TREE_CHAIN field, then we wipe out the TREE_CHAIN fields
78 when we destroy the hash table. This prevents problems when copying
79 SWITCH_EXPRs. */
80 tree case_labels;
83 static htab_t edge_to_cases;
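/* For illustration (hypothetical labels): if a SWITCH_EXPR has "case 1:",
   "case 2:" and "default:" all jumping to the same label L, then the single
   edge E from the switch block to L's block maps to one edge_to_cases_elt
   whose CASE_LABEL_EXPRs are chained together through their TREE_CHAIN
   fields.  Redirecting E only requires walking that chain and rewriting
   each CASE_LABEL, rather than rescanning the whole SWITCH_LABELS vector.  */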
85 /* CFG statistics. */
86 struct cfg_stats_d
88 long num_merged_labels;
91 static struct cfg_stats_d cfg_stats;
93 /* Nonzero if we found a computed goto while building basic blocks. */
94 static bool found_computed_goto;
96 /* Basic blocks and flowgraphs. */
97 static basic_block create_bb (void *, void *, basic_block);
98 static void make_blocks (tree);
99 static void factor_computed_gotos (void);
101 /* Edges. */
102 static void make_edges (void);
103 static void make_cond_expr_edges (basic_block);
104 static void make_switch_expr_edges (basic_block);
105 static void make_goto_expr_edges (basic_block);
106 static edge tree_redirect_edge_and_branch (edge, basic_block);
107 static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
108 static unsigned int split_critical_edges (void);
110 /* Various helpers. */
111 static inline bool stmt_starts_bb_p (tree, tree);
112 static int tree_verify_flow_info (void);
113 static void tree_make_forwarder_block (edge);
114 static void tree_cfg2vcg (FILE *);
115 static inline void change_bb_for_stmt (tree t, basic_block bb);
117 /* Flowgraph optimization and cleanup. */
118 static void tree_merge_blocks (basic_block, basic_block);
119 static bool tree_can_merge_blocks_p (basic_block, basic_block);
120 static void remove_bb (basic_block);
121 static edge find_taken_edge_computed_goto (basic_block, tree);
122 static edge find_taken_edge_cond_expr (basic_block, tree);
123 static edge find_taken_edge_switch_expr (basic_block, tree);
124 static tree find_case_label_for_value (tree, tree);
126 void
127 init_empty_tree_cfg (void)
129 /* Initialize the basic block array. */
130 init_flow ();
131 profile_status = PROFILE_ABSENT;
132 n_basic_blocks = NUM_FIXED_BLOCKS;
133 last_basic_block = NUM_FIXED_BLOCKS;
134 basic_block_info = VEC_alloc (basic_block, gc, initial_cfg_capacity);
135 VEC_safe_grow (basic_block, gc, basic_block_info, initial_cfg_capacity);
136 memset (VEC_address (basic_block, basic_block_info), 0,
137 sizeof (basic_block) * initial_cfg_capacity);
139 /* Build a mapping of labels to their associated blocks. */
140 label_to_block_map = VEC_alloc (basic_block, gc, initial_cfg_capacity);
141 VEC_safe_grow (basic_block, gc, label_to_block_map, initial_cfg_capacity);
142 memset (VEC_address (basic_block, label_to_block_map),
143 0, sizeof (basic_block) * initial_cfg_capacity);
145 SET_BASIC_BLOCK (ENTRY_BLOCK, ENTRY_BLOCK_PTR);
146 SET_BASIC_BLOCK (EXIT_BLOCK, EXIT_BLOCK_PTR);
147 ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
148 EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
151 /*---------------------------------------------------------------------------
152 Create basic blocks
153 ---------------------------------------------------------------------------*/
155 /* Entry point to the CFG builder for trees. TP points to the list of
156 statements to be added to the flowgraph. */
158 static void
159 build_tree_cfg (tree *tp)
161 /* Register specific tree functions. */
162 tree_register_cfg_hooks ();
164 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
166 init_empty_tree_cfg ();
168 found_computed_goto = 0;
169 make_blocks (*tp);
171 /* Computed gotos are hell to deal with, especially if there are
172 lots of them with a large number of destinations. So we factor
173 them to a common computed goto location before we build the
174 edge list. After we convert back to normal form, we will un-factor
175 the computed gotos since factoring introduces an unwanted jump. */
176 if (found_computed_goto)
177 factor_computed_gotos ();
179 /* Make sure there is always at least one block, even if it's empty. */
180 if (n_basic_blocks == NUM_FIXED_BLOCKS)
181 create_empty_bb (ENTRY_BLOCK_PTR);
183 /* Adjust the size of the array. */
184 if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
186 size_t old_size = VEC_length (basic_block, basic_block_info);
187 basic_block *p;
188 VEC_safe_grow (basic_block, gc, basic_block_info, n_basic_blocks);
189 p = VEC_address (basic_block, basic_block_info);
190 memset (&p[old_size], 0,
191 sizeof (basic_block) * (n_basic_blocks - old_size));
194 /* To speed up statement iterator walks, we first purge dead labels. */
195 cleanup_dead_labels ();
197 /* Group case nodes to reduce the number of edges.
198 We do this after cleaning up dead labels because otherwise we miss
199 a lot of obvious case merging opportunities. */
200 group_case_labels ();
202 /* Create the edges of the flowgraph. */
203 make_edges ();
205 /* Debugging dumps. */
207 /* Write the flowgraph to a VCG file. */
209 int local_dump_flags;
210 FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
211 if (vcg_file)
213 tree_cfg2vcg (vcg_file);
214 dump_end (TDI_vcg, vcg_file);
218 #ifdef ENABLE_CHECKING
219 verify_stmts ();
220 #endif
222 /* Dump a textual representation of the flowgraph. */
223 if (dump_file)
224 dump_tree_cfg (dump_file, dump_flags);
227 static unsigned int
228 execute_build_cfg (void)
230 build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
231 return 0;
234 struct tree_opt_pass pass_build_cfg =
236 "cfg", /* name */
237 NULL, /* gate */
238 execute_build_cfg, /* execute */
239 NULL, /* sub */
240 NULL, /* next */
241 0, /* static_pass_number */
242 TV_TREE_CFG, /* tv_id */
243 PROP_gimple_leh, /* properties_required */
244 PROP_cfg, /* properties_provided */
245 0, /* properties_destroyed */
246 0, /* todo_flags_start */
247 TODO_verify_stmts, /* todo_flags_finish */
248 0 /* letter */
251 /* Search the CFG for any computed gotos. If found, factor them to a
252 common computed goto site. Also record the location of that site so
253 that we can un-factor the gotos after we have converted back to
254 normal form. */
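/* As an illustrative sketch (the pointer names p_1/p_2 are made up;
   "gotovar" is the temporary actually created below):

	goto *p_1;   ...   goto *p_2;

   is rewritten as

	gotovar = p_1; goto <factored>;   ...   gotovar = p_2; goto <factored>;
      <factored>:
	goto *gotovar;

   so that only the single factored computed goto needs abnormal edges to
   every address-taken label, instead of each original computed goto.  */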
256 static void
257 factor_computed_gotos (void)
259 basic_block bb;
260 tree factored_label_decl = NULL;
261 tree var = NULL;
262 tree factored_computed_goto_label = NULL;
263 tree factored_computed_goto = NULL;
265 /* We know there are one or more computed gotos in this function.
266 Examine the last statement in each basic block to see if the block
267 ends with a computed goto. */
269 FOR_EACH_BB (bb)
271 block_stmt_iterator bsi = bsi_last (bb);
272 tree last;
274 if (bsi_end_p (bsi))
275 continue;
276 last = bsi_stmt (bsi);
278 /* Ignore the computed goto we create when we factor the original
279 computed gotos. */
280 if (last == factored_computed_goto)
281 continue;
283 /* If the last statement is a computed goto, factor it. */
284 if (computed_goto_p (last))
286 tree assignment;
288 /* The first time we find a computed goto we need to create
289 the factored goto block and the variable each original
290 computed goto will use for its goto destination. */
291 if (! factored_computed_goto)
293 basic_block new_bb = create_empty_bb (bb);
294 block_stmt_iterator new_bsi = bsi_start (new_bb);
296 /* Create the destination of the factored goto. Each original
297 computed goto will put its desired destination into this
298 variable and jump to the label we create immediately
299 below. */
300 var = create_tmp_var (ptr_type_node, "gotovar");
302 /* Build a label for the new block which will contain the
303 factored computed goto. */
304 factored_label_decl = create_artificial_label ();
305 factored_computed_goto_label
306 = build1 (LABEL_EXPR, void_type_node, factored_label_decl);
307 bsi_insert_after (&new_bsi, factored_computed_goto_label,
308 BSI_NEW_STMT);
310 /* Build our new computed goto. */
311 factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
312 bsi_insert_after (&new_bsi, factored_computed_goto,
313 BSI_NEW_STMT);
316 /* Copy the original computed goto's destination into VAR. */
317 assignment = build2 (MODIFY_EXPR, ptr_type_node,
318 var, GOTO_DESTINATION (last));
319 bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);
321 /* And re-vector the computed goto to the new destination. */
322 GOTO_DESTINATION (last) = factored_label_decl;
328 /* Build a flowgraph for the statement_list STMT_LIST. */
330 static void
331 make_blocks (tree stmt_list)
333 tree_stmt_iterator i = tsi_start (stmt_list);
334 tree stmt = NULL;
335 bool start_new_block = true;
336 bool first_stmt_of_list = true;
337 basic_block bb = ENTRY_BLOCK_PTR;
339 while (!tsi_end_p (i))
341 tree prev_stmt;
343 prev_stmt = stmt;
344 stmt = tsi_stmt (i);
346 /* If the statement starts a new basic block or if we have determined
347 in a previous pass that we need to create a new block for STMT, do
348 so now. */
349 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
351 if (!first_stmt_of_list)
352 stmt_list = tsi_split_statement_list_before (&i);
353 bb = create_basic_block (stmt_list, NULL, bb);
354 start_new_block = false;
357 /* Now add STMT to BB and create the subgraphs for special statement
358 codes. */
359 set_bb_for_stmt (stmt, bb);
361 if (computed_goto_p (stmt))
362 found_computed_goto = true;
364 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
365 next iteration. */
366 if (stmt_ends_bb_p (stmt))
367 start_new_block = true;
369 tsi_next (&i);
370 first_stmt_of_list = false;
375 /* Create and return a new empty basic block after bb AFTER. */
377 static basic_block
378 create_bb (void *h, void *e, basic_block after)
380 basic_block bb;
382 gcc_assert (!e);
384 /* Create and initialize a new basic block. Since alloc_block uses
385 ggc_alloc_cleared to allocate a basic block, we do not have to
386 clear the newly allocated basic block here. */
387 bb = alloc_block ();
389 bb->index = last_basic_block;
390 bb->flags = BB_NEW;
391 bb->stmt_list = h ? (tree) h : alloc_stmt_list ();
393 /* Add the new block to the linked list of blocks. */
394 link_block (bb, after);
396 /* Grow the basic block array if needed. */
397 if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
399 size_t old_size = VEC_length (basic_block, basic_block_info);
400 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
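/* The formula above grows the vector by roughly 25%, which keeps the
   reallocation cost amortized as blocks are created one by one.  */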
401 basic_block *p;
402 VEC_safe_grow (basic_block, gc, basic_block_info, new_size);
403 p = VEC_address (basic_block, basic_block_info);
404 memset (&p[old_size], 0, sizeof (basic_block) * (new_size - old_size));
407 /* Add the newly created block to the array. */
408 SET_BASIC_BLOCK (last_basic_block, bb);
410 n_basic_blocks++;
411 last_basic_block++;
413 return bb;
417 /*---------------------------------------------------------------------------
418 Edge creation
419 ---------------------------------------------------------------------------*/
421 /* Fold COND_EXPR_COND of each COND_EXPR. */
423 void
424 fold_cond_expr_cond (void)
426 basic_block bb;
428 FOR_EACH_BB (bb)
430 tree stmt = last_stmt (bb);
432 if (stmt
433 && TREE_CODE (stmt) == COND_EXPR)
435 tree cond = fold (COND_EXPR_COND (stmt));
436 if (integer_zerop (cond))
437 COND_EXPR_COND (stmt) = boolean_false_node;
438 else if (integer_onep (cond))
439 COND_EXPR_COND (stmt) = boolean_true_node;
444 /* Join all the blocks in the flowgraph. */
446 static void
447 make_edges (void)
449 basic_block bb;
450 struct omp_region *cur_region = NULL;
452 /* Create an edge from entry to the first block with executable
453 statements in it. */
454 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);
456 /* Traverse the basic block array placing edges. */
457 FOR_EACH_BB (bb)
459 tree last = last_stmt (bb);
460 bool fallthru;
462 if (last)
464 enum tree_code code = TREE_CODE (last);
465 switch (code)
467 case GOTO_EXPR:
468 make_goto_expr_edges (bb);
469 fallthru = false;
470 break;
471 case RETURN_EXPR:
472 make_edge (bb, EXIT_BLOCK_PTR, 0);
473 fallthru = false;
474 break;
475 case COND_EXPR:
476 make_cond_expr_edges (bb);
477 fallthru = false;
478 break;
479 case SWITCH_EXPR:
480 make_switch_expr_edges (bb);
481 fallthru = false;
482 break;
483 case RESX_EXPR:
484 make_eh_edges (last);
485 fallthru = false;
486 break;
488 case CALL_EXPR:
489 /* If this function receives a nonlocal goto, then we need to
490 make edges from this call site to all the nonlocal goto
491 handlers. */
492 if (tree_can_make_abnormal_goto (last))
493 make_abnormal_goto_edges (bb, true);
495 /* If this statement has reachable exception handlers, then
496 create abnormal edges to them. */
497 make_eh_edges (last);
499 /* Some calls are known not to return. */
500 fallthru = !(call_expr_flags (last) & ECF_NORETURN);
501 break;
503 case MODIFY_EXPR:
504 if (is_ctrl_altering_stmt (last))
506 /* A MODIFY_EXPR may have a CALL_EXPR on its RHS and the
507 CALL_EXPR may have an abnormal edge. Search the RHS for
508 this case and create any required edges. */
509 if (tree_can_make_abnormal_goto (last))
510 make_abnormal_goto_edges (bb, true);
512 make_eh_edges (last);
514 fallthru = true;
515 break;
517 case OMP_PARALLEL:
518 case OMP_FOR:
519 case OMP_SINGLE:
520 case OMP_MASTER:
521 case OMP_ORDERED:
522 case OMP_CRITICAL:
523 case OMP_SECTION:
524 cur_region = new_omp_region (bb, code, cur_region);
525 fallthru = true;
526 break;
528 case OMP_SECTIONS:
529 cur_region = new_omp_region (bb, code, cur_region);
530 fallthru = false;
531 break;
533 case OMP_RETURN:
534 /* In the case of an OMP_SECTION, the edge will go somewhere
535 other than the next block. This will be created later. */
536 cur_region->exit = bb;
537 fallthru = cur_region->type != OMP_SECTION;
538 cur_region = cur_region->outer;
539 break;
541 case OMP_CONTINUE:
542 cur_region->cont = bb;
543 switch (cur_region->type)
545 case OMP_FOR:
546 /* ??? Technically there should be some sort of loopback
547 edge here, but it goes to a block that doesn't exist yet,
548 and without it, updating the ssa form would be a real
549 bear. Fortunately, we don't yet do ssa before expanding
550 these nodes. */
551 break;
553 case OMP_SECTIONS:
554 /* Wire up the edges into and out of the nested sections. */
555 /* ??? Similarly wrt loopback. */
557 struct omp_region *i;
558 for (i = cur_region->inner; i ; i = i->next)
560 gcc_assert (i->type == OMP_SECTION);
561 make_edge (cur_region->entry, i->entry, 0);
562 make_edge (i->exit, bb, EDGE_FALLTHRU);
565 break;
567 default:
568 gcc_unreachable ();
570 fallthru = true;
571 break;
573 default:
574 gcc_assert (!stmt_ends_bb_p (last));
575 fallthru = true;
578 else
579 fallthru = true;
581 if (fallthru)
582 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
585 if (root_omp_region)
586 free_omp_regions ();
588 /* Fold COND_EXPR_COND of each COND_EXPR. */
589 fold_cond_expr_cond ();
591 /* Clean up the graph and warn for unreachable code. */
592 cleanup_tree_cfg ();
596 /* Create the edges for a COND_EXPR starting at block BB.
597 At this point, both clauses must contain only simple gotos. */
599 static void
600 make_cond_expr_edges (basic_block bb)
602 tree entry = last_stmt (bb);
603 basic_block then_bb, else_bb;
604 tree then_label, else_label;
605 edge e;
607 gcc_assert (entry);
608 gcc_assert (TREE_CODE (entry) == COND_EXPR);
610 /* Entry basic blocks for each component. */
611 then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
612 else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
613 then_bb = label_to_block (then_label);
614 else_bb = label_to_block (else_label);
616 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
617 #ifdef USE_MAPPED_LOCATION
618 e->goto_locus = EXPR_LOCATION (COND_EXPR_THEN (entry));
619 #else
620 e->goto_locus = EXPR_LOCUS (COND_EXPR_THEN (entry));
621 #endif
622 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
623 if (e)
625 #ifdef USE_MAPPED_LOCATION
626 e->goto_locus = EXPR_LOCATION (COND_EXPR_ELSE (entry));
627 #else
628 e->goto_locus = EXPR_LOCUS (COND_EXPR_ELSE (entry));
629 #endif
633 /* Hashing routine for EDGE_TO_CASES. */
635 static hashval_t
636 edge_to_cases_hash (const void *p)
638 edge e = ((struct edge_to_cases_elt *)p)->e;
640 /* Hash on the edge itself (which is a pointer). */
641 return htab_hash_pointer (e);
644 /* Equality routine for EDGE_TO_CASES; edges are unique, so testing
645 for equality is just a pointer comparison. */
647 static int
648 edge_to_cases_eq (const void *p1, const void *p2)
650 edge e1 = ((struct edge_to_cases_elt *)p1)->e;
651 edge e2 = ((struct edge_to_cases_elt *)p2)->e;
653 return e1 == e2;
656 /* Called for each element in the hash table (P) as we delete the
657 edge to cases hash table.
659 Clear all the TREE_CHAINs to prevent problems with copying of
660 SWITCH_EXPRs and structure sharing rules, then free the hash table
661 element. */
663 static void
664 edge_to_cases_cleanup (void *p)
666 struct edge_to_cases_elt *elt = (struct edge_to_cases_elt *) p;
667 tree t, next;
669 for (t = elt->case_labels; t; t = next)
671 next = TREE_CHAIN (t);
672 TREE_CHAIN (t) = NULL;
674 free (p);
677 /* Start recording information mapping edges to case labels. */
679 void
680 start_recording_case_labels (void)
682 gcc_assert (edge_to_cases == NULL);
684 edge_to_cases = htab_create (37,
685 edge_to_cases_hash,
686 edge_to_cases_eq,
687 edge_to_cases_cleanup);
690 /* Return nonzero if we are recording information for case labels. */
692 static bool
693 recording_case_labels_p (void)
695 return (edge_to_cases != NULL);
698 /* Stop recording information mapping edges to case labels and
699 remove any information we have recorded. */
700 void
701 end_recording_case_labels (void)
703 htab_delete (edge_to_cases);
704 edge_to_cases = NULL;
707 /* Record that CASE_LABEL (a CASE_LABEL_EXPR) references edge E. */
709 static void
710 record_switch_edge (edge e, tree case_label)
712 struct edge_to_cases_elt *elt;
713 void **slot;
715 /* Build a hash table element so we can see if E is already
716 in the table. */
717 elt = XNEW (struct edge_to_cases_elt);
718 elt->e = e;
719 elt->case_labels = case_label;
721 slot = htab_find_slot (edge_to_cases, elt, INSERT);
723 if (*slot == NULL)
725 /* E was not in the hash table. Install E into the hash table. */
726 *slot = (void *)elt;
728 else
730 /* E was already in the hash table. Free ELT as we do not need it
731 anymore. */
732 free (elt);
734 /* Get the entry stored in the hash table. */
735 elt = (struct edge_to_cases_elt *) *slot;
737 /* Add it to the chain of CASE_LABEL_EXPRs referencing E. */
738 TREE_CHAIN (case_label) = elt->case_labels;
739 elt->case_labels = case_label;
743 /* If we are inside a {start,end}_recording_cases block, then return
744 a chain of CASE_LABEL_EXPRs from T which reference E.
746 Otherwise return NULL. */
748 static tree
749 get_cases_for_edge (edge e, tree t)
751 struct edge_to_cases_elt elt, *elt_p;
752 void **slot;
753 size_t i, n;
754 tree vec;
756 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
757 chains available. Return NULL so the caller can detect this case. */
758 if (!recording_case_labels_p ())
759 return NULL;
761 restart:
762 elt.e = e;
763 elt.case_labels = NULL;
764 slot = htab_find_slot (edge_to_cases, &elt, NO_INSERT);
766 if (slot)
768 elt_p = (struct edge_to_cases_elt *)*slot;
769 return elt_p->case_labels;
772 /* If we did not find E in the hash table, then this must be the first
773 time we have been queried for information about E & T. Add all the
774 elements from T to the hash table then perform the query again. */
776 vec = SWITCH_LABELS (t);
777 n = TREE_VEC_LENGTH (vec);
778 for (i = 0; i < n; i++)
780 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
781 basic_block label_bb = label_to_block (lab);
782 record_switch_edge (find_edge (e->src, label_bb), TREE_VEC_ELT (vec, i));
784 goto restart;
787 /* Create the edges for a SWITCH_EXPR starting at block BB.
788 At this point, the switch body has been lowered and the
789 SWITCH_LABELS filled in, so this is in effect a multi-way branch. */
791 static void
792 make_switch_expr_edges (basic_block bb)
794 tree entry = last_stmt (bb);
795 size_t i, n;
796 tree vec;
798 vec = SWITCH_LABELS (entry);
799 n = TREE_VEC_LENGTH (vec);
801 for (i = 0; i < n; ++i)
803 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
804 basic_block label_bb = label_to_block (lab);
805 make_edge (bb, label_bb, 0);
810 /* Return the basic block holding label DEST. */
812 basic_block
813 label_to_block_fn (struct function *ifun, tree dest)
815 int uid = LABEL_DECL_UID (dest);
817 /* We would die hard when faced with an undefined label. Emit a label in
818 the very first basic block. This will hopefully make even the dataflow
819 and undefined-variable warnings reasonably accurate. */
820 if ((errorcount || sorrycount) && uid < 0)
822 block_stmt_iterator bsi =
823 bsi_start (BASIC_BLOCK (NUM_FIXED_BLOCKS));
824 tree stmt;
826 stmt = build1 (LABEL_EXPR, void_type_node, dest);
827 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
828 uid = LABEL_DECL_UID (dest);
830 if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
831 <= (unsigned int) uid)
832 return NULL;
833 return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
836 /* Create edges for an abnormal goto statement at block BB. If FOR_CALL
837 is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */
839 void
840 make_abnormal_goto_edges (basic_block bb, bool for_call)
842 basic_block target_bb;
843 block_stmt_iterator bsi;
845 FOR_EACH_BB (target_bb)
846 for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
848 tree target = bsi_stmt (bsi);
850 if (TREE_CODE (target) != LABEL_EXPR)
851 break;
853 target = LABEL_EXPR_LABEL (target);
855 /* Make an edge to every label block that has been marked as a
856 potential target for a computed goto or a non-local goto. */
857 if ((FORCED_LABEL (target) && !for_call)
858 || (DECL_NONLOCAL (target) && for_call))
860 make_edge (bb, target_bb, EDGE_ABNORMAL);
861 break;
866 /* Create edges for a goto statement at block BB. */
868 static void
869 make_goto_expr_edges (basic_block bb)
871 block_stmt_iterator last = bsi_last (bb);
872 tree goto_t = bsi_stmt (last);
874 /* A simple GOTO creates normal edges. */
875 if (simple_goto_p (goto_t))
877 tree dest = GOTO_DESTINATION (goto_t);
878 edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
879 #ifdef USE_MAPPED_LOCATION
880 e->goto_locus = EXPR_LOCATION (goto_t);
881 #else
882 e->goto_locus = EXPR_LOCUS (goto_t);
883 #endif
884 bsi_remove (&last, true);
885 return;
888 /* A computed GOTO creates abnormal edges. */
889 make_abnormal_goto_edges (bb, false);
893 /*---------------------------------------------------------------------------
894 Flowgraph analysis
895 ---------------------------------------------------------------------------*/
897 /* Cleanup useless labels in basic blocks. This is something we wish
898 to do early because it allows us to group case labels before creating
899 the edges for the CFG, and it speeds up block statement iterators in
900 all passes later on.
901 We only run this pass once; running it more than once is probably not
902 profitable. */
904 /* A map from basic block index to the leading label of that block. */
905 static tree *label_for_bb;
907 /* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
908 static void
909 update_eh_label (struct eh_region *region)
911 tree old_label = get_eh_region_tree_label (region);
912 if (old_label)
914 tree new_label;
915 basic_block bb = label_to_block (old_label);
917 /* ??? After optimizing, there may be EH regions with labels
918 that have already been removed from the function body, so
919 there is no basic block for them. */
920 if (! bb)
921 return;
923 new_label = label_for_bb[bb->index];
924 set_eh_region_tree_label (region, new_label);
928 /* Given LABEL return the first label in the same basic block. */
929 static tree
930 main_block_label (tree label)
932 basic_block bb = label_to_block (label);
934 /* label_to_block may have inserted an undefined label into the chain. */
935 if (!label_for_bb[bb->index])
936 label_for_bb[bb->index] = label;
937 return label_for_bb[bb->index];
940 /* Cleanup redundant labels. This is a three-step process:
941 1) Find the leading label for each block.
942 2) Redirect all references to labels to the leading labels.
943 3) Cleanup all useless labels. */
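/* For example (hypothetical labels): if a block starts with the labels
   D.1234, foo and D.1236 and only foo is user-defined, foo becomes the
   leading label, every goto, switch case and EH region label that refers
   to D.1234 or D.1236 is redirected to foo, and the two artificial labels
   are then deleted.  */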
945 void
946 cleanup_dead_labels (void)
948 basic_block bb;
949 label_for_bb = XCNEWVEC (tree, last_basic_block);
951 /* Find a suitable label for each block. We use the first user-defined
952 label if there is one, or otherwise just the first label we see. */
953 FOR_EACH_BB (bb)
955 block_stmt_iterator i;
957 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
959 tree label, stmt = bsi_stmt (i);
961 if (TREE_CODE (stmt) != LABEL_EXPR)
962 break;
964 label = LABEL_EXPR_LABEL (stmt);
966 /* If we have not yet seen a label for the current block,
967 remember this one and see if there are more labels. */
968 if (! label_for_bb[bb->index])
970 label_for_bb[bb->index] = label;
971 continue;
974 /* If we did see a label for the current block already, but it
975 is an artificially created label, replace it if the current
976 label is a user defined label. */
977 if (! DECL_ARTIFICIAL (label)
978 && DECL_ARTIFICIAL (label_for_bb[bb->index]))
980 label_for_bb[bb->index] = label;
981 break;
986 /* Now redirect all jumps/branches to the selected label.
987 First do so for each block ending in a control statement. */
988 FOR_EACH_BB (bb)
990 tree stmt = last_stmt (bb);
991 if (!stmt)
992 continue;
994 switch (TREE_CODE (stmt))
996 case COND_EXPR:
998 tree true_branch, false_branch;
1000 true_branch = COND_EXPR_THEN (stmt);
1001 false_branch = COND_EXPR_ELSE (stmt);
1003 GOTO_DESTINATION (true_branch)
1004 = main_block_label (GOTO_DESTINATION (true_branch));
1005 GOTO_DESTINATION (false_branch)
1006 = main_block_label (GOTO_DESTINATION (false_branch));
1008 break;
1011 case SWITCH_EXPR:
1013 size_t i;
1014 tree vec = SWITCH_LABELS (stmt);
1015 size_t n = TREE_VEC_LENGTH (vec);
1017 /* Replace all destination labels. */
1018 for (i = 0; i < n; ++i)
1020 tree elt = TREE_VEC_ELT (vec, i);
1021 tree label = main_block_label (CASE_LABEL (elt));
1022 CASE_LABEL (elt) = label;
1024 break;
1027 /* We have to handle GOTO_EXPRs until they're removed, and we don't
1028 remove them until after we've created the CFG edges. */
1029 case GOTO_EXPR:
1030 if (! computed_goto_p (stmt))
1032 GOTO_DESTINATION (stmt)
1033 = main_block_label (GOTO_DESTINATION (stmt));
1034 break;
1037 default:
1038 break;
1042 for_each_eh_region (update_eh_label);
1044 /* Finally, purge dead labels. All user-defined labels and labels that
1045 can be the target of non-local gotos and labels which have their
1046 address taken are preserved. */
1047 FOR_EACH_BB (bb)
1049 block_stmt_iterator i;
1050 tree label_for_this_bb = label_for_bb[bb->index];
1052 if (! label_for_this_bb)
1053 continue;
1055 for (i = bsi_start (bb); !bsi_end_p (i); )
1057 tree label, stmt = bsi_stmt (i);
1059 if (TREE_CODE (stmt) != LABEL_EXPR)
1060 break;
1062 label = LABEL_EXPR_LABEL (stmt);
1064 if (label == label_for_this_bb
1065 || ! DECL_ARTIFICIAL (label)
1066 || DECL_NONLOCAL (label)
1067 || FORCED_LABEL (label))
1068 bsi_next (&i);
1069 else
1070 bsi_remove (&i, true);
1074 free (label_for_bb);
1077 /* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1078 and scan the sorted vector of cases. Combine the ones jumping to the
1079 same label.
1080 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1082 void
1083 group_case_labels (void)
1085 basic_block bb;
1087 FOR_EACH_BB (bb)
1089 tree stmt = last_stmt (bb);
1090 if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
1092 tree labels = SWITCH_LABELS (stmt);
1093 int old_size = TREE_VEC_LENGTH (labels);
1094 int i, j, new_size = old_size;
1095 tree default_case = TREE_VEC_ELT (labels, old_size - 1);
1096 tree default_label;
1098 /* The default label is always the last case in a switch
1099 statement after gimplification. */
1100 default_label = CASE_LABEL (default_case);
1102 /* Look for possible opportunities to merge cases.
1103 Ignore the last element of the label vector because it
1104 must be the default case. */
1105 i = 0;
1106 while (i < old_size - 1)
1108 tree base_case, base_label, base_high;
1109 base_case = TREE_VEC_ELT (labels, i);
1111 gcc_assert (base_case);
1112 base_label = CASE_LABEL (base_case);
1114 /* Discard cases that have the same destination as the
1115 default case. */
1116 if (base_label == default_label)
1118 TREE_VEC_ELT (labels, i) = NULL_TREE;
1119 i++;
1120 new_size--;
1121 continue;
1124 base_high = CASE_HIGH (base_case) ?
1125 CASE_HIGH (base_case) : CASE_LOW (base_case);
1126 i++;
1127 /* Try to merge case labels. Break out when we reach the end
1128 of the label vector or when we cannot merge the next case
1129 label with the current one. */
1130 while (i < old_size - 1)
1132 tree merge_case = TREE_VEC_ELT (labels, i);
1133 tree merge_label = CASE_LABEL (merge_case);
1134 tree t = int_const_binop (PLUS_EXPR, base_high,
1135 integer_one_node, 1);
1137 /* Merge the cases if they jump to the same place,
1138 and their ranges are consecutive. */
1139 if (merge_label == base_label
1140 && tree_int_cst_equal (CASE_LOW (merge_case), t))
1142 base_high = CASE_HIGH (merge_case) ?
1143 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1144 CASE_HIGH (base_case) = base_high;
1145 TREE_VEC_ELT (labels, i) = NULL_TREE;
1146 new_size--;
1147 i++;
1149 else
1150 break;
1154 /* Compress the case labels in the label vector, and adjust the
1155 length of the vector. */
1156 for (i = 0, j = 0; i < new_size; i++)
1158 while (! TREE_VEC_ELT (labels, j))
1159 j++;
1160 TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
1162 TREE_VEC_LENGTH (labels) = new_size;
1167 /* Checks whether we can merge block B into block A. */
1169 static bool
1170 tree_can_merge_blocks_p (basic_block a, basic_block b)
1172 tree stmt;
1173 block_stmt_iterator bsi;
1174 tree phi;
1176 if (!single_succ_p (a))
1177 return false;
1179 if (single_succ_edge (a)->flags & EDGE_ABNORMAL)
1180 return false;
1182 if (single_succ (a) != b)
1183 return false;
1185 if (!single_pred_p (b))
1186 return false;
1188 if (b == EXIT_BLOCK_PTR)
1189 return false;
1191 /* If A ends with a statement causing exceptions or something similar, we
1192 cannot merge the blocks. */
1193 stmt = last_stmt (a);
1194 if (stmt && stmt_ends_bb_p (stmt))
1195 return false;
1197 /* Do not allow a block with only a non-local label to be merged. */
1198 if (stmt && TREE_CODE (stmt) == LABEL_EXPR
1199 && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
1200 return false;
1202 /* It must be possible to eliminate all phi nodes in B. If ssa form
1203 is not up-to-date, we cannot eliminate any phis. */
1204 phi = phi_nodes (b);
1205 if (phi)
1207 if (need_ssa_update_p ())
1208 return false;
1210 for (; phi; phi = PHI_CHAIN (phi))
1211 if (!is_gimple_reg (PHI_RESULT (phi))
1212 && !may_propagate_copy (PHI_RESULT (phi), PHI_ARG_DEF (phi, 0)))
1213 return false;
1216 /* Do not remove user labels. */
1217 for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
1219 stmt = bsi_stmt (bsi);
1220 if (TREE_CODE (stmt) != LABEL_EXPR)
1221 break;
1222 if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
1223 return false;
1226 /* Protect the loop latches. */
1227 if (current_loops
1228 && b->loop_father->latch == b)
1229 return false;
1231 return true;
1234 /* Replaces all uses of NAME by VAL. */
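/* Besides the textual replacement, this re-folds each non-PHI statement
   that used NAME, keeps SSA_NAME_OCCURS_IN_ABNORMAL_PHI consistent for PHI
   arguments on abnormal edges, updates the EH information of the touched
   statements, and rewrites occurrences recorded in the loop structures.  */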
1236 void
1237 replace_uses_by (tree name, tree val)
1239 imm_use_iterator imm_iter;
1240 use_operand_p use;
1241 tree stmt;
1242 edge e;
1243 unsigned i;
1246 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1248 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1250 replace_exp (use, val);
1252 if (TREE_CODE (stmt) == PHI_NODE)
1254 e = PHI_ARG_EDGE (stmt, PHI_ARG_INDEX_FROM_USE (use));
1255 if (e->flags & EDGE_ABNORMAL)
1257 /* This can only occur for virtual operands, since
1258 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
1259 would prevent replacement. */
1260 gcc_assert (!is_gimple_reg (name));
1261 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1265 if (TREE_CODE (stmt) != PHI_NODE)
1267 tree rhs;
1269 fold_stmt_inplace (stmt);
1270 rhs = get_rhs (stmt);
1271 if (TREE_CODE (rhs) == ADDR_EXPR)
1272 recompute_tree_invariant_for_addr_expr (rhs);
1274 maybe_clean_or_replace_eh_stmt (stmt, stmt);
1275 mark_new_vars_to_rename (stmt);
1279 gcc_assert (num_imm_uses (name) == 0);
1281 /* Also update the trees stored in loop structures. */
1282 if (current_loops)
1284 struct loop *loop;
1286 for (i = 0; i < current_loops->num; i++)
1288 loop = current_loops->parray[i];
1289 if (loop)
1290 substitute_in_loop_info (loop, name, val);
1295 /* Merge block B into block A. */
1297 static void
1298 tree_merge_blocks (basic_block a, basic_block b)
1300 block_stmt_iterator bsi;
1301 tree_stmt_iterator last;
1302 tree phi;
1304 if (dump_file)
1305 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1307 /* Remove all single-valued PHI nodes from block B of the form
1308 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
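/* E.g. (illustrative SSA names): when B starts with  x_3 = PHI <x_1(A)>,
   all uses of x_3 are rewritten to x_1 where copy propagation is legal;
   otherwise the explicit copy  x_3 = x_1  is appended to A instead.  */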
1309 bsi = bsi_last (a);
1310 for (phi = phi_nodes (b); phi; phi = phi_nodes (b))
1312 tree def = PHI_RESULT (phi), use = PHI_ARG_DEF (phi, 0);
1313 tree copy;
1314 bool may_replace_uses = may_propagate_copy (def, use);
1316 /* In case we have loops to care about, do not propagate arguments of
1317 loop closed ssa phi nodes. */
1318 if (current_loops
1319 && is_gimple_reg (def)
1320 && TREE_CODE (use) == SSA_NAME
1321 && a->loop_father != b->loop_father)
1322 may_replace_uses = false;
1324 if (!may_replace_uses)
1326 gcc_assert (is_gimple_reg (def));
1328 /* Note that just emitting the copies is fine -- there is no problem
1329 with ordering of phi nodes. This is because A is the single
1330 predecessor of B, therefore results of the phi nodes cannot
1331 appear as arguments of the phi nodes. */
1332 copy = build2 (MODIFY_EXPR, void_type_node, def, use);
1333 bsi_insert_after (&bsi, copy, BSI_NEW_STMT);
1334 SET_PHI_RESULT (phi, NULL_TREE);
1335 SSA_NAME_DEF_STMT (def) = copy;
1337 else
1338 replace_uses_by (def, use);
1340 remove_phi_node (phi, NULL);
1343 /* Ensure that B follows A. */
1344 move_block_after (b, a);
1346 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1347 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1349 /* Remove labels from B and set bb_for_stmt to A for other statements. */
1350 for (bsi = bsi_start (b); !bsi_end_p (bsi);)
1352 if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
1354 tree label = bsi_stmt (bsi);
1356 bsi_remove (&bsi, false);
1357 /* Now that we can thread computed gotos, we might have
1358 a situation where we have a forced label in block B.
1359 However, the label at the start of block B might still be
1360 used in other ways (think about the runtime checking for
1361 Fortran assigned gotos). So we cannot just delete the
1362 label. Instead we move the label to the start of block A. */
1363 if (FORCED_LABEL (LABEL_EXPR_LABEL (label)))
1365 block_stmt_iterator dest_bsi = bsi_start (a);
1366 bsi_insert_before (&dest_bsi, label, BSI_NEW_STMT);
1369 else
1371 change_bb_for_stmt (bsi_stmt (bsi), a);
1372 bsi_next (&bsi);
1376 /* Merge the chains. */
1377 last = tsi_last (a->stmt_list);
1378 tsi_link_after (&last, b->stmt_list, TSI_NEW_STMT);
1379 b->stmt_list = NULL;
1383 /* Return the one of the two successors of BB that is not reached by a
1384 complex edge, if there is one. Else, return BB. We use
1385 this in optimizations that use post-dominators for their heuristics,
1386 to catch the cases in C++ where function calls are involved. */
1388 basic_block
1389 single_noncomplex_succ (basic_block bb)
1391 edge e0, e1;
1392 if (EDGE_COUNT (bb->succs) != 2)
1393 return bb;
1395 e0 = EDGE_SUCC (bb, 0);
1396 e1 = EDGE_SUCC (bb, 1);
1397 if (e0->flags & EDGE_COMPLEX)
1398 return e1->dest;
1399 if (e1->flags & EDGE_COMPLEX)
1400 return e0->dest;
1402 return bb;
1406 /* Walk the function tree removing unnecessary statements.
1408 * Empty statement nodes are removed
1410 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1412 * Unnecessary COND_EXPRs are removed
1414 * Some unnecessary BIND_EXPRs are removed
1416 Clearly more work could be done. The trick is doing the analysis
1417 and removal fast enough to be a net improvement in compile times.
1419 Note that when we remove a control structure such as a COND_EXPR,
1420 BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1421 to ensure we eliminate all the useless code. */
1423 struct rus_data
1425 tree *last_goto;
1426 bool repeat;
1427 bool may_throw;
1428 bool may_branch;
1429 bool has_label;
1432 static void remove_useless_stmts_1 (tree *, struct rus_data *);
1434 static bool
1435 remove_useless_stmts_warn_notreached (tree stmt)
1437 if (EXPR_HAS_LOCATION (stmt))
1439 location_t loc = EXPR_LOCATION (stmt);
1440 if (LOCATION_LINE (loc) > 0)
1442 warning (0, "%Hwill never be executed", &loc);
1443 return true;
1447 switch (TREE_CODE (stmt))
1449 case STATEMENT_LIST:
1451 tree_stmt_iterator i;
1452 for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
1453 if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
1454 return true;
1456 break;
1458 case COND_EXPR:
1459 if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
1460 return true;
1461 if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
1462 return true;
1463 if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
1464 return true;
1465 break;
1467 case TRY_FINALLY_EXPR:
1468 case TRY_CATCH_EXPR:
1469 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
1470 return true;
1471 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
1472 return true;
1473 break;
1475 case CATCH_EXPR:
1476 return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
1477 case EH_FILTER_EXPR:
1478 return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
1479 case BIND_EXPR:
1480 return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));
1482 default:
1483 /* Not a live container. */
1484 break;
1487 return false;
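/* Simplify the COND_EXPR at *STMT_P: remove it entirely when both arms are
   empty, keep only the live arm when the condition folds to a constant and
   the dead arm contains no label, and collapse the common
   "if (x) goto L; else goto L;" form to a single goto.  An arm that merely
   stores the value the condition already guarantees is emptied as well.
   DATA->repeat is set whenever a simplification was made, so that the
   whole pass is rerun.  */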
1490 static void
1491 remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
1493 tree then_clause, else_clause, cond;
1494 bool save_has_label, then_has_label, else_has_label;
1496 save_has_label = data->has_label;
1497 data->has_label = false;
1498 data->last_goto = NULL;
1500 remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);
1502 then_has_label = data->has_label;
1503 data->has_label = false;
1504 data->last_goto = NULL;
1506 remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);
1508 else_has_label = data->has_label;
1509 data->has_label = save_has_label | then_has_label | else_has_label;
1511 then_clause = COND_EXPR_THEN (*stmt_p);
1512 else_clause = COND_EXPR_ELSE (*stmt_p);
1513 cond = fold (COND_EXPR_COND (*stmt_p));
1515 /* If neither arm does anything at all, we can remove the whole IF. */
1516 if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
1518 *stmt_p = build_empty_stmt ();
1519 data->repeat = true;
1522 /* If there are no reachable statements in an arm, then we can
1523 zap the entire conditional. */
1524 else if (integer_nonzerop (cond) && !else_has_label)
1526 if (warn_notreached)
1527 remove_useless_stmts_warn_notreached (else_clause);
1528 *stmt_p = then_clause;
1529 data->repeat = true;
1531 else if (integer_zerop (cond) && !then_has_label)
1533 if (warn_notreached)
1534 remove_useless_stmts_warn_notreached (then_clause);
1535 *stmt_p = else_clause;
1536 data->repeat = true;
1539 /* Check a couple of simple things on then/else with single stmts. */
1540 else
1542 tree then_stmt = expr_only (then_clause);
1543 tree else_stmt = expr_only (else_clause);
1545 /* Notice branches to a common destination. */
1546 if (then_stmt && else_stmt
1547 && TREE_CODE (then_stmt) == GOTO_EXPR
1548 && TREE_CODE (else_stmt) == GOTO_EXPR
1549 && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
1551 *stmt_p = then_stmt;
1552 data->repeat = true;
1555 /* If the THEN/ELSE clause merely assigns a value to a variable or
1556 parameter which is already known to contain that value, then
1557 remove the useless THEN/ELSE clause. */
1558 else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1560 if (else_stmt
1561 && TREE_CODE (else_stmt) == MODIFY_EXPR
1562 && TREE_OPERAND (else_stmt, 0) == cond
1563 && integer_zerop (TREE_OPERAND (else_stmt, 1)))
1564 COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
1566 else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
1567 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1568 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1569 && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
1571 tree stmt = (TREE_CODE (cond) == EQ_EXPR
1572 ? then_stmt : else_stmt);
1573 tree *location = (TREE_CODE (cond) == EQ_EXPR
1574 ? &COND_EXPR_THEN (*stmt_p)
1575 : &COND_EXPR_ELSE (*stmt_p));
1577 if (stmt
1578 && TREE_CODE (stmt) == MODIFY_EXPR
1579 && TREE_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
1580 && TREE_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
1581 *location = alloc_stmt_list ();
1585 /* Protect GOTOs in the arm of COND_EXPRs from being removed. They
1586 would be re-introduced during lowering. */
1587 data->last_goto = NULL;
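/* Simplify the TRY_FINALLY_EXPR at *STMT_P: if the body has no side effects
   keep only the cleanup, if the cleanup has no side effects keep only the
   body, and if the body can neither branch nor throw just run body and
   cleanup in sequence without the TRY_FINALLY_EXPR wrapper.  */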
1591 static void
1592 remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
1594 bool save_may_branch, save_may_throw;
1595 bool this_may_branch, this_may_throw;
1597 /* Collect may_branch and may_throw information for the body only. */
1598 save_may_branch = data->may_branch;
1599 save_may_throw = data->may_throw;
1600 data->may_branch = false;
1601 data->may_throw = false;
1602 data->last_goto = NULL;
1604 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1606 this_may_branch = data->may_branch;
1607 this_may_throw = data->may_throw;
1608 data->may_branch |= save_may_branch;
1609 data->may_throw |= save_may_throw;
1610 data->last_goto = NULL;
1612 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1614 /* If the body is empty, then we can emit the FINALLY block without
1615 the enclosing TRY_FINALLY_EXPR. */
1616 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
1618 *stmt_p = TREE_OPERAND (*stmt_p, 1);
1619 data->repeat = true;
1622 /* If the handler is empty, then we can emit the TRY block without
1623 the enclosing TRY_FINALLY_EXPR. */
1624 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1626 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1627 data->repeat = true;
1630 /* If the body neither throws, nor branches, then we can safely
1631 string the TRY and FINALLY blocks together. */
1632 else if (!this_may_branch && !this_may_throw)
1634 tree stmt = *stmt_p;
1635 *stmt_p = TREE_OPERAND (stmt, 0);
1636 append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
1637 data->repeat = true;
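/* Simplify the TRY_CATCH_EXPR at *STMT_P: if the protected body cannot
   throw, the handler is dead and only the body is kept.  Otherwise the
   handler is scanned: a catch-all CATCH_EXPR, or an EH_FILTER_EXPR that
   must not throw or has an empty type list, means no exception escapes
   this construct, which DATA->may_throw reflects for the enclosing
   statements.  An empty cleanup handler likewise lets us keep just the
   body.  */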
1642 static void
1643 remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
1645 bool save_may_throw, this_may_throw;
1646 tree_stmt_iterator i;
1647 tree stmt;
1649 /* Collect may_throw information for the body only. */
1650 save_may_throw = data->may_throw;
1651 data->may_throw = false;
1652 data->last_goto = NULL;
1654 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1656 this_may_throw = data->may_throw;
1657 data->may_throw = save_may_throw;
1659 /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
1660 if (!this_may_throw)
1662 if (warn_notreached)
1663 remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
1664 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1665 data->repeat = true;
1666 return;
1669 /* Process the catch clause specially. We may be able to tell that
1670 no exceptions propagate past this point. */
1672 this_may_throw = true;
1673 i = tsi_start (TREE_OPERAND (*stmt_p, 1));
1674 stmt = tsi_stmt (i);
1675 data->last_goto = NULL;
1677 switch (TREE_CODE (stmt))
1679 case CATCH_EXPR:
1680 for (; !tsi_end_p (i); tsi_next (&i))
1682 stmt = tsi_stmt (i);
1683 /* If we catch all exceptions, then the body does not
1684 propagate exceptions past this point. */
1685 if (CATCH_TYPES (stmt) == NULL)
1686 this_may_throw = false;
1687 data->last_goto = NULL;
1688 remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
1690 break;
1692 case EH_FILTER_EXPR:
1693 if (EH_FILTER_MUST_NOT_THROW (stmt))
1694 this_may_throw = false;
1695 else if (EH_FILTER_TYPES (stmt) == NULL)
1696 this_may_throw = false;
1697 remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
1698 break;
1700 default:
1701 /* Otherwise this is a cleanup. */
1702 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1704 /* If the cleanup is empty, then we can emit the TRY block without
1705 the enclosing TRY_CATCH_EXPR. */
1706 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1708 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1709 data->repeat = true;
1711 break;
1713 data->may_throw |= this_may_throw;
1717 static void
1718 remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
1720 tree block;
1722 /* First remove anything underneath the BIND_EXPR. */
1723 remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);
1725 /* If the BIND_EXPR has no variables, then we can pull everything
1726 up one level and remove the BIND_EXPR, unless this is the toplevel
1727 BIND_EXPR for the current function or an inlined function.
1729 When this situation occurs we will want to apply this
1730 optimization again. */
1731 block = BIND_EXPR_BLOCK (*stmt_p);
1732 if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
1733 && *stmt_p != DECL_SAVED_TREE (current_function_decl)
1734 && (! block
1735 || ! BLOCK_ABSTRACT_ORIGIN (block)
1736 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1737 != FUNCTION_DECL)))
1739 *stmt_p = BIND_EXPR_BODY (*stmt_p);
1740 data->repeat = true;
1745 static void
1746 remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
1748 tree dest = GOTO_DESTINATION (*stmt_p);
1750 data->may_branch = true;
1751 data->last_goto = NULL;
1753 /* Record the last goto expr, so that we can delete it if unnecessary. */
1754 if (TREE_CODE (dest) == LABEL_DECL)
1755 data->last_goto = stmt_p;
1759 static void
1760 remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
1762 tree label = LABEL_EXPR_LABEL (*stmt_p);
1764 data->has_label = true;
1766 /* We do want to jump across non-local label receiver code. */
1767 if (DECL_NONLOCAL (label))
1768 data->last_goto = NULL;
1770 else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
1772 *data->last_goto = build_empty_stmt ();
1773 data->repeat = true;
1776 /* ??? Add something here to delete unused labels. */
1780 /* If the called function is "const" or "pure", clear TREE_SIDE_EFFECTS on
1781 the CALL_EXPR. This allows us to eliminate redundant or useless
1782 calls to "const" functions.
1784 The gimplifier already does the same operation, but we may notice that a
1785 function is const or pure only after its calls have been gimplified, so
1786 we need to update the flags here. */
1788 static void
1789 update_call_expr_flags (tree call)
1791 tree decl = get_callee_fndecl (call);
1792 if (!decl)
1793 return;
1794 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1795 TREE_SIDE_EFFECTS (call) = 0;
1796 if (TREE_NOTHROW (decl))
1797 TREE_NOTHROW (call) = 1;
1801 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1803 void
1804 notice_special_calls (tree t)
1806 int flags = call_expr_flags (t);
1808 if (flags & ECF_MAY_BE_ALLOCA)
1809 current_function_calls_alloca = true;
1810 if (flags & ECF_RETURNS_TWICE)
1811 current_function_calls_setjmp = true;
1815 /* Clear flags set by notice_special_calls. Used by dead code removal
1816 to update the flags. */
1818 void
1819 clear_special_calls (void)
1821 current_function_calls_alloca = false;
1822 current_function_calls_setjmp = false;
1826 static void
1827 remove_useless_stmts_1 (tree *tp, struct rus_data *data)
1829 tree t = *tp, op;
1831 switch (TREE_CODE (t))
1833 case COND_EXPR:
1834 remove_useless_stmts_cond (tp, data);
1835 break;
1837 case TRY_FINALLY_EXPR:
1838 remove_useless_stmts_tf (tp, data);
1839 break;
1841 case TRY_CATCH_EXPR:
1842 remove_useless_stmts_tc (tp, data);
1843 break;
1845 case BIND_EXPR:
1846 remove_useless_stmts_bind (tp, data);
1847 break;
1849 case GOTO_EXPR:
1850 remove_useless_stmts_goto (tp, data);
1851 break;
1853 case LABEL_EXPR:
1854 remove_useless_stmts_label (tp, data);
1855 break;
1857 case RETURN_EXPR:
1858 fold_stmt (tp);
1859 data->last_goto = NULL;
1860 data->may_branch = true;
1861 break;
1863 case CALL_EXPR:
1864 fold_stmt (tp);
1865 data->last_goto = NULL;
1866 notice_special_calls (t);
1867 update_call_expr_flags (t);
1868 if (tree_could_throw_p (t))
1869 data->may_throw = true;
1870 break;
1872 case MODIFY_EXPR:
1873 data->last_goto = NULL;
1874 fold_stmt (tp);
1875 op = get_call_expr_in (t);
1876 if (op)
1878 update_call_expr_flags (op);
1879 notice_special_calls (op);
1881 if (tree_could_throw_p (t))
1882 data->may_throw = true;
1883 break;
1885 case STATEMENT_LIST:
1887 tree_stmt_iterator i = tsi_start (t);
1888 while (!tsi_end_p (i))
1890 t = tsi_stmt (i);
1891 if (IS_EMPTY_STMT (t))
1893 tsi_delink (&i);
1894 continue;
1897 remove_useless_stmts_1 (tsi_stmt_ptr (i), data);
1899 t = tsi_stmt (i);
1900 if (TREE_CODE (t) == STATEMENT_LIST)
1902 tsi_link_before (&i, t, TSI_SAME_STMT);
1903 tsi_delink (&i);
1905 else
1906 tsi_next (&i);
1909 break;
1910 case ASM_EXPR:
1911 fold_stmt (tp);
1912 data->last_goto = NULL;
1913 break;
1915 default:
1916 data->last_goto = NULL;
1917 break;
1921 static unsigned int
1922 remove_useless_stmts (void)
1924 struct rus_data data;
1926 clear_special_calls ();
1930 memset (&data, 0, sizeof (data));
1931 remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
1933 while (data.repeat);
1934 return 0;
1938 struct tree_opt_pass pass_remove_useless_stmts =
1940 "useless", /* name */
1941 NULL, /* gate */
1942 remove_useless_stmts, /* execute */
1943 NULL, /* sub */
1944 NULL, /* next */
1945 0, /* static_pass_number */
1946 0, /* tv_id */
1947 PROP_gimple_any, /* properties_required */
1948 0, /* properties_provided */
1949 0, /* properties_destroyed */
1950 0, /* todo_flags_start */
1951 TODO_dump_func, /* todo_flags_finish */
1952 0 /* letter */
1955 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1957 static void
1958 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1960 tree phi;
1962 /* Since this block is no longer reachable, we can just delete all
1963 of its PHI nodes. */
1964 phi = phi_nodes (bb);
1965 while (phi)
1967 tree next = PHI_CHAIN (phi);
1968 remove_phi_node (phi, NULL_TREE);
1969 phi = next;
1972 /* Remove edges to BB's successors. */
1973 while (EDGE_COUNT (bb->succs) > 0)
1974 remove_edge (EDGE_SUCC (bb, 0));
1978 /* Remove statements of basic block BB. */
1980 static void
1981 remove_bb (basic_block bb)
1983 block_stmt_iterator i;
1984 #ifdef USE_MAPPED_LOCATION
1985 source_location loc = UNKNOWN_LOCATION;
1986 #else
1987 source_locus loc = 0;
1988 #endif
1990 if (dump_file)
1992 fprintf (dump_file, "Removing basic block %d\n", bb->index);
1993 if (dump_flags & TDF_DETAILS)
1995 dump_bb (bb, dump_file, 0);
1996 fprintf (dump_file, "\n");
2000 /* If we remove the header or the latch of a loop, mark the loop for
2001 removal by setting its header and latch to NULL. */
2002 if (current_loops)
2004 struct loop *loop = bb->loop_father;
2006 if (loop->latch == bb
2007 || loop->header == bb)
2009 loop->latch = NULL;
2010 loop->header = NULL;
2012 /* Also clean up the information associated with the loop. Updating
2013 it would waste time. More importantly, it may refer to ssa
2014 names that were defined in other removed basic blocks -- these
2015 ssa names are now removed and invalid. */
2016 free_numbers_of_iterations_estimates_loop (loop);
2020 /* Remove all the instructions in the block. */
2021 for (i = bsi_start (bb); !bsi_end_p (i);)
2023 tree stmt = bsi_stmt (i);
2024 if (TREE_CODE (stmt) == LABEL_EXPR
2025 && (FORCED_LABEL (LABEL_EXPR_LABEL (stmt))
2026 || DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt))))
2028 basic_block new_bb;
2029 block_stmt_iterator new_bsi;
2031 /* A non-reachable non-local label may still be referenced.
2032 But it no longer needs to carry the extra semantics of
2033 non-locality. */
2034 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
2036 DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)) = 0;
2037 FORCED_LABEL (LABEL_EXPR_LABEL (stmt)) = 1;
2040 new_bb = bb->prev_bb;
2041 new_bsi = bsi_start (new_bb);
2042 bsi_remove (&i, false);
2043 bsi_insert_before (&new_bsi, stmt, BSI_NEW_STMT);
2045 else
2047 /* Release SSA definitions if we are in SSA. Note that we
2048 may be called when not in SSA. For example,
2049 final_cleanup calls this function via
2050 cleanup_tree_cfg. */
2051 if (in_ssa_p)
2052 release_defs (stmt);
2054 bsi_remove (&i, true);
2057 /* Don't warn for removed gotos. Gotos are often removed due to
2058 jump threading, thus resulting in bogus warnings. Not great,
2059 since this way we lose warnings for gotos in the original
2060 program that are indeed unreachable. */
2061 if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
2063 #ifdef USE_MAPPED_LOCATION
2064 if (EXPR_HAS_LOCATION (stmt))
2065 loc = EXPR_LOCATION (stmt);
2066 #else
2067 source_locus t;
2068 t = EXPR_LOCUS (stmt);
2069 if (t && LOCATION_LINE (*t) > 0)
2070 loc = t;
2071 #endif
2075 /* If requested, give a warning that the first statement in the
2076 block is unreachable. We walk statements backwards in the
2077 loop above, so the last statement we process is the first statement
2078 in the block. */
2079 #ifdef USE_MAPPED_LOCATION
2080 if (loc > BUILTINS_LOCATION)
2081 warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
2082 #else
2083 if (loc)
2084 warning (OPT_Wunreachable_code, "%Hwill never be executed", loc);
2085 #endif
2087 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2091 /* Given a basic block BB ending with a COND_EXPR, SWITCH_EXPR or computed
2092 GOTO_EXPR, and a value VAL, return the edge that will be taken out of
2093 the block. If VAL does not match a unique edge, NULL is returned. */
2095 edge
2096 find_taken_edge (basic_block bb, tree val)
2098 tree stmt;
2100 stmt = last_stmt (bb);
2102 gcc_assert (stmt);
2103 gcc_assert (is_ctrl_stmt (stmt));
2104 gcc_assert (val);
2106 if (! is_gimple_min_invariant (val))
2107 return NULL;
2109 if (TREE_CODE (stmt) == COND_EXPR)
2110 return find_taken_edge_cond_expr (bb, val);
2112 if (TREE_CODE (stmt) == SWITCH_EXPR)
2113 return find_taken_edge_switch_expr (bb, val);
2115 if (computed_goto_p (stmt))
2116 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2118 gcc_unreachable ();
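/* Illustrative sketch, not part of tree-cfg.c: a typical caller of
   find_taken_edge.  BB is assumed to end in a control statement whose
   predicate has already been folded to the constant CST; the function
   name is hypothetical.  */

static basic_block
example_statically_known_successor (basic_block bb, tree cst)
{
  edge taken = find_taken_edge (bb, cst);

  /* When TAKEN is non-NULL it is the only edge that can be executed at
     run time, so its destination is the statically known successor of BB;
     the remaining outgoing edges are unreachable.  */
  return taken ? taken->dest : NULL;
}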
2121 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2122 statement, determine which of the outgoing edges will be taken out of the
2123 block. Return NULL if any edge may be taken. */
2125 static edge
2126 find_taken_edge_computed_goto (basic_block bb, tree val)
2128 basic_block dest;
2129 edge e = NULL;
2131 dest = label_to_block (val);
2132 if (dest)
2134 e = find_edge (bb, dest);
2135 gcc_assert (e != NULL);
2138 return e;
2141 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2142 statement, determine which of the two edges will be taken out of the
2143 block. Return NULL if either edge may be taken. */
2145 static edge
2146 find_taken_edge_cond_expr (basic_block bb, tree val)
2148 edge true_edge, false_edge;
2150 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2152 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2153 return (zero_p (val) ? false_edge : true_edge);
2156 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2157 statement, determine which edge will be taken out of the block. Return
2158 NULL if any edge may be taken. */
2160 static edge
2161 find_taken_edge_switch_expr (basic_block bb, tree val)
2163 tree switch_expr, taken_case;
2164 basic_block dest_bb;
2165 edge e;
2167 switch_expr = last_stmt (bb);
2168 taken_case = find_case_label_for_value (switch_expr, val);
2169 dest_bb = label_to_block (CASE_LABEL (taken_case));
2171 e = find_edge (bb, dest_bb);
2172 gcc_assert (e);
2173 return e;
2177 /* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
2178 We can make optimal use here of the fact that the case labels are
2179 sorted: We can do a binary search for a case matching VAL. */
2181 static tree
2182 find_case_label_for_value (tree switch_expr, tree val)
2184 tree vec = SWITCH_LABELS (switch_expr);
2185 size_t low, high, n = TREE_VEC_LENGTH (vec);
2186 tree default_case = TREE_VEC_ELT (vec, n - 1);
2188 for (low = -1, high = n - 1; high - low > 1; )
2190 size_t i = (high + low) / 2;
2191 tree t = TREE_VEC_ELT (vec, i);
2192 int cmp;
2194 /* Cache the result of comparing CASE_LOW and val. */
2195 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2197 if (cmp > 0)
2198 high = i;
2199 else
2200 low = i;
2202 if (CASE_HIGH (t) == NULL)
2204 /* A single-valued case label. */
2205 if (cmp == 0)
2206 return t;
2208 else
2210 /* A case range. We can only handle integer ranges. */
2211 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2212 return t;
2216 return default_case;
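/* Illustrative sketch, not part of tree-cfg.c: the same kind of range-based
   binary search that find_case_label_for_value performs, written over a
   plain sorted array so the logic is easier to follow in isolation.  The
   original keeps LOW in a size_t and starts it at -1, relying on unsigned
   wraparound; the sketch uses signed indices instead.  `struct range' and
   `find_range' are hypothetical; entries 0 .. N-2 are sorted by LOW and
   entry N-1 stands in for the default case.  */

struct range { long low, high; };

static size_t
find_range (const struct range *vec, size_t n, long val)
{
  long lo = -1, hi = (long) n - 1;

  while (hi - lo > 1)
    {
      long i = (hi + lo) / 2;

      if (vec[i].low > val)
        hi = i;
      else
        {
          lo = i;
          if (val <= vec[i].high)
            /* VAL is at least vec[i].low and at most vec[i].high, so this
               is the matching range.  */
            return (size_t) i;
        }
    }

  return n - 1;   /* No range matched; take the default.  */
}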
2222 /*---------------------------------------------------------------------------
2223 Debugging functions
2224 ---------------------------------------------------------------------------*/
2226 /* Dump tree-specific information of block BB to file OUTF. */
2228 void
2229 tree_dump_bb (basic_block bb, FILE *outf, int indent)
2231 dump_generic_bb (outf, bb, indent, TDF_VOPS);
2235 /* Dump a basic block on stderr. */
2237 void
2238 debug_tree_bb (basic_block bb)
2240 dump_bb (bb, stderr, 0);
2244 /* Dump basic block with index N on stderr. */
2246 basic_block
2247 debug_tree_bb_n (int n)
2249 debug_tree_bb (BASIC_BLOCK (n));
2250 return BASIC_BLOCK (n);
2254 /* Dump the CFG on stderr.
2256 FLAGS are the same as those used by the tree dumping functions
2257 (see TDF_* in tree-pass.h). */
2259 void
2260 debug_tree_cfg (int flags)
2262 dump_tree_cfg (stderr, flags);
2266 /* Dump the program showing basic block boundaries on the given FILE.
2268 FLAGS are the same as those used by the tree dumping functions (see TDF_* in
2269 tree.h). */
2271 void
2272 dump_tree_cfg (FILE *file, int flags)
2274 if (flags & TDF_DETAILS)
2276 const char *funcname
2277 = lang_hooks.decl_printable_name (current_function_decl, 2);
2279 fputc ('\n', file);
2280 fprintf (file, ";; Function %s\n\n", funcname);
2281 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2282 n_basic_blocks, n_edges, last_basic_block);
2284 brief_dump_cfg (file);
2285 fprintf (file, "\n");
2288 if (flags & TDF_STATS)
2289 dump_cfg_stats (file);
2291 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2295 /* Dump CFG statistics on FILE. */
2297 void
2298 dump_cfg_stats (FILE *file)
2300 static long max_num_merged_labels = 0;
2301 unsigned long size, total = 0;
2302 long num_edges;
2303 basic_block bb;
2304 const char * const fmt_str = "%-30s%-13s%12s\n";
2305 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2306 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2307 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2308 const char *funcname
2309 = lang_hooks.decl_printable_name (current_function_decl, 2);
2312 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2314 fprintf (file, "---------------------------------------------------------\n");
2315 fprintf (file, fmt_str, "", " Number of ", "Memory");
2316 fprintf (file, fmt_str, "", " instances ", "used ");
2317 fprintf (file, "---------------------------------------------------------\n");
2319 size = n_basic_blocks * sizeof (struct basic_block_def);
2320 total += size;
2321 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2322 SCALE (size), LABEL (size));
2324 num_edges = 0;
2325 FOR_EACH_BB (bb)
2326 num_edges += EDGE_COUNT (bb->succs);
2327 size = num_edges * sizeof (struct edge_def);
2328 total += size;
2329 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2331 fprintf (file, "---------------------------------------------------------\n");
2332 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2333 LABEL (total));
2334 fprintf (file, "---------------------------------------------------------\n");
2335 fprintf (file, "\n");
2337 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2338 max_num_merged_labels = cfg_stats.num_merged_labels;
2340 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2341 cfg_stats.num_merged_labels, max_num_merged_labels);
2343 fprintf (file, "\n");
2347 /* Dump CFG statistics on stderr. Keep extern so that it's always
2348 linked in the final executable. */
2350 void
2351 debug_cfg_stats (void)
2353 dump_cfg_stats (stderr);
2357 /* Dump the flowgraph to a .vcg FILE. */
2359 static void
2360 tree_cfg2vcg (FILE *file)
2362 edge e;
2363 edge_iterator ei;
2364 basic_block bb;
2365 const char *funcname
2366 = lang_hooks.decl_printable_name (current_function_decl, 2);
2368 /* Write the file header. */
2369 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2370 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2371 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2373 /* Write blocks and edges. */
2374 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2376 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2377 e->dest->index);
2379 if (e->flags & EDGE_FAKE)
2380 fprintf (file, " linestyle: dotted priority: 10");
2381 else
2382 fprintf (file, " linestyle: solid priority: 100");
2384 fprintf (file, " }\n");
2386 fputc ('\n', file);
2388 FOR_EACH_BB (bb)
2390 enum tree_code head_code, end_code;
2391 const char *head_name, *end_name;
2392 int head_line = 0;
2393 int end_line = 0;
2394 tree first = first_stmt (bb);
2395 tree last = last_stmt (bb);
2397 if (first)
2399 head_code = TREE_CODE (first);
2400 head_name = tree_code_name[head_code];
2401 head_line = get_lineno (first);
2403 else
2404 head_name = "no-statement";
2406 if (last)
2408 end_code = TREE_CODE (last);
2409 end_name = tree_code_name[end_code];
2410 end_line = get_lineno (last);
2412 else
2413 end_name = "no-statement";
2415 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2416 bb->index, bb->index, head_name, head_line, end_name,
2417 end_line);
2419 FOR_EACH_EDGE (e, ei, bb->succs)
2421 if (e->dest == EXIT_BLOCK_PTR)
2422 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2423 else
2424 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2426 if (e->flags & EDGE_FAKE)
2427 fprintf (file, " priority: 10 linestyle: dotted");
2428 else
2429 fprintf (file, " priority: 100 linestyle: solid");
2431 fprintf (file, " }\n");
2434 if (bb->next_bb != EXIT_BLOCK_PTR)
2435 fputc ('\n', file);
2438 fputs ("}\n\n", file);
2443 /*---------------------------------------------------------------------------
2444 Miscellaneous helpers
2445 ---------------------------------------------------------------------------*/
2447 /* Return true if T represents a stmt that always transfers control. */
2449 bool
2450 is_ctrl_stmt (tree t)
2452 return (TREE_CODE (t) == COND_EXPR
2453 || TREE_CODE (t) == SWITCH_EXPR
2454 || TREE_CODE (t) == GOTO_EXPR
2455 || TREE_CODE (t) == RETURN_EXPR
2456 || TREE_CODE (t) == RESX_EXPR);
2460 /* Return true if T is a statement that may alter the flow of control
2461 (e.g., a call to a non-returning function). */
2463 bool
2464 is_ctrl_altering_stmt (tree t)
2466 tree call;
2468 gcc_assert (t);
2469 call = get_call_expr_in (t);
2470 if (call)
2472 /* A non-pure/const CALL_EXPR alters flow control if the current
2473 function has nonlocal labels. */
2474 if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
2475 return true;
2477 /* A CALL_EXPR also alters control flow if it does not return. */
2478 if (call_expr_flags (call) & ECF_NORETURN)
2479 return true;
2482 /* OpenMP directives alter control flow. */
2483 if (OMP_DIRECTIVE_P (t))
2484 return true;
2486 /* If a statement can throw, it alters control flow. */
2487 return tree_can_throw_internal (t);
2491 /* Return true if T is a computed goto. */
2493 bool
2494 computed_goto_p (tree t)
2496 return (TREE_CODE (t) == GOTO_EXPR
2497 && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
2501 /* Return true if T is a simple local goto. */
2503 bool
2504 simple_goto_p (tree t)
2506 return (TREE_CODE (t) == GOTO_EXPR
2507 && TREE_CODE (GOTO_DESTINATION (t)) == LABEL_DECL);
2511 /* Return true if T can make an abnormal transfer of control flow.
2512 Transfers of control flow associated with EH are excluded. */
2514 bool
2515 tree_can_make_abnormal_goto (tree t)
2517 if (computed_goto_p (t))
2518 return true;
2519 if (TREE_CODE (t) == MODIFY_EXPR)
2520 t = TREE_OPERAND (t, 1);
2521 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2522 t = TREE_OPERAND (t, 0);
2523 if (TREE_CODE (t) == CALL_EXPR)
2524 return TREE_SIDE_EFFECTS (t) && current_function_has_nonlocal_label;
2525 return false;
2529 /* Return true if T should start a new basic block. PREV_T is the
2530 statement preceding T. It is used when T is a label or a case label.
2531 Labels should only start a new basic block if their previous statement
2532 wasn't a label. Otherwise, a sequence of labels would generate
2533 unnecessary basic blocks that only contain a single label. */
2535 static inline bool
2536 stmt_starts_bb_p (tree t, tree prev_t)
2538 if (t == NULL_TREE)
2539 return false;
2541 /* LABEL_EXPRs start a new basic block only if the preceding
2542 statement wasn't a label of the same type. This prevents the
2543 creation of consecutive blocks that have nothing but a single
2544 label. */
2545 if (TREE_CODE (t) == LABEL_EXPR)
2547 /* Nonlocal and computed GOTO targets always start a new block. */
2548 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
2549 || FORCED_LABEL (LABEL_EXPR_LABEL (t)))
2550 return true;
2552 if (prev_t && TREE_CODE (prev_t) == LABEL_EXPR)
2554 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
2555 return true;
2557 cfg_stats.num_merged_labels++;
2558 return false;
2560 else
2561 return true;
2564 return false;
2568 /* Return true if T should end a basic block. */
2570 bool
2571 stmt_ends_bb_p (tree t)
2573 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
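/* Illustrative example, not taken from the sources: in the lowered sequence

       a = b + 1;
       if (a > 0) goto L1; else goto L2;
       L1:  x = 1;
       L2:  return x;

   the COND_EXPR satisfies stmt_ends_bb_p and terminates the first block,
   while each of L1 and L2 satisfies stmt_starts_bb_p, so the sequence is
   split into three basic blocks.  */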
2577 /* Add gotos that used to be represented implicitly in the CFG. */
2579 void
2580 disband_implicit_edges (void)
2582 basic_block bb;
2583 block_stmt_iterator last;
2584 edge e;
2585 edge_iterator ei;
2586 tree stmt, label;
2588 FOR_EACH_BB (bb)
2590 last = bsi_last (bb);
2591 stmt = last_stmt (bb);
2593 if (stmt && TREE_CODE (stmt) == COND_EXPR)
2595 /* Remove superfluous gotos from COND_EXPR branches. Moved
2596 from cfg_remove_useless_stmts here since it violates the
2597 invariants for tree--cfg correspondence and thus fits better
2598 here where we do it anyway. */
2599 e = find_edge (bb, bb->next_bb);
2600 if (e)
2602 if (e->flags & EDGE_TRUE_VALUE)
2603 COND_EXPR_THEN (stmt) = build_empty_stmt ();
2604 else if (e->flags & EDGE_FALSE_VALUE)
2605 COND_EXPR_ELSE (stmt) = build_empty_stmt ();
2606 else
2607 gcc_unreachable ();
2608 e->flags |= EDGE_FALLTHRU;
2611 continue;
2614 if (stmt && TREE_CODE (stmt) == RETURN_EXPR)
2616 /* Remove the RETURN_EXPR if we may fall through to the exit
2617 instead. */
2618 gcc_assert (single_succ_p (bb));
2619 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
2621 if (bb->next_bb == EXIT_BLOCK_PTR
2622 && !TREE_OPERAND (stmt, 0))
2624 bsi_remove (&last, true);
2625 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
2627 continue;
2630 /* There can be no fallthru edge if the last statement is a control
2631 one. */
2632 if (stmt && is_ctrl_stmt (stmt))
2633 continue;
2635 /* Find a fallthru edge and emit the goto if necessary. */
2636 FOR_EACH_EDGE (e, ei, bb->succs)
2637 if (e->flags & EDGE_FALLTHRU)
2638 break;
2640 if (!e || e->dest == bb->next_bb)
2641 continue;
2643 gcc_assert (e->dest != EXIT_BLOCK_PTR);
2644 label = tree_block_label (e->dest);
2646 stmt = build1 (GOTO_EXPR, void_type_node, label);
2647 #ifdef USE_MAPPED_LOCATION
2648 SET_EXPR_LOCATION (stmt, e->goto_locus);
2649 #else
2650 SET_EXPR_LOCUS (stmt, e->goto_locus);
2651 #endif
2652 bsi_insert_after (&last, stmt, BSI_NEW_STMT);
2653 e->flags &= ~EDGE_FALLTHRU;
2657 /* Remove block annotations and other data structures. */
2659 void
2660 delete_tree_cfg_annotations (void)
2662 label_to_block_map = NULL;
2666 /* Return the first statement in basic block BB. */
2668 tree
2669 first_stmt (basic_block bb)
2671 block_stmt_iterator i = bsi_start (bb);
2672 return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
2676 /* Return the last statement in basic block BB. */
2678 tree
2679 last_stmt (basic_block bb)
2681 block_stmt_iterator b = bsi_last (bb);
2682 return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
2686 /* Return a pointer to the last statement in block BB. */
2688 tree *
2689 last_stmt_ptr (basic_block bb)
2691 block_stmt_iterator last = bsi_last (bb);
2692 return !bsi_end_p (last) ? bsi_stmt_ptr (last) : NULL;
2696 /* Return the last statement of an otherwise empty block. Return NULL
2697 if the block is totally empty, or if it contains more than one
2698 statement. */
2700 tree
2701 last_and_only_stmt (basic_block bb)
2703 block_stmt_iterator i = bsi_last (bb);
2704 tree last, prev;
2706 if (bsi_end_p (i))
2707 return NULL_TREE;
2709 last = bsi_stmt (i);
2710 bsi_prev (&i);
2711 if (bsi_end_p (i))
2712 return last;
2714 /* Empty statements should no longer appear in the instruction stream.
2715 Everything that might have appeared before should be deleted by
2716 remove_useless_stmts, and the optimizers should just use bsi_remove
2717 instead of smashing with build_empty_stmt.
2719 Thus the only thing that should appear here in a block containing
2720 one executable statement is a label. */
2721 prev = bsi_stmt (i);
2722 if (TREE_CODE (prev) == LABEL_EXPR)
2723 return last;
2724 else
2725 return NULL_TREE;
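/* Illustrative sketch, not part of tree-cfg.c: the iteration idiom behind
   the accessors above, here just counting the non-label statements of BB.
   The function name is hypothetical.  */

static unsigned
example_count_real_stmts (basic_block bb)
{
  block_stmt_iterator bsi;
  unsigned count = 0;

  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
    if (TREE_CODE (bsi_stmt (bsi)) != LABEL_EXPR)
      count++;

  return count;
}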
2729 /* Mark BB as the basic block holding statement T. */
2731 void
2732 set_bb_for_stmt (tree t, basic_block bb)
2734 if (TREE_CODE (t) == PHI_NODE)
2735 PHI_BB (t) = bb;
2736 else if (TREE_CODE (t) == STATEMENT_LIST)
2738 tree_stmt_iterator i;
2739 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2740 set_bb_for_stmt (tsi_stmt (i), bb);
2742 else
2744 stmt_ann_t ann = get_stmt_ann (t);
2745 ann->bb = bb;
2747 /* If the statement is a label, add the label to the block-to-labels map
2748 so that we can speed up edge creation for GOTO_EXPRs. */
2749 if (TREE_CODE (t) == LABEL_EXPR)
2751 int uid;
2753 t = LABEL_EXPR_LABEL (t);
2754 uid = LABEL_DECL_UID (t);
2755 if (uid == -1)
2757 unsigned old_len = VEC_length (basic_block, label_to_block_map);
2758 LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
2759 if (old_len <= (unsigned) uid)
2761 basic_block *addr;
2762 unsigned new_len = 3 * uid / 2;
2764 VEC_safe_grow (basic_block, gc, label_to_block_map,
2765 new_len);
2766 addr = VEC_address (basic_block, label_to_block_map);
2767 memset (&addr[old_len],
2768 0, sizeof (basic_block) * (new_len - old_len));
2771 else
2772 /* We're moving an existing label. Make sure that we've
2773 removed it from the old block. */
2774 gcc_assert (!bb
2775 || !VEC_index (basic_block, label_to_block_map, uid));
2776 VEC_replace (basic_block, label_to_block_map, uid, bb);
2781 /* Faster version of set_bb_for_stmt that assumes the statement is being moved
2782 from one basic block to another.
2783 For BB splitting we can run into a quadratic case, so performance is quite
2784 important and, since the tables are known to be big enough, change_bb_for_stmt
2785 can be inlined as a leaf function. */
2786 static inline void
2787 change_bb_for_stmt (tree t, basic_block bb)
2789 get_stmt_ann (t)->bb = bb;
2790 if (TREE_CODE (t) == LABEL_EXPR)
2791 VEC_replace (basic_block, label_to_block_map,
2792 LABEL_DECL_UID (LABEL_EXPR_LABEL (t)), bb);
2795 /* Finds iterator for STMT. */
2797 extern block_stmt_iterator
2798 bsi_for_stmt (tree stmt)
2800 block_stmt_iterator bsi;
2802 for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
2803 if (bsi_stmt (bsi) == stmt)
2804 return bsi;
2806 gcc_unreachable ();
2809 /* Mark statement T as modified, and update it. */
2810 static inline void
2811 update_modified_stmts (tree t)
2813 if (TREE_CODE (t) == STATEMENT_LIST)
2815 tree_stmt_iterator i;
2816 tree stmt;
2817 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2819 stmt = tsi_stmt (i);
2820 update_stmt_if_modified (stmt);
2823 else
2824 update_stmt_if_modified (t);
2827 /* Insert statement (or statement list) T before the statement
2828 pointed-to by iterator I. M specifies how to update iterator I
2829 after insertion (see enum bsi_iterator_update). */
2831 void
2832 bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2834 set_bb_for_stmt (t, i->bb);
2835 update_modified_stmts (t);
2836 tsi_link_before (&i->tsi, t, m);
2840 /* Insert statement (or statement list) T after the statement
2841 pointed-to by iterator I. M specifies how to update iterator I
2842 after insertion (see enum bsi_iterator_update). */
2844 void
2845 bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2847 set_bb_for_stmt (t, i->bb);
2848 update_modified_stmts (t);
2849 tsi_link_after (&i->tsi, t, m);
2853 /* Remove the statement pointed to by iterator I. The iterator is updated
2854 to the next statement.
2856 When REMOVE_EH_INFO is true we remove the statement pointed to by
2857 iterator I from the EH tables. Otherwise we do not modify the EH
2858 tables.
2860 Generally, REMOVE_EH_INFO should be true when the statement is going to
2861 be removed from the IL and not reinserted elsewhere. */
2863 void
2864 bsi_remove (block_stmt_iterator *i, bool remove_eh_info)
2866 tree t = bsi_stmt (*i);
2867 set_bb_for_stmt (t, NULL);
2868 delink_stmt_imm_use (t);
2869 tsi_delink (&i->tsi);
2870 mark_stmt_modified (t);
2871 if (remove_eh_info)
2872 remove_stmt_from_eh_region (t);
2876 /* Move the statement at FROM so it comes right after the statement at TO. */
2878 void
2879 bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
2881 tree stmt = bsi_stmt (*from);
2882 bsi_remove (from, false);
2883 bsi_insert_after (to, stmt, BSI_SAME_STMT);
2887 /* Move the statement at FROM so it comes right before the statement at TO. */
2889 void
2890 bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
2892 tree stmt = bsi_stmt (*from);
2893 bsi_remove (from, false);
2894 bsi_insert_before (to, stmt, BSI_SAME_STMT);
2898 /* Move the statement at FROM to the end of basic block BB. */
2900 void
2901 bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
2903 block_stmt_iterator last = bsi_last (bb);
2905 /* Have to check bsi_end_p because it could be an empty block. */
2906 if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
2907 bsi_move_before (from, &last);
2908 else
2909 bsi_move_after (from, &last);
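/* Illustrative sketch, not part of tree-cfg.c: moving one statement so that
   it immediately precedes another, using the iterator helpers above.  STMT
   and BEFORE are assumed to already be linked into basic blocks; the
   function name is hypothetical.  */

static void
example_move_stmt_before (tree stmt, tree before)
{
  block_stmt_iterator from = bsi_for_stmt (stmt);
  block_stmt_iterator to = bsi_for_stmt (before);

  /* bsi_move_before unlinks STMT from its current block and re-inserts it
     just before BEFORE, keeping the statement-to-block mapping up to date.  */
  bsi_move_before (&from, &to);
}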
2913 /* Replace the contents of the statement pointed to by iterator BSI
2914 with STMT. If UPDATE_EH_INFO is true, the exception handling
2915 information of the original statement is moved to the new statement. */
2917 void
2918 bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool update_eh_info)
2920 int eh_region;
2921 tree orig_stmt = bsi_stmt (*bsi);
2923 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
2924 set_bb_for_stmt (stmt, bsi->bb);
2926 /* Preserve EH region information from the original statement, if
2927 requested by the caller. */
2928 if (update_eh_info)
2930 eh_region = lookup_stmt_eh_region (orig_stmt);
2931 if (eh_region >= 0)
2933 remove_stmt_from_eh_region (orig_stmt);
2934 add_stmt_to_eh_region (stmt, eh_region);
2938 delink_stmt_imm_use (orig_stmt);
2939 *bsi_stmt_ptr (*bsi) = stmt;
2940 mark_stmt_modified (stmt);
2941 update_modified_stmts (stmt);
2945 /* Insert the statement pointed-to by BSI into edge E. Every attempt
2946 is made to place the statement in an existing basic block, but
2947 sometimes that isn't possible. When it isn't possible, the edge is
2948 split and the statement is added to the new block.
2950 In all cases, the returned *BSI points to the correct location. The
2951 return value is true if insertion should be done after the location,
2952 or false if it should be done before the location. If a new basic block
2953 has to be created, it is stored in *NEW_BB. */
2955 static bool
2956 tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
2957 basic_block *new_bb)
2959 basic_block dest, src;
2960 tree tmp;
2962 dest = e->dest;
2963 restart:
2965 /* If the destination has one predecessor which has no PHI nodes,
2966 insert there. Except for the exit block.
2968 The requirement for no PHI nodes could be relaxed. Basically we
2969 would have to examine the PHIs to prove that none of them used
2970 the value set by the statement we want to insert on E. That
2971 hardly seems worth the effort. */
2972 if (single_pred_p (dest)
2973 && ! phi_nodes (dest)
2974 && dest != EXIT_BLOCK_PTR)
2976 *bsi = bsi_start (dest);
2977 if (bsi_end_p (*bsi))
2978 return true;
2980 /* Make sure we insert after any leading labels. */
2981 tmp = bsi_stmt (*bsi);
2982 while (TREE_CODE (tmp) == LABEL_EXPR)
2984 bsi_next (bsi);
2985 if (bsi_end_p (*bsi))
2986 break;
2987 tmp = bsi_stmt (*bsi);
2990 if (bsi_end_p (*bsi))
2992 *bsi = bsi_last (dest);
2993 return true;
2995 else
2996 return false;
2999 /* If the source has one successor, the edge is not abnormal and
3000 the last statement does not end a basic block, insert there.
3001 Except for the entry block. */
3002 src = e->src;
3003 if ((e->flags & EDGE_ABNORMAL) == 0
3004 && single_succ_p (src)
3005 && src != ENTRY_BLOCK_PTR)
3007 *bsi = bsi_last (src);
3008 if (bsi_end_p (*bsi))
3009 return true;
3011 tmp = bsi_stmt (*bsi);
3012 if (!stmt_ends_bb_p (tmp))
3013 return true;
3015 /* Insert code just before returning the value. We may need to decompose
3016 the return in case it contains a non-trivial operand. */
3017 if (TREE_CODE (tmp) == RETURN_EXPR)
3019 tree op = TREE_OPERAND (tmp, 0);
3020 if (op && !is_gimple_val (op))
3022 gcc_assert (TREE_CODE (op) == MODIFY_EXPR);
3023 bsi_insert_before (bsi, op, BSI_NEW_STMT);
3024 TREE_OPERAND (tmp, 0) = TREE_OPERAND (op, 0);
3026 bsi_prev (bsi);
3027 return true;
3031 /* Otherwise, create a new basic block, and split this edge. */
3032 dest = split_edge (e);
3033 if (new_bb)
3034 *new_bb = dest;
3035 e = single_pred_edge (dest);
3036 goto restart;
3040 /* This routine will commit all pending edge insertions, creating any new
3041 basic blocks which are necessary. */
3043 void
3044 bsi_commit_edge_inserts (void)
3046 basic_block bb;
3047 edge e;
3048 edge_iterator ei;
3050 bsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR), NULL);
3052 FOR_EACH_BB (bb)
3053 FOR_EACH_EDGE (e, ei, bb->succs)
3054 bsi_commit_one_edge_insert (e, NULL);
3058 /* Commit insertions pending at edge E. If a new block is created, set NEW_BB
3059 to this block, otherwise set it to NULL. */
3061 void
3062 bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
3064 if (new_bb)
3065 *new_bb = NULL;
3066 if (PENDING_STMT (e))
3068 block_stmt_iterator bsi;
3069 tree stmt = PENDING_STMT (e);
3071 PENDING_STMT (e) = NULL_TREE;
3073 if (tree_find_edge_insert_loc (e, &bsi, new_bb))
3074 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3075 else
3076 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3081 /* Add STMT to the pending list of edge E. No actual insertion is
3082 made until a call to bsi_commit_edge_inserts () is made. */
3084 void
3085 bsi_insert_on_edge (edge e, tree stmt)
3087 append_to_statement_list (stmt, &PENDING_STMT (e));
3090 /* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts. If a new
3091 block has to be created, it is returned. */
3093 basic_block
3094 bsi_insert_on_edge_immediate (edge e, tree stmt)
3096 block_stmt_iterator bsi;
3097 basic_block new_bb = NULL;
3099 gcc_assert (!PENDING_STMT (e));
3101 if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
3102 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3103 else
3104 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3106 return new_bb;
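/* Illustrative sketch, not part of tree-cfg.c: typical use of the deferred
   edge-insertion API above.  STMT_FOR is a hypothetical callback that builds
   a fresh statement to place on edge E; the function name is hypothetical
   as well.  */

static void
example_instrument_edges (tree (*stmt_for) (edge))
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* Queue a statement on every outgoing edge; nothing is inserted yet.  */
  FOR_EACH_BB (bb)
    FOR_EACH_EDGE (e, ei, bb->succs)
      bsi_insert_on_edge (e, stmt_for (e));

  /* Commit all pending insertions at once; edges are split only where
     tree_find_edge_insert_loc cannot reuse an existing block.  */
  bsi_commit_edge_inserts ();
}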
3109 /*---------------------------------------------------------------------------
3110 Tree specific functions for CFG manipulation
3111 ---------------------------------------------------------------------------*/
3113 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
3115 static void
3116 reinstall_phi_args (edge new_edge, edge old_edge)
3118 tree var, phi;
3120 if (!PENDING_STMT (old_edge))
3121 return;
3123 for (var = PENDING_STMT (old_edge), phi = phi_nodes (new_edge->dest);
3124 var && phi;
3125 var = TREE_CHAIN (var), phi = PHI_CHAIN (phi))
3127 tree result = TREE_PURPOSE (var);
3128 tree arg = TREE_VALUE (var);
3130 gcc_assert (result == PHI_RESULT (phi));
3132 add_phi_arg (phi, arg, new_edge);
3135 PENDING_STMT (old_edge) = NULL;
3138 /* Returns the basic block after which the new basic block created
3139 by splitting edge EDGE_IN should be placed. Tries to keep the new block
3140 near its "logical" location. This is of most help to humans looking
3141 at debugging dumps. */
3143 static basic_block
3144 split_edge_bb_loc (edge edge_in)
3146 basic_block dest = edge_in->dest;
3148 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
3149 return edge_in->src;
3150 else
3151 return dest->prev_bb;
3154 /* Split a (typically critical) edge EDGE_IN. Return the new block.
3155 Abort on abnormal edges. */
3157 static basic_block
3158 tree_split_edge (edge edge_in)
3160 basic_block new_bb, after_bb, dest;
3161 edge new_edge, e;
3163 /* Abnormal edges cannot be split. */
3164 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
3166 dest = edge_in->dest;
3168 after_bb = split_edge_bb_loc (edge_in);
3170 new_bb = create_empty_bb (after_bb);
3171 new_bb->frequency = EDGE_FREQUENCY (edge_in);
3172 new_bb->count = edge_in->count;
3173 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
3174 new_edge->probability = REG_BR_PROB_BASE;
3175 new_edge->count = edge_in->count;
3177 e = redirect_edge_and_branch (edge_in, new_bb);
3178 gcc_assert (e);
3179 reinstall_phi_args (new_edge, e);
3181 return new_bb;
3185 /* Return true when BB has label LABEL in it. */
3187 static bool
3188 has_label_p (basic_block bb, tree label)
3190 block_stmt_iterator bsi;
3192 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3194 tree stmt = bsi_stmt (bsi);
3196 if (TREE_CODE (stmt) != LABEL_EXPR)
3197 return false;
3198 if (LABEL_EXPR_LABEL (stmt) == label)
3199 return true;
3201 return false;
3205 /* Callback for walk_tree, check that all elements whose address is taken
3206 are properly noticed as such. DATA is non-null if TP was seen inside
3207 a PHI node. */
3209 static tree
3210 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3212 tree t = *tp, x;
3213 bool in_phi = (data != NULL);
3215 if (TYPE_P (t))
3216 *walk_subtrees = 0;
3218 /* Check operand N for being valid GIMPLE and give error MSG if not. */
3219 #define CHECK_OP(N, MSG) \
3220 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
3221 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
3223 switch (TREE_CODE (t))
3225 case SSA_NAME:
3226 if (SSA_NAME_IN_FREE_LIST (t))
3228 error ("SSA name in freelist but still referenced");
3229 return *tp;
3231 break;
3233 case ASSERT_EXPR:
3234 x = fold (ASSERT_EXPR_COND (t));
3235 if (x == boolean_false_node)
3237 error ("ASSERT_EXPR with an always-false condition");
3238 return *tp;
3240 break;
3242 case MODIFY_EXPR:
3243 x = TREE_OPERAND (t, 0);
3244 if (TREE_CODE (x) == BIT_FIELD_REF
3245 && is_gimple_reg (TREE_OPERAND (x, 0)))
3247 error ("GIMPLE register modified with BIT_FIELD_REF");
3248 return t;
3250 break;
3252 case ADDR_EXPR:
3254 bool old_invariant;
3255 bool old_constant;
3256 bool old_side_effects;
3257 bool new_invariant;
3258 bool new_constant;
3259 bool new_side_effects;
3261 /* ??? tree-ssa-alias.c may have overlooked dead PHI nodes, missing
3262 dead PHIs that take the address of something. But if the PHI
3263 result is dead, the fact that it takes the address of anything
3264 is irrelevant. Because we cannot tell from here if a PHI result
3265 is dead, we just skip this check for PHIs altogether. This means
3266 we may be missing "valid" checks, but what can you do?
3267 This was PR19217. */
3268 if (in_phi)
3269 break;
3271 old_invariant = TREE_INVARIANT (t);
3272 old_constant = TREE_CONSTANT (t);
3273 old_side_effects = TREE_SIDE_EFFECTS (t);
3275 recompute_tree_invariant_for_addr_expr (t);
3276 new_invariant = TREE_INVARIANT (t);
3277 new_side_effects = TREE_SIDE_EFFECTS (t);
3278 new_constant = TREE_CONSTANT (t);
3280 if (old_invariant != new_invariant)
3282 error ("invariant not recomputed when ADDR_EXPR changed");
3283 return t;
3286 if (old_constant != new_constant)
3288 error ("constant not recomputed when ADDR_EXPR changed");
3289 return t;
3291 if (old_side_effects != new_side_effects)
3293 error ("side effects not recomputed when ADDR_EXPR changed");
3294 return t;
3297 /* Skip any references (they will be checked when we recurse down the
3298 tree) and ensure that any variable used as a prefix is marked
3299 addressable. */
3300 for (x = TREE_OPERAND (t, 0);
3301 handled_component_p (x);
3302 x = TREE_OPERAND (x, 0))
3305 if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
3306 return NULL;
3307 if (!TREE_ADDRESSABLE (x))
3309 error ("address taken, but ADDRESSABLE bit not set");
3310 return x;
3312 break;
3315 case COND_EXPR:
3316 x = COND_EXPR_COND (t);
3317 if (TREE_CODE (TREE_TYPE (x)) != BOOLEAN_TYPE)
3319 error ("non-boolean used in condition");
3320 return x;
3322 if (!is_gimple_condexpr (x))
3324 error ("invalid conditional operand");
3325 return x;
3327 break;
3329 case NOP_EXPR:
3330 case CONVERT_EXPR:
3331 case FIX_TRUNC_EXPR:
3332 case FIX_CEIL_EXPR:
3333 case FIX_FLOOR_EXPR:
3334 case FIX_ROUND_EXPR:
3335 case FLOAT_EXPR:
3336 case NEGATE_EXPR:
3337 case ABS_EXPR:
3338 case BIT_NOT_EXPR:
3339 case NON_LVALUE_EXPR:
3340 case TRUTH_NOT_EXPR:
3341 CHECK_OP (0, "invalid operand to unary operator");
3342 break;
3344 case REALPART_EXPR:
3345 case IMAGPART_EXPR:
3346 case COMPONENT_REF:
3347 case ARRAY_REF:
3348 case ARRAY_RANGE_REF:
3349 case BIT_FIELD_REF:
3350 case VIEW_CONVERT_EXPR:
3351 /* We have a nest of references. Verify that each of the operands
3352 that determine where to reference is either a constant or a variable,
3353 verify that the base is valid, and then show we've already checked
3354 the subtrees. */
3355 while (handled_component_p (t))
3357 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3358 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3359 else if (TREE_CODE (t) == ARRAY_REF
3360 || TREE_CODE (t) == ARRAY_RANGE_REF)
3362 CHECK_OP (1, "invalid array index");
3363 if (TREE_OPERAND (t, 2))
3364 CHECK_OP (2, "invalid array lower bound");
3365 if (TREE_OPERAND (t, 3))
3366 CHECK_OP (3, "invalid array stride");
3368 else if (TREE_CODE (t) == BIT_FIELD_REF)
3370 CHECK_OP (1, "invalid operand to BIT_FIELD_REF");
3371 CHECK_OP (2, "invalid operand to BIT_FIELD_REF");
3374 t = TREE_OPERAND (t, 0);
3377 if (!CONSTANT_CLASS_P (t) && !is_gimple_lvalue (t))
3379 error ("invalid reference prefix");
3380 return t;
3382 *walk_subtrees = 0;
3383 break;
3385 case LT_EXPR:
3386 case LE_EXPR:
3387 case GT_EXPR:
3388 case GE_EXPR:
3389 case EQ_EXPR:
3390 case NE_EXPR:
3391 case UNORDERED_EXPR:
3392 case ORDERED_EXPR:
3393 case UNLT_EXPR:
3394 case UNLE_EXPR:
3395 case UNGT_EXPR:
3396 case UNGE_EXPR:
3397 case UNEQ_EXPR:
3398 case LTGT_EXPR:
3399 case PLUS_EXPR:
3400 case MINUS_EXPR:
3401 case MULT_EXPR:
3402 case TRUNC_DIV_EXPR:
3403 case CEIL_DIV_EXPR:
3404 case FLOOR_DIV_EXPR:
3405 case ROUND_DIV_EXPR:
3406 case TRUNC_MOD_EXPR:
3407 case CEIL_MOD_EXPR:
3408 case FLOOR_MOD_EXPR:
3409 case ROUND_MOD_EXPR:
3410 case RDIV_EXPR:
3411 case EXACT_DIV_EXPR:
3412 case MIN_EXPR:
3413 case MAX_EXPR:
3414 case LSHIFT_EXPR:
3415 case RSHIFT_EXPR:
3416 case LROTATE_EXPR:
3417 case RROTATE_EXPR:
3418 case BIT_IOR_EXPR:
3419 case BIT_XOR_EXPR:
3420 case BIT_AND_EXPR:
3421 CHECK_OP (0, "invalid operand to binary operator");
3422 CHECK_OP (1, "invalid operand to binary operator");
3423 break;
3425 default:
3426 break;
3428 return NULL;
3430 #undef CHECK_OP
3434 /* Verify STMT, return true if STMT is not in GIMPLE form.
3435 TODO: Implement type checking. */
3437 static bool
3438 verify_stmt (tree stmt, bool last_in_block)
3440 tree addr;
3442 if (OMP_DIRECTIVE_P (stmt))
3444 /* OpenMP directives are validated by the FE and never operated
3445 on by the optimizers. Furthermore, OMP_FOR may contain
3446 non-gimple expressions when the main index variable has had
3447 its address taken. This does not affect the loop itself
3448 because the header of an OMP_FOR is merely used to determine
3449 how to set up the parallel iteration. */
3450 return false;
3453 if (!is_gimple_stmt (stmt))
3455 error ("is not a valid GIMPLE statement");
3456 goto fail;
3459 addr = walk_tree (&stmt, verify_expr, NULL, NULL);
3460 if (addr)
3462 debug_generic_stmt (addr);
3463 return true;
3466 /* If the statement is marked as part of an EH region, then it is
3467 expected that the statement could throw. Verify that when we
3468 have optimizations that simplify statements such that we prove
3469 that they cannot throw, that we update other data structures
3470 to match. */
3471 if (lookup_stmt_eh_region (stmt) >= 0)
3473 if (!tree_could_throw_p (stmt))
3475 error ("statement marked for throw, but doesn%'t");
3476 goto fail;
3478 if (!last_in_block && tree_can_throw_internal (stmt))
3480 error ("statement marked for throw in middle of block");
3481 goto fail;
3485 return false;
3487 fail:
3488 debug_generic_stmt (stmt);
3489 return true;
3493 /* Return true when T can be shared. */
3495 static bool
3496 tree_node_can_be_shared (tree t)
3498 if (IS_TYPE_OR_DECL_P (t)
3499 || is_gimple_min_invariant (t)
3500 || TREE_CODE (t) == SSA_NAME
3501 || t == error_mark_node
3502 || TREE_CODE (t) == IDENTIFIER_NODE)
3503 return true;
3505 if (TREE_CODE (t) == CASE_LABEL_EXPR)
3506 return true;
3508 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3509 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
3510 || TREE_CODE (t) == COMPONENT_REF
3511 || TREE_CODE (t) == REALPART_EXPR
3512 || TREE_CODE (t) == IMAGPART_EXPR)
3513 t = TREE_OPERAND (t, 0);
3515 if (DECL_P (t))
3516 return true;
3518 return false;
3522 /* Called via walk_trees. Verify tree sharing. */
3524 static tree
3525 verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
3527 htab_t htab = (htab_t) data;
3528 void **slot;
3530 if (tree_node_can_be_shared (*tp))
3532 *walk_subtrees = false;
3533 return NULL;
3536 slot = htab_find_slot (htab, *tp, INSERT);
3537 if (*slot)
3538 return (tree) *slot;
3539 *slot = *tp;
3541 return NULL;
3545 /* Verify the GIMPLE statement chain. */
3547 void
3548 verify_stmts (void)
3550 basic_block bb;
3551 block_stmt_iterator bsi;
3552 bool err = false;
3553 htab_t htab;
3554 tree addr;
3556 timevar_push (TV_TREE_STMT_VERIFY);
3557 htab = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
3559 FOR_EACH_BB (bb)
3561 tree phi;
3562 int i;
3564 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
3566 int phi_num_args = PHI_NUM_ARGS (phi);
3568 if (bb_for_stmt (phi) != bb)
3570 error ("bb_for_stmt (phi) is set to a wrong basic block");
3571 err |= true;
3574 for (i = 0; i < phi_num_args; i++)
3576 tree t = PHI_ARG_DEF (phi, i);
3577 tree addr;
3579 /* Addressable variables do have SSA_NAMEs but they
3580 are not considered gimple values. */
3581 if (TREE_CODE (t) != SSA_NAME
3582 && TREE_CODE (t) != FUNCTION_DECL
3583 && !is_gimple_val (t))
3585 error ("PHI def is not a GIMPLE value");
3586 debug_generic_stmt (phi);
3587 debug_generic_stmt (t);
3588 err |= true;
3591 addr = walk_tree (&t, verify_expr, (void *) 1, NULL);
3592 if (addr)
3594 debug_generic_stmt (addr);
3595 err |= true;
3598 addr = walk_tree (&t, verify_node_sharing, htab, NULL);
3599 if (addr)
3601 error ("incorrect sharing of tree nodes");
3602 debug_generic_stmt (phi);
3603 debug_generic_stmt (addr);
3604 err |= true;
3609 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
3611 tree stmt = bsi_stmt (bsi);
3613 if (bb_for_stmt (stmt) != bb)
3615 error ("bb_for_stmt (stmt) is set to a wrong basic block");
3616 err |= true;
3619 bsi_next (&bsi);
3620 err |= verify_stmt (stmt, bsi_end_p (bsi));
3621 addr = walk_tree (&stmt, verify_node_sharing, htab, NULL);
3622 if (addr)
3624 error ("incorrect sharing of tree nodes");
3625 debug_generic_stmt (stmt);
3626 debug_generic_stmt (addr);
3627 err |= true;
3632 if (err)
3633 internal_error ("verify_stmts failed");
3635 htab_delete (htab);
3636 timevar_pop (TV_TREE_STMT_VERIFY);
3640 /* Verifies that the flow information is OK. */
3642 static int
3643 tree_verify_flow_info (void)
3645 int err = 0;
3646 basic_block bb;
3647 block_stmt_iterator bsi;
3648 tree stmt;
3649 edge e;
3650 edge_iterator ei;
3652 if (ENTRY_BLOCK_PTR->stmt_list)
3654 error ("ENTRY_BLOCK has a statement list associated with it");
3655 err = 1;
3658 if (EXIT_BLOCK_PTR->stmt_list)
3660 error ("EXIT_BLOCK has a statement list associated with it");
3661 err = 1;
3664 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
3665 if (e->flags & EDGE_FALLTHRU)
3667 error ("fallthru to exit from bb %d", e->src->index);
3668 err = 1;
3671 FOR_EACH_BB (bb)
3673 bool found_ctrl_stmt = false;
3675 stmt = NULL_TREE;
3677 /* Skip labels at the start of the basic block. */
3678 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3680 tree prev_stmt = stmt;
3682 stmt = bsi_stmt (bsi);
3684 if (TREE_CODE (stmt) != LABEL_EXPR)
3685 break;
3687 if (prev_stmt && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
3689 error ("nonlocal label ");
3690 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3691 fprintf (stderr, " is not first in a sequence of labels in bb %d",
3692 bb->index);
3693 err = 1;
3696 if (label_to_block (LABEL_EXPR_LABEL (stmt)) != bb)
3698 error ("label ");
3699 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3700 fprintf (stderr, " to block does not match in bb %d",
3701 bb->index);
3702 err = 1;
3705 if (decl_function_context (LABEL_EXPR_LABEL (stmt))
3706 != current_function_decl)
3708 error ("label ");
3709 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3710 fprintf (stderr, " has incorrect context in bb %d",
3711 bb->index);
3712 err = 1;
3716 /* Verify that the body of basic block BB is free of control flow. */
3717 for (; !bsi_end_p (bsi); bsi_next (&bsi))
3719 tree stmt = bsi_stmt (bsi);
3721 if (found_ctrl_stmt)
3723 error ("control flow in the middle of basic block %d",
3724 bb->index);
3725 err = 1;
3728 if (stmt_ends_bb_p (stmt))
3729 found_ctrl_stmt = true;
3731 if (TREE_CODE (stmt) == LABEL_EXPR)
3733 error ("label ");
3734 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3735 fprintf (stderr, " in the middle of basic block %d", bb->index);
3736 err = 1;
3740 bsi = bsi_last (bb);
3741 if (bsi_end_p (bsi))
3742 continue;
3744 stmt = bsi_stmt (bsi);
3746 err |= verify_eh_edges (stmt);
3748 if (is_ctrl_stmt (stmt))
3750 FOR_EACH_EDGE (e, ei, bb->succs)
3751 if (e->flags & EDGE_FALLTHRU)
3753 error ("fallthru edge after a control statement in bb %d",
3754 bb->index);
3755 err = 1;
3759 if (TREE_CODE (stmt) != COND_EXPR)
3761 /* Verify that there are no edges with EDGE_TRUE_VALUE/EDGE_FALSE_VALUE
3762 set after anything other than a COND_EXPR. */
3763 FOR_EACH_EDGE (e, ei, bb->succs)
3764 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
3766 error ("true/false edge after a non-COND_EXPR in bb %d",
3767 bb->index);
3768 err = 1;
3772 switch (TREE_CODE (stmt))
3774 case COND_EXPR:
3776 edge true_edge;
3777 edge false_edge;
3778 if (TREE_CODE (COND_EXPR_THEN (stmt)) != GOTO_EXPR
3779 || TREE_CODE (COND_EXPR_ELSE (stmt)) != GOTO_EXPR)
3781 error ("structured COND_EXPR at the end of bb %d", bb->index);
3782 err = 1;
3785 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
3787 if (!true_edge || !false_edge
3788 || !(true_edge->flags & EDGE_TRUE_VALUE)
3789 || !(false_edge->flags & EDGE_FALSE_VALUE)
3790 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3791 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3792 || EDGE_COUNT (bb->succs) >= 3)
3794 error ("wrong outgoing edge flags at end of bb %d",
3795 bb->index);
3796 err = 1;
3799 if (!has_label_p (true_edge->dest,
3800 GOTO_DESTINATION (COND_EXPR_THEN (stmt))))
3802 error ("%<then%> label does not match edge at end of bb %d",
3803 bb->index);
3804 err = 1;
3807 if (!has_label_p (false_edge->dest,
3808 GOTO_DESTINATION (COND_EXPR_ELSE (stmt))))
3810 error ("%<else%> label does not match edge at end of bb %d",
3811 bb->index);
3812 err = 1;
3815 break;
3817 case GOTO_EXPR:
3818 if (simple_goto_p (stmt))
3820 error ("explicit goto at end of bb %d", bb->index);
3821 err = 1;
3823 else
3825 /* FIXME. We should double check that the labels in the
3826 destination blocks have their address taken. */
3827 FOR_EACH_EDGE (e, ei, bb->succs)
3828 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
3829 | EDGE_FALSE_VALUE))
3830 || !(e->flags & EDGE_ABNORMAL))
3832 error ("wrong outgoing edge flags at end of bb %d",
3833 bb->index);
3834 err = 1;
3837 break;
3839 case RETURN_EXPR:
3840 if (!single_succ_p (bb)
3841 || (single_succ_edge (bb)->flags
3842 & (EDGE_FALLTHRU | EDGE_ABNORMAL
3843 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3845 error ("wrong outgoing edge flags at end of bb %d", bb->index);
3846 err = 1;
3848 if (single_succ (bb) != EXIT_BLOCK_PTR)
3850 error ("return edge does not point to exit in bb %d",
3851 bb->index);
3852 err = 1;
3854 break;
3856 case SWITCH_EXPR:
3858 tree prev;
3859 edge e;
3860 size_t i, n;
3861 tree vec;
3863 vec = SWITCH_LABELS (stmt);
3864 n = TREE_VEC_LENGTH (vec);
3866 /* Mark all the destination basic blocks. */
3867 for (i = 0; i < n; ++i)
3869 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3870 basic_block label_bb = label_to_block (lab);
3872 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
3873 label_bb->aux = (void *)1;
3876 /* Verify that the case labels are sorted. */
3877 prev = TREE_VEC_ELT (vec, 0);
3878 for (i = 1; i < n - 1; ++i)
3880 tree c = TREE_VEC_ELT (vec, i);
3881 if (! CASE_LOW (c))
3883 error ("found default case not at end of case vector");
3884 err = 1;
3885 continue;
3887 if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
3889 error ("case labels not sorted: ");
3890 print_generic_expr (stderr, prev, 0);
3891 fprintf (stderr," is greater than ");
3892 print_generic_expr (stderr, c, 0);
3893 fprintf (stderr," but comes before it.\n");
3894 err = 1;
3896 prev = c;
3898 if (CASE_LOW (TREE_VEC_ELT (vec, n - 1)))
3900 error ("no default case found at end of case vector");
3901 err = 1;
3904 FOR_EACH_EDGE (e, ei, bb->succs)
3906 if (!e->dest->aux)
3908 error ("extra outgoing edge %d->%d",
3909 bb->index, e->dest->index);
3910 err = 1;
3912 e->dest->aux = (void *)2;
3913 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
3914 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3916 error ("wrong outgoing edge flags at end of bb %d",
3917 bb->index);
3918 err = 1;
3922 /* Check that we have all of them. */
3923 for (i = 0; i < n; ++i)
3925 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3926 basic_block label_bb = label_to_block (lab);
3928 if (label_bb->aux != (void *)2)
3930 error ("missing edge %i->%i",
3931 bb->index, label_bb->index);
3932 err = 1;
3936 FOR_EACH_EDGE (e, ei, bb->succs)
3937 e->dest->aux = (void *)0;
3940 default: ;
3944 if (dom_computed[CDI_DOMINATORS] >= DOM_NO_FAST_QUERY)
3945 verify_dominators (CDI_DOMINATORS);
3947 return err;
3951 /* Updates phi nodes after creating a forwarder block joined
3952 by edge FALLTHRU. */
3954 static void
3955 tree_make_forwarder_block (edge fallthru)
3957 edge e;
3958 edge_iterator ei;
3959 basic_block dummy, bb;
3960 tree phi, new_phi, var;
3962 dummy = fallthru->src;
3963 bb = fallthru->dest;
3965 if (single_pred_p (bb))
3966 return;
3968 /* If we redirected a branch we must create new phi nodes at the
3969 start of BB. */
3970 for (phi = phi_nodes (dummy); phi; phi = PHI_CHAIN (phi))
3972 var = PHI_RESULT (phi);
3973 new_phi = create_phi_node (var, bb);
3974 SSA_NAME_DEF_STMT (var) = new_phi;
3975 SET_PHI_RESULT (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
3976 add_phi_arg (new_phi, PHI_RESULT (phi), fallthru);
3979 /* Ensure that the PHI node chain is in the same order. */
3980 set_phi_nodes (bb, phi_reverse (phi_nodes (bb)));
3982 /* Add the arguments we have stored on edges. */
3983 FOR_EACH_EDGE (e, ei, bb->preds)
3985 if (e == fallthru)
3986 continue;
3988 flush_pending_stmts (e);
3993 /* Return a non-special label in the head of basic block BB.
3994 Create one if it doesn't exist. */
3996 tree
3997 tree_block_label (basic_block bb)
3999 block_stmt_iterator i, s = bsi_start (bb);
4000 bool first = true;
4001 tree label, stmt;
4003 for (i = s; !bsi_end_p (i); first = false, bsi_next (&i))
4005 stmt = bsi_stmt (i);
4006 if (TREE_CODE (stmt) != LABEL_EXPR)
4007 break;
4008 label = LABEL_EXPR_LABEL (stmt);
4009 if (!DECL_NONLOCAL (label))
4011 if (!first)
4012 bsi_move_before (&i, &s);
4013 return label;
4017 label = create_artificial_label ();
4018 stmt = build1 (LABEL_EXPR, void_type_node, label);
4019 bsi_insert_before (&s, stmt, BSI_NEW_STMT);
4020 return label;
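/* Illustrative sketch, not part of tree-cfg.c: materializing an explicit
   jump to block DEST with tree_block_label, mirroring what
   disband_implicit_edges does for fallthru edges.  BSI is assumed to point
   at the statement the new goto should follow; the function name is
   hypothetical.  */

static void
example_emit_goto (block_stmt_iterator *bsi, basic_block dest)
{
  tree label = tree_block_label (dest);
  tree stmt = build1 (GOTO_EXPR, void_type_node, label);

  bsi_insert_after (bsi, stmt, BSI_NEW_STMT);
}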
4024 /* Attempt to perform edge redirection by replacing a possibly complex
4025 jump instruction by a goto or by removing the jump completely.
4026 This can apply only if all edges now point to the same block. The
4027 parameters and return values are equivalent to
4028 redirect_edge_and_branch. */
4030 static edge
4031 tree_try_redirect_by_replacing_jump (edge e, basic_block target)
4033 basic_block src = e->src;
4034 block_stmt_iterator b;
4035 tree stmt;
4037 /* We can replace or remove a complex jump only when we have exactly
4038 two edges. */
4039 if (EDGE_COUNT (src->succs) != 2
4040 /* Verify that all targets will be TARGET. Specifically, the
4041 edge that is not E must also go to TARGET. */
4042 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4043 return NULL;
4045 b = bsi_last (src);
4046 if (bsi_end_p (b))
4047 return NULL;
4048 stmt = bsi_stmt (b);
4050 if (TREE_CODE (stmt) == COND_EXPR
4051 || TREE_CODE (stmt) == SWITCH_EXPR)
4053 bsi_remove (&b, true);
4054 e = ssa_redirect_edge (e, target);
4055 e->flags = EDGE_FALLTHRU;
4056 return e;
4059 return NULL;
4063 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4064 edge representing the redirected branch. */
4066 static edge
4067 tree_redirect_edge_and_branch (edge e, basic_block dest)
4069 basic_block bb = e->src;
4070 block_stmt_iterator bsi;
4071 edge ret;
4072 tree label, stmt;
4074 if (e->flags & EDGE_ABNORMAL)
4075 return NULL;
4077 if (e->src != ENTRY_BLOCK_PTR
4078 && (ret = tree_try_redirect_by_replacing_jump (e, dest)))
4079 return ret;
4081 if (e->dest == dest)
4082 return NULL;
4084 label = tree_block_label (dest);
4086 bsi = bsi_last (bb);
4087 stmt = bsi_end_p (bsi) ? NULL : bsi_stmt (bsi);
4089 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
4091 case COND_EXPR:
4092 stmt = (e->flags & EDGE_TRUE_VALUE
4093 ? COND_EXPR_THEN (stmt)
4094 : COND_EXPR_ELSE (stmt));
4095 GOTO_DESTINATION (stmt) = label;
4096 break;
4098 case GOTO_EXPR:
4099 /* No non-abnormal edges should lead from a non-simple goto, and
4100 simple ones should be represented implicitly. */
4101 gcc_unreachable ();
4103 case SWITCH_EXPR:
4105 tree cases = get_cases_for_edge (e, stmt);
4107 /* If we have a list of cases associated with E, then use it
4108 as it's a lot faster than walking the entire case vector. */
4109 if (cases)
4111 edge e2 = find_edge (e->src, dest);
4112 tree last, first;
4114 first = cases;
4115 while (cases)
4117 last = cases;
4118 CASE_LABEL (cases) = label;
4119 cases = TREE_CHAIN (cases);
4122 /* If there was already an edge in the CFG, then we need
4123 to move all the cases associated with E to E2. */
4124 if (e2)
4126 tree cases2 = get_cases_for_edge (e2, stmt);
4128 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4129 TREE_CHAIN (cases2) = first;
4132 else
4134 tree vec = SWITCH_LABELS (stmt);
4135 size_t i, n = TREE_VEC_LENGTH (vec);
4137 for (i = 0; i < n; i++)
4139 tree elt = TREE_VEC_ELT (vec, i);
4141 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4142 CASE_LABEL (elt) = label;
4146 break;
4149 case RETURN_EXPR:
4150 bsi_remove (&bsi, true);
4151 e->flags |= EDGE_FALLTHRU;
4152 break;
4154 default:
4155 /* Otherwise it must be a fallthru edge, and we don't need to
4156 do anything besides redirecting it. */
4157 gcc_assert (e->flags & EDGE_FALLTHRU);
4158 break;
4161 /* Update/insert PHI nodes as necessary. */
4163 /* Now update the edges in the CFG. */
4164 e = ssa_redirect_edge (e, dest);
4166 return e;
4170 /* Simple wrapper, as we can always redirect fallthru edges. */
4172 static basic_block
4173 tree_redirect_edge_and_branch_force (edge e, basic_block dest)
4175 e = tree_redirect_edge_and_branch (e, dest);
4176 gcc_assert (e);
4178 return NULL;
4182 /* Splits basic block BB after statement STMT (but at least after the
4183 labels). If STMT is NULL, BB is split just after the labels. */
4185 static basic_block
4186 tree_split_block (basic_block bb, void *stmt)
4188 block_stmt_iterator bsi;
4189 tree_stmt_iterator tsi_tgt;
4190 tree act;
4191 basic_block new_bb;
4192 edge e;
4193 edge_iterator ei;
4195 new_bb = create_empty_bb (bb);
4197 /* Redirect the outgoing edges. */
4198 new_bb->succs = bb->succs;
4199 bb->succs = NULL;
4200 FOR_EACH_EDGE (e, ei, new_bb->succs)
4201 e->src = new_bb;
4203 if (stmt && TREE_CODE ((tree) stmt) == LABEL_EXPR)
4204 stmt = NULL;
4206 /* Move everything from BSI to the new basic block. */
4207 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4209 act = bsi_stmt (bsi);
4210 if (TREE_CODE (act) == LABEL_EXPR)
4211 continue;
4213 if (!stmt)
4214 break;
4216 if (stmt == act)
4218 bsi_next (&bsi);
4219 break;
4223 if (bsi_end_p (bsi))
4224 return new_bb;
4226 /* Split the statement list - avoid re-creating new containers as this
4227 brings ugly quadratic memory consumption in the inliner.
4228 (We are still quadratic since we need to update stmt BB pointers,
4229 sadly.) */
4230 new_bb->stmt_list = tsi_split_statement_list_before (&bsi.tsi);
4231 for (tsi_tgt = tsi_start (new_bb->stmt_list);
4232 !tsi_end_p (tsi_tgt); tsi_next (&tsi_tgt))
4233 change_bb_for_stmt (tsi_stmt (tsi_tgt), new_bb);
4235 return new_bb;
4239 /* Moves basic block BB after block AFTER. */
4241 static bool
4242 tree_move_block_after (basic_block bb, basic_block after)
4244 if (bb->prev_bb == after)
4245 return true;
4247 unlink_block (bb);
4248 link_block (bb, after);
4250 return true;
4254 /* Return true if basic_block can be duplicated. */
4256 static bool
4257 tree_can_duplicate_bb_p (basic_block bb ATTRIBUTE_UNUSED)
4259 return true;
4263 /* Create a duplicate of the basic block BB. NOTE: This does not
4264 preserve SSA form. */
4266 static basic_block
4267 tree_duplicate_bb (basic_block bb)
4269 basic_block new_bb;
4270 block_stmt_iterator bsi, bsi_tgt;
4271 tree phi;
4273 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
4275 /* Copy the PHI nodes. We ignore PHI node arguments here because
4276 the incoming edges have not been set up yet. */
4277 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4279 tree copy = create_phi_node (PHI_RESULT (phi), new_bb);
4280 create_new_def_for (PHI_RESULT (copy), copy, PHI_RESULT_PTR (copy));
4283 /* Keep the chain of PHI nodes in the same order so that they can be
4284 updated by ssa_redirect_edge. */
4285 set_phi_nodes (new_bb, phi_reverse (phi_nodes (new_bb)));
4287 bsi_tgt = bsi_start (new_bb);
4288 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4290 def_operand_p def_p;
4291 ssa_op_iter op_iter;
4292 tree stmt, copy;
4293 int region;
4295 stmt = bsi_stmt (bsi);
4296 if (TREE_CODE (stmt) == LABEL_EXPR)
4297 continue;
4299 /* Create a new copy of STMT and duplicate STMT's virtual
4300 operands. */
4301 copy = unshare_expr (stmt);
4302 bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
4303 copy_virtual_operands (copy, stmt);
4304 region = lookup_stmt_eh_region (stmt);
4305 if (region >= 0)
4306 add_stmt_to_eh_region (copy, region);
4308 /* Create new names for all the definitions created by COPY and
4309 add replacement mappings for each new name. */
4310 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
4311 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
4314 return new_bb;
4318 /* Basic block BB_COPY was created by code duplication. Add phi node
4319 arguments for edges going out of BB_COPY. The blocks that were
4320 duplicated have BB_DUPLICATED set. */
4322 void
4323 add_phi_args_after_copy_bb (basic_block bb_copy)
4325 basic_block bb, dest;
4326 edge e, e_copy;
4327 edge_iterator ei;
4328 tree phi, phi_copy, phi_next, def;
4330 bb = get_bb_original (bb_copy);
4332 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
4334 if (!phi_nodes (e_copy->dest))
4335 continue;
4337 if (e_copy->dest->flags & BB_DUPLICATED)
4338 dest = get_bb_original (e_copy->dest);
4339 else
4340 dest = e_copy->dest;
4342 e = find_edge (bb, dest);
4343 if (!e)
4345 /* During loop unrolling the target of the latch edge is copied.
4346 In this case we are not looking for the edge to dest, but for the
4347 edge to the duplicated block whose original was dest. */
4348 FOR_EACH_EDGE (e, ei, bb->succs)
4349 if ((e->dest->flags & BB_DUPLICATED)
4350 && get_bb_original (e->dest) == dest)
4351 break;
4353 gcc_assert (e != NULL);
4356 for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
4357 phi;
4358 phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
4360 phi_next = PHI_CHAIN (phi);
4361 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4362 add_phi_arg (phi_copy, def, e_copy);
4367 /* Blocks in REGION_COPY array of length N_REGION were created by
4368 duplication of basic blocks. Add phi node arguments for edges
4369 going from these blocks. */
4371 void
4372 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region)
4374 unsigned i;
4376 for (i = 0; i < n_region; i++)
4377 region_copy[i]->flags |= BB_DUPLICATED;
4379 for (i = 0; i < n_region; i++)
4380 add_phi_args_after_copy_bb (region_copy[i]);
4382 for (i = 0; i < n_region; i++)
4383 region_copy[i]->flags &= ~BB_DUPLICATED;
4386 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
4387 important exit edge EXIT. By important we mean that no SSA name defined
4388 inside region is live over the other exit edges of the region. All entry
4389 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
4390 to the duplicate of the region. SSA form, dominance and loop information
4391 is updated. The new basic blocks are stored to REGION_COPY in the same
4392 order as they had in REGION, provided that REGION_COPY is not NULL.
4393 The function returns false if it is unable to copy the region,
4394 true otherwise. */
4396 bool
4397 tree_duplicate_sese_region (edge entry, edge exit,
4398 basic_block *region, unsigned n_region,
4399 basic_block *region_copy)
4401 unsigned i, n_doms;
4402 bool free_region_copy = false, copying_header = false;
4403 struct loop *loop = entry->dest->loop_father;
4404 edge exit_copy;
4405 basic_block *doms;
4406 edge redirected;
4407 int total_freq = 0, entry_freq = 0;
4408 gcov_type total_count = 0, entry_count = 0;
4410 if (!can_copy_bbs_p (region, n_region))
4411 return false;
4413 /* Some sanity checking. Note that we do not check for all possible
4414 misuses of the functions, i.e., if you ask to copy something weird,
4415 it will work, but the state of structures probably will not be
4416 correct. */
4417 for (i = 0; i < n_region; i++)
4419 /* We do not handle subloops, i.e. all the blocks must belong to the
4420 same loop. */
4421 if (region[i]->loop_father != loop)
4422 return false;
4424 if (region[i] != entry->dest
4425 && region[i] == loop->header)
4426 return false;
4429 loop->copy = loop;
4431 /* In case the function is used for loop header copying (which is the primary
4432 use), ensure that EXIT and its copy will be the new latch and entry edges. */
4433 if (loop->header == entry->dest)
4435 copying_header = true;
4436 loop->copy = loop->outer;
4438 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
4439 return false;
4441 for (i = 0; i < n_region; i++)
4442 if (region[i] != exit->src
4443 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
4444 return false;
4447 if (!region_copy)
4449 region_copy = XNEWVEC (basic_block, n_region);
4450 free_region_copy = true;
4453 gcc_assert (!need_ssa_update_p ());
4455 /* Record blocks outside the region that are dominated by something
4456 inside. */
4457 doms = XNEWVEC (basic_block, n_basic_blocks);
4458 initialize_original_copy_tables ();
4460 n_doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region, doms);
4462 if (entry->dest->count)
4464 total_count = entry->dest->count;
4465 entry_count = entry->count;
4466 /* Fix up corner cases, to avoid division by zero or creation of negative
4467 frequencies. */
4468 if (entry_count > total_count)
4469 entry_count = total_count;
4471 else
4473 total_freq = entry->dest->frequency;
4474 entry_freq = EDGE_FREQUENCY (entry);
4475 /* Fix up corner cases, to avoid division by zero or creation of negative
4476 frequencies. */
4477 if (total_freq == 0)
4478 total_freq = 1;
4479 else if (entry_freq > total_freq)
4480 entry_freq = total_freq;
4483 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
4484 split_edge_bb_loc (entry));
4485 if (total_count)
4487 scale_bbs_frequencies_gcov_type (region, n_region,
4488 total_count - entry_count,
4489 total_count);
4490 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
4491 total_count);
4493 else
4495 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
4496 total_freq);
4497 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
4500 if (copying_header)
4502 loop->header = exit->dest;
4503 loop->latch = exit->src;
4506 /* Redirect the entry and add the phi node arguments. */
4507 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
4508 gcc_assert (redirected != NULL);
4509 flush_pending_stmts (entry);
4511 /* Concerning updating of dominators: We must recount dominators
4512 for entry block and its copy. Anything that is outside of the
4513 region, but was dominated by something inside needs recounting as
4514 well. */
4515 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
4516 doms[n_doms++] = get_bb_original (entry->dest);
4517 iterate_fix_dominators (CDI_DOMINATORS, doms, n_doms);
4518 free (doms);
4520 /* Add the other PHI node arguments. */
4521 add_phi_args_after_copy (region_copy, n_region);
4523 /* Update the SSA web. */
4524 update_ssa (TODO_update_ssa);
4526 if (free_region_copy)
4527 free (region_copy);
4529 free_original_copy_tables ();
4530 return true;
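/* A minimal usage sketch (hypothetical, not part of GCC): assuming ENTRY is
   the single edge entering a loop header, EXIT is the edge that should become
   the new latch, and REGION/N_REGION describe the header blocks, loop header
   copying reduces to a single call to the function above.  Kept disabled;
   only the wrapper and its parameter names are assumptions.  */
#if 0
static bool
example_copy_loop_header (edge entry, edge exit,
                          basic_block *region, unsigned n_region)
{
  /* On success, SSA form, dominators and loop structures have already
     been updated by tree_duplicate_sese_region.  */
  return tree_duplicate_sese_region (entry, exit, region, n_region, NULL);
}
#endif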
4534 DEF_VEC_P(basic_block);
4535 DEF_VEC_ALLOC_P(basic_block,heap);
4538 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
4539 adding blocks when the dominator traversal reaches EXIT. This
4540 function silently assumes that ENTRY strictly dominates EXIT. */
4542 static void
4543 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
4544 VEC(basic_block,heap) **bbs_p)
4546 basic_block son;
4548 for (son = first_dom_son (CDI_DOMINATORS, entry);
4549 son;
4550 son = next_dom_son (CDI_DOMINATORS, son))
4552 VEC_safe_push (basic_block, heap, *bbs_p, son);
4553 if (son != exit)
4554 gather_blocks_in_sese_region (son, exit, bbs_p);
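/* A minimal sketch (hypothetical) of how the helper above is meant to be
   driven, mirroring its use in move_sese_region_to_fn below.  It assumes
   dominance information has already been computed and that ENTRY_BB
   strictly dominates EXIT_BB.  */
#if 0
static VEC(basic_block,heap) *
example_collect_sese_blocks (basic_block entry_bb, basic_block exit_bb)
{
  VEC(basic_block,heap) *bbs = NULL;

  /* ENTRY_BB is not pushed by the dominator traversal itself.  */
  VEC_safe_push (basic_block, heap, bbs, entry_bb);
  gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
  return bbs;
}
#endif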
4559 struct move_stmt_d
4561 tree block;
4562 tree from_context;
4563 tree to_context;
4564 bitmap vars_to_remove;
4565 htab_t new_label_map;
4566 bool remap_decls_p;
4569 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
4570 contained in *TP and change the DECL_CONTEXT of every local
4571 variable referenced in *TP. */
4573 static tree
4574 move_stmt_r (tree *tp, int *walk_subtrees, void *data)
4576 struct move_stmt_d *p = (struct move_stmt_d *) data;
4577 tree t = *tp;
4579 if (p->block && IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (TREE_CODE (t))))
4580 TREE_BLOCK (t) = p->block;
4582 if (OMP_DIRECTIVE_P (t)
4583 && TREE_CODE (t) != OMP_RETURN
4584 && TREE_CODE (t) != OMP_CONTINUE)
4586 /* Do not remap variables inside OMP directives. Variables
4587 referenced in clauses and directive header belong to the
4588 parent function and should not be moved into the child
4589 function. */
4590 bool save_remap_decls_p = p->remap_decls_p;
4591 p->remap_decls_p = false;
4592 *walk_subtrees = 0;
4594 walk_tree (&OMP_BODY (t), move_stmt_r, p, NULL);
4596 p->remap_decls_p = save_remap_decls_p;
4598 else if (DECL_P (t) && DECL_CONTEXT (t) == p->from_context)
4600 if (TREE_CODE (t) == LABEL_DECL)
4602 if (p->new_label_map)
4604 struct tree_map in, *out;
4605 in.from = t;
4606 out = htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
4607 if (out)
4608 *tp = t = out->to;
4611 DECL_CONTEXT (t) = p->to_context;
4613 else if (p->remap_decls_p)
4615 DECL_CONTEXT (t) = p->to_context;
4617 if (TREE_CODE (t) == VAR_DECL)
4619 struct function *f = DECL_STRUCT_FUNCTION (p->to_context);
4620 f->unexpanded_var_list
4621 = tree_cons (0, t, f->unexpanded_var_list);
4623 /* Mark T to be removed from the original function,
4624 otherwise it will be given a DECL_RTL when the
4625 original function is expanded. */
4626 bitmap_set_bit (p->vars_to_remove, DECL_UID (t));
4630 else if (TYPE_P (t))
4631 *walk_subtrees = 0;
4633 return NULL_TREE;
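/* A minimal sketch (hypothetical) of driving the callback above on a single
   statement: remap STMT from function FROM_FN to function TO_FN.  It simply
   mirrors the setup done by move_block_to_fn below; the wrapper and its
   parameter names are illustrative only.  */
#if 0
static void
example_remap_stmt (tree stmt, tree from_fn, tree to_fn,
                    bitmap vars_to_remove)
{
  struct move_stmt_d d;

  /* Leaving d.block NULL means TREE_BLOCK is not rewritten.  */
  memset (&d, 0, sizeof (d));
  d.from_context = from_fn;
  d.to_context = to_fn;
  d.vars_to_remove = vars_to_remove;
  d.remap_decls_p = true;
  walk_tree (&stmt, move_stmt_r, &d, NULL);
}
#endif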
4637 /* Move basic block BB from function CFUN to function DEST_FN. The
4638 block is moved out of the original linked list and placed after
4639 block AFTER in the new list. Also, the block is removed from the
4640 original array of blocks and placed in DEST_FN's array of blocks.
4641 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
4642 updated to reflect the moved edges.
4644 On exit, local variables that need to be removed from
4645 CFUN->UNEXPANDED_VAR_LIST will have been added to VARS_TO_REMOVE. */
4647 static void
4648 move_block_to_fn (struct function *dest_cfun, basic_block bb,
4649 basic_block after, bool update_edge_count_p,
4650 bitmap vars_to_remove, htab_t new_label_map, int eh_offset)
4652 struct control_flow_graph *cfg;
4653 edge_iterator ei;
4654 edge e;
4655 block_stmt_iterator si;
4656 struct move_stmt_d d;
4657 unsigned old_len, new_len;
4658 basic_block *addr;
4660 /* Link BB to the new linked list. */
4661 move_block_after (bb, after);
4663 /* Update the edge count in the corresponding flowgraphs. */
4664 if (update_edge_count_p)
4665 FOR_EACH_EDGE (e, ei, bb->succs)
4667 cfun->cfg->x_n_edges--;
4668 dest_cfun->cfg->x_n_edges++;
4671 /* Remove BB from the original basic block array. */
4672 VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
4673 cfun->cfg->x_n_basic_blocks--;
4675 /* Grow DEST_CFUN's basic block array if needed. */
4676 cfg = dest_cfun->cfg;
4677 cfg->x_n_basic_blocks++;
4678 if (bb->index > cfg->x_last_basic_block)
4679 cfg->x_last_basic_block = bb->index;
4681 old_len = VEC_length (basic_block, cfg->x_basic_block_info);
4682 if ((unsigned) cfg->x_last_basic_block >= old_len)
4684 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
4685 VEC_safe_grow (basic_block, gc, cfg->x_basic_block_info, new_len);
4686 addr = VEC_address (basic_block, cfg->x_basic_block_info);
4687 memset (&addr[old_len], 0, sizeof (basic_block) * (new_len - old_len));
4690 VEC_replace (basic_block, cfg->x_basic_block_info,
4691 cfg->x_last_basic_block, bb);
4693 /* The statements in BB need to be associated with a new TREE_BLOCK.
4694 Labels need to be associated with a new label-to-block map. */
4695 memset (&d, 0, sizeof (d));
4696 d.vars_to_remove = vars_to_remove;
4698 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
4700 tree stmt = bsi_stmt (si);
4701 int region;
4703 d.from_context = cfun->decl;
4704 d.to_context = dest_cfun->decl;
4705 d.remap_decls_p = true;
4706 d.new_label_map = new_label_map;
4707 if (TREE_BLOCK (stmt))
4708 d.block = DECL_INITIAL (dest_cfun->decl);
4710 walk_tree (&stmt, move_stmt_r, &d, NULL);
4712 if (TREE_CODE (stmt) == LABEL_EXPR)
4714 tree label = LABEL_EXPR_LABEL (stmt);
4715 int uid = LABEL_DECL_UID (label);
4717 gcc_assert (uid > -1);
4719 old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
4720 if (old_len <= (unsigned) uid)
4722 new_len = 3 * uid / 2;
4723 VEC_safe_grow (basic_block, gc, cfg->x_label_to_block_map,
4724 new_len);
4725 addr = VEC_address (basic_block, cfg->x_label_to_block_map);
4726 memset (&addr[old_len], 0,
4727 sizeof (basic_block) * (new_len - old_len));
4730 VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
4731 VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
4733 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
4735 if (uid >= dest_cfun->last_label_uid)
4736 dest_cfun->last_label_uid = uid + 1;
4738 else if (TREE_CODE (stmt) == RESX_EXPR && eh_offset != 0)
4739 TREE_OPERAND (stmt, 0) =
4740 build_int_cst (NULL_TREE,
4741 TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0))
4742 + eh_offset);
4744 region = lookup_stmt_eh_region (stmt);
4745 if (region >= 0)
4747 add_stmt_to_eh_region_fn (dest_cfun, stmt, region + eh_offset);
4748 remove_stmt_from_eh_region (stmt);
4753 /* Examine the statements in BB (which is in SRC_CFUN); find and return
4754 the outermost EH region. Use REGION as the incoming base EH region. */
4756 static int
4757 find_outermost_region_in_block (struct function *src_cfun,
4758 basic_block bb, int region)
4760 block_stmt_iterator si;
4762 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
4764 tree stmt = bsi_stmt (si);
4765 int stmt_region;
4767 if (TREE_CODE (stmt) == RESX_EXPR)
4768 stmt_region = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
4769 else
4770 stmt_region = lookup_stmt_eh_region_fn (src_cfun, stmt);
4771 if (stmt_region > 0)
4773 if (region < 0)
4774 region = stmt_region;
4775 else if (stmt_region != region)
4777 region = eh_region_outermost (src_cfun, stmt_region, region);
4778 gcc_assert (region != -1);
4783 return region;
4786 static tree
4787 new_label_mapper (tree decl, void *data)
4789 htab_t hash = (htab_t) data;
4790 struct tree_map *m;
4791 void **slot;
4793 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
4795 m = xmalloc (sizeof (struct tree_map));
4796 m->hash = DECL_UID (decl);
4797 m->from = decl;
4798 m->to = create_artificial_label ();
4799 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
4801 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
4802 gcc_assert (*slot == NULL);
4804 *slot = m;
4806 return m->to;
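/* A minimal sketch (hypothetical): once the map has been populated via
   new_label_mapper, translating a label back is the same hash-table lookup
   that move_stmt_r performs above.  */
#if 0
static tree
example_remap_label (htab_t new_label_map, tree label)
{
  struct tree_map in, *out;

  in.from = label;
  out = htab_find_with_hash (new_label_map, &in, DECL_UID (label));
  return out ? out->to : label;
}
#endif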
4809 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
4810 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
4811 single basic block in the original CFG and the new basic block is
4812 returned. DEST_CFUN must not have a CFG yet.
4814 Note that the region need not be a pure SESE region. Blocks inside
4815 the region may contain calls to abort/exit. The only restriction
4816 is that ENTRY_BB should be the only entry point and it must
4817 dominate EXIT_BB.
4819 All local variables referenced in the region are assumed to be in
4820 the corresponding BLOCK_VARS and unexpanded variable lists
4821 associated with DEST_CFUN. */
4823 basic_block
4824 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
4825 basic_block exit_bb)
4827 VEC(basic_block,heap) *bbs;
4828 basic_block after, bb, *entry_pred, *exit_succ;
4829 struct function *saved_cfun;
4830 int *entry_flag, *exit_flag, eh_offset;
4831 unsigned i, num_entry_edges, num_exit_edges;
4832 edge e;
4833 edge_iterator ei;
4834 bitmap vars_to_remove;
4835 htab_t new_label_map;
4837 saved_cfun = cfun;
4839 /* Collect all the blocks in the region. Manually add ENTRY_BB
4840 because gather_blocks_in_sese_region will not add it. */
4841 calculate_dominance_info (CDI_DOMINATORS);
4843 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
4844 region. */
4845 gcc_assert (entry_bb != exit_bb
4846 && (!exit_bb
4847 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
4849 bbs = NULL;
4850 VEC_safe_push (basic_block, heap, bbs, entry_bb);
4851 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
4853 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
4854 the predecessor edges to ENTRY_BB and the successor edges to
4855 EXIT_BB so that we can re-attach them to the new basic block that
4856 will replace the region. */
4857 num_entry_edges = EDGE_COUNT (entry_bb->preds);
4858 entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
4859 entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
4860 i = 0;
4861 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
4863 entry_flag[i] = e->flags;
4864 entry_pred[i++] = e->src;
4865 remove_edge (e);
4868 if (exit_bb)
4870 num_exit_edges = EDGE_COUNT (exit_bb->succs);
4871 exit_succ = (basic_block *) xcalloc (num_exit_edges,
4872 sizeof (basic_block));
4873 exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
4874 i = 0;
4875 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
4877 exit_flag[i] = e->flags;
4878 exit_succ[i++] = e->dest;
4879 remove_edge (e);
4882 else
4884 num_exit_edges = 0;
4885 exit_succ = NULL;
4886 exit_flag = NULL;
4889 /* Switch context to the child function to initialize DEST_FN's CFG. */
4890 gcc_assert (dest_cfun->cfg == NULL);
4891 cfun = dest_cfun;
4893 init_empty_tree_cfg ();
4895 /* Initialize EH information for the new function. */
4896 eh_offset = 0;
4897 new_label_map = NULL;
4898 if (saved_cfun->eh)
4900 int region = -1;
4902 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
4903 region = find_outermost_region_in_block (saved_cfun, bb, region);
4905 init_eh_for_function ();
4906 if (region != -1)
4908 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
4909 eh_offset = duplicate_eh_regions (saved_cfun, new_label_mapper,
4910 new_label_map, region, 0);
4914 cfun = saved_cfun;
4916 /* Move blocks from BBS into DEST_CFUN. */
4917 gcc_assert (VEC_length (basic_block, bbs) >= 2);
4918 after = dest_cfun->cfg->x_entry_block_ptr;
4919 vars_to_remove = BITMAP_ALLOC (NULL);
4920 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
4922 /* No need to update edge counts on the last block. It has
4923 already been updated earlier when we detached the region from
4924 the original CFG. */
4925 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, vars_to_remove,
4926 new_label_map, eh_offset);
4927 after = bb;
4930 if (new_label_map)
4931 htab_delete (new_label_map);
4933 /* Remove the variables marked in VARS_TO_REMOVE from
4934 CFUN->UNEXPANDED_VAR_LIST. Otherwise, they will be given a
4935 DECL_RTL in the context of CFUN. */
4936 if (!bitmap_empty_p (vars_to_remove))
4938 tree *p;
4940 for (p = &cfun->unexpanded_var_list; *p; )
4942 tree var = TREE_VALUE (*p);
4943 if (bitmap_bit_p (vars_to_remove, DECL_UID (var)))
4945 *p = TREE_CHAIN (*p);
4946 continue;
4949 p = &TREE_CHAIN (*p);
4953 BITMAP_FREE (vars_to_remove);
4955 /* Rewire the entry and exit blocks. The successor to the entry
4956 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
4957 the child function. Similarly, the predecessor of DEST_FN's
4958 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
4959 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
4960 various CFG manipulation functions get to the right CFG.
4962 FIXME, this is silly. The CFG ought to become a parameter to
4963 these helpers. */
4964 cfun = dest_cfun;
4965 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
4966 if (exit_bb)
4967 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
4968 cfun = saved_cfun;
4970 /* Back in the original function, the SESE region has disappeared,
4971 create a new basic block in its place. */
4972 bb = create_empty_bb (entry_pred[0]);
4973 for (i = 0; i < num_entry_edges; i++)
4974 make_edge (entry_pred[i], bb, entry_flag[i]);
4976 for (i = 0; i < num_exit_edges; i++)
4977 make_edge (bb, exit_succ[i], exit_flag[i]);
4979 if (exit_bb)
4981 free (exit_flag);
4982 free (exit_succ);
4984 free (entry_flag);
4985 free (entry_pred);
4986 free_dominance_info (CDI_DOMINATORS);
4987 free_dominance_info (CDI_POST_DOMINATORS);
4988 VEC_free (basic_block, heap, bbs);
4990 return bb;
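/* A minimal caller sketch (hypothetical): outline the region between
   ENTRY_BB and EXIT_BB into CHILD_FN, roughly the way an outlining pass
   such as OMP expansion would use the routine above.  CHILD_FN and the
   wrapper name are assumptions; the only hard requirement is that the
   destination struct function has no CFG yet.  */
#if 0
static basic_block
example_outline_region (tree child_fn, basic_block entry_bb,
                        basic_block exit_bb)
{
  struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);

  /* The returned block replaces the whole region in the current CFG.  */
  return move_sese_region_to_fn (child_cfun, entry_bb, exit_bb);
}
#endif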
4994 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h) */
4996 void
4997 dump_function_to_file (tree fn, FILE *file, int flags)
4999 tree arg, vars, var;
5000 bool ignore_topmost_bind = false, any_var = false;
5001 basic_block bb;
5002 tree chain;
5003 struct function *saved_cfun;
5005 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
5007 arg = DECL_ARGUMENTS (fn);
5008 while (arg)
5010 print_generic_expr (file, arg, dump_flags);
5011 if (TREE_CHAIN (arg))
5012 fprintf (file, ", ");
5013 arg = TREE_CHAIN (arg);
5015 fprintf (file, ")\n");
5017 if (flags & TDF_DETAILS)
5018 dump_eh_tree (file, DECL_STRUCT_FUNCTION (fn));
5019 if (flags & TDF_RAW)
5021 dump_node (fn, TDF_SLIM | flags, file);
5022 return;
5025 /* Switch CFUN to point to FN. */
5026 saved_cfun = cfun;
5027 cfun = DECL_STRUCT_FUNCTION (fn);
5029 /* When GIMPLE is lowered, the variables are no longer available in
5030 BIND_EXPRs, so display them separately. */
5031 if (cfun && cfun->decl == fn && cfun->unexpanded_var_list)
5033 ignore_topmost_bind = true;
5035 fprintf (file, "{\n");
5036 for (vars = cfun->unexpanded_var_list; vars; vars = TREE_CHAIN (vars))
5038 var = TREE_VALUE (vars);
5040 print_generic_decl (file, var, flags);
5041 fprintf (file, "\n");
5043 any_var = true;
5047 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
5049 /* Make a CFG based dump. */
5050 check_bb_profile (ENTRY_BLOCK_PTR, file);
5051 if (!ignore_topmost_bind)
5052 fprintf (file, "{\n");
5054 if (any_var && n_basic_blocks)
5055 fprintf (file, "\n");
5057 FOR_EACH_BB (bb)
5058 dump_generic_bb (file, bb, 2, flags);
5060 fprintf (file, "}\n");
5061 check_bb_profile (EXIT_BLOCK_PTR, file);
5063 else
5065 int indent;
5067 /* Make a tree based dump. */
5068 chain = DECL_SAVED_TREE (fn);
5070 if (chain && TREE_CODE (chain) == BIND_EXPR)
5072 if (ignore_topmost_bind)
5074 chain = BIND_EXPR_BODY (chain);
5075 indent = 2;
5077 else
5078 indent = 0;
5080 else
5082 if (!ignore_topmost_bind)
5083 fprintf (file, "{\n");
5084 indent = 2;
5087 if (any_var)
5088 fprintf (file, "\n");
5090 print_generic_stmt_indented (file, chain, flags, indent);
5091 if (ignore_topmost_bind)
5092 fprintf (file, "}\n");
5095 fprintf (file, "\n\n");
5097 /* Restore CFUN. */
5098 cfun = saved_cfun;
5102 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
5104 void
5105 debug_function (tree fn, int flags)
5107 dump_function_to_file (fn, stderr, flags);
5111 /* Pretty print of the loops intermediate representation. */
5112 static void print_loop (FILE *, struct loop *, int);
5113 static void print_pred_bbs (FILE *, basic_block bb);
5114 static void print_succ_bbs (FILE *, basic_block bb);
5117 /* Print on FILE the indexes for the predecessors of basic_block BB. */
5119 static void
5120 print_pred_bbs (FILE *file, basic_block bb)
5122 edge e;
5123 edge_iterator ei;
5125 FOR_EACH_EDGE (e, ei, bb->preds)
5126 fprintf (file, "bb_%d ", e->src->index);
5130 /* Print on FILE the indexes for the successors of basic_block BB. */
5132 static void
5133 print_succ_bbs (FILE *file, basic_block bb)
5135 edge e;
5136 edge_iterator ei;
5138 FOR_EACH_EDGE (e, ei, bb->succs)
5139 fprintf (file, "bb_%d ", e->dest->index);
5143 /* Pretty print LOOP on FILE, indented INDENT spaces. */
5145 static void
5146 print_loop (FILE *file, struct loop *loop, int indent)
5148 char *s_indent;
5149 basic_block bb;
5151 if (loop == NULL)
5152 return;
5154 s_indent = (char *) alloca ((size_t) indent + 1);
5155 memset ((void *) s_indent, ' ', (size_t) indent);
5156 s_indent[indent] = '\0';
5158 /* Print the loop's header. */
5159 fprintf (file, "%sloop_%d\n", s_indent, loop->num);
5161 /* Print the loop's body. */
5162 fprintf (file, "%s{\n", s_indent);
5163 FOR_EACH_BB (bb)
5164 if (bb->loop_father == loop)
5166 /* Print the basic_block's header. */
5167 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
5168 print_pred_bbs (file, bb);
5169 fprintf (file, "}, succs = {");
5170 print_succ_bbs (file, bb);
5171 fprintf (file, "})\n");
5173 /* Print the basic_block's body. */
5174 fprintf (file, "%s {\n", s_indent);
5175 tree_dump_bb (bb, file, indent + 4);
5176 fprintf (file, "%s }\n", s_indent);
5179 print_loop (file, loop->inner, indent + 2);
5180 fprintf (file, "%s}\n", s_indent);
5181 print_loop (file, loop->next, indent);
5185 /* Follow a CFG edge from the entry point of the program, and on entry
5186 of a loop, pretty print the loop structure on FILE. */
5188 void
5189 print_loop_ir (FILE *file)
5191 basic_block bb;
5193 bb = BASIC_BLOCK (NUM_FIXED_BLOCKS);
5194 if (bb && bb->loop_father)
5195 print_loop (file, bb->loop_father, 0);
5199 /* Debugging loops structure at tree level. */
5201 void
5202 debug_loop_ir (void)
5204 print_loop_ir (stderr);
5208 /* Return true if BB ends with a call, possibly followed by some
5209 instructions that must stay with the call. Return false
5210 otherwise. */
5212 static bool
5213 tree_block_ends_with_call_p (basic_block bb)
5215 block_stmt_iterator bsi = bsi_last (bb);
5216 return get_call_expr_in (bsi_stmt (bsi)) != NULL;
5220 /* Return true if BB ends with a conditional branch. Return false
5221 otherwise. */
5223 static bool
5224 tree_block_ends_with_condjump_p (basic_block bb)
5226 tree stmt = last_stmt (bb);
5227 return (stmt && TREE_CODE (stmt) == COND_EXPR);
5231 /* Return true if we need to add fake edge to exit at statement T.
5232 Helper function for tree_flow_call_edges_add. */
5234 static bool
5235 need_fake_edge_p (tree t)
5237 tree call;
5239 /* NORETURN and LONGJMP calls already have an edge to exit.
5240 CONST and PURE calls do not need one.
5241 We don't currently check for CONST and PURE here, although
5242 it would be a good idea, because those attributes are
5243 figured out from the RTL in mark_constant_function, and
5244 the counter incrementation code from -fprofile-arcs
5245 leads to different results from -fbranch-probabilities. */
5246 call = get_call_expr_in (t);
5247 if (call
5248 && !(call_expr_flags (call) & ECF_NORETURN))
5249 return true;
5251 if (TREE_CODE (t) == ASM_EXPR
5252 && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
5253 return true;
5255 return false;
5259 /* Add fake edges to the function exit for any non-constant and
5260 non-noreturn calls, and volatile inline assembly, in the bitmap of blocks
5261 specified by BLOCKS or to the whole CFG if BLOCKS is zero. Return
5262 the number of blocks that were split.
5264 The goal is to expose cases in which entering a basic block does
5265 not imply that all subsequent instructions must be executed. */
5267 static int
5268 tree_flow_call_edges_add (sbitmap blocks)
5270 int i;
5271 int blocks_split = 0;
5272 int last_bb = last_basic_block;
5273 bool check_last_block = false;
5275 if (n_basic_blocks == NUM_FIXED_BLOCKS)
5276 return 0;
5278 if (! blocks)
5279 check_last_block = true;
5280 else
5281 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
5283 /* In the last basic block, before epilogue generation, there will be
5284 a fallthru edge to EXIT. Special care is required if the last insn
5285 of the last basic block is a call because make_edge folds duplicate
5286 edges, which would result in the fallthru edge also being marked
5287 fake, which would result in the fallthru edge being removed by
5288 remove_fake_edges, which would result in an invalid CFG.
5290 Moreover, we can't elide the outgoing fake edge, since the block
5291 profiler needs to take this into account in order to solve the minimal
5292 spanning tree in the case that the call doesn't return.
5294 Handle this by adding a dummy instruction in a new last basic block. */
5295 if (check_last_block)
5297 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
5298 block_stmt_iterator bsi = bsi_last (bb);
5299 tree t = NULL_TREE;
5300 if (!bsi_end_p (bsi))
5301 t = bsi_stmt (bsi);
5303 if (t && need_fake_edge_p (t))
5305 edge e;
5307 e = find_edge (bb, EXIT_BLOCK_PTR);
5308 if (e)
5310 bsi_insert_on_edge (e, build_empty_stmt ());
5311 bsi_commit_edge_inserts ();
5316 /* Now add fake edges to the function exit for any non-constant
5317 calls since there is no way that we can determine if they will
5318 return or not... */
5319 for (i = 0; i < last_bb; i++)
5321 basic_block bb = BASIC_BLOCK (i);
5322 block_stmt_iterator bsi;
5323 tree stmt, last_stmt;
5325 if (!bb)
5326 continue;
5328 if (blocks && !TEST_BIT (blocks, i))
5329 continue;
5331 bsi = bsi_last (bb);
5332 if (!bsi_end_p (bsi))
5334 last_stmt = bsi_stmt (bsi);
5337 stmt = bsi_stmt (bsi);
5338 if (need_fake_edge_p (stmt))
5340 edge e;
5341 /* The handling above of the final block before the
5342 epilogue should be enough to verify that there is
5343 no edge to the exit block in CFG already.
5344 Calling make_edge in such case would cause us to
5345 mark that edge as fake and remove it later. */
5346 #ifdef ENABLE_CHECKING
5347 if (stmt == last_stmt)
5349 e = find_edge (bb, EXIT_BLOCK_PTR);
5350 gcc_assert (e == NULL);
5352 #endif
5354 /* Note that the following may create a new basic block
5355 and renumber the existing basic blocks. */
5356 if (stmt != last_stmt)
5358 e = split_block (bb, stmt);
5359 if (e)
5360 blocks_split++;
5362 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
5364 bsi_prev (&bsi);
5366 while (!bsi_end_p (bsi));
5370 if (blocks_split)
5371 verify_flow_info ();
5373 return blocks_split;
5376 /* Purge dead abnormal call edges from basic block BB. */
5378 bool
5379 tree_purge_dead_abnormal_call_edges (basic_block bb)
5381 bool changed = tree_purge_dead_eh_edges (bb);
5383 if (current_function_has_nonlocal_label)
5385 tree stmt = last_stmt (bb);
5386 edge_iterator ei;
5387 edge e;
5389 if (!(stmt && tree_can_make_abnormal_goto (stmt)))
5390 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5392 if (e->flags & EDGE_ABNORMAL)
5394 remove_edge (e);
5395 changed = true;
5397 else
5398 ei_next (&ei);
5401 /* See tree_purge_dead_eh_edges below. */
5402 if (changed)
5403 free_dominance_info (CDI_DOMINATORS);
5406 return changed;
5409 /* Purge dead EH edges from basic block BB. */
5411 bool
5412 tree_purge_dead_eh_edges (basic_block bb)
5414 bool changed = false;
5415 edge e;
5416 edge_iterator ei;
5417 tree stmt = last_stmt (bb);
5419 if (stmt && tree_can_throw_internal (stmt))
5420 return false;
5422 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5424 if (e->flags & EDGE_EH)
5426 remove_edge (e);
5427 changed = true;
5429 else
5430 ei_next (&ei);
5433 /* Removal of dead EH edges might change dominators of not
5434 just immediate successors. E.g. when bb1 is changed so that
5435 it no longer can throw and bb1->bb3 and bb1->bb4 are dead
5436 eh edges purged by this function in:
5440            1-->2
5441           / \  |
5442          v   v |
5443          3-->4 |
5444           \    v
5445            --->5
5448 idom(bb5) must be recomputed. For now just free the dominance
5449 info. */
5450 if (changed)
5451 free_dominance_info (CDI_DOMINATORS);
5453 return changed;
5456 bool
5457 tree_purge_all_dead_eh_edges (bitmap blocks)
5459 bool changed = false;
5460 unsigned i;
5461 bitmap_iterator bi;
5463 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
5465 changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
5468 return changed;
5471 /* This function is called whenever a new edge is created or
5472 redirected. */
5474 static void
5475 tree_execute_on_growing_pred (edge e)
5477 basic_block bb = e->dest;
5479 if (phi_nodes (bb))
5480 reserve_phi_args_for_new_edge (bb);
5483 /* This function is called immediately before edge E is removed from
5484 the edge vector E->dest->preds. */
5486 static void
5487 tree_execute_on_shrinking_pred (edge e)
5489 if (phi_nodes (e->dest))
5490 remove_phi_args (e);
5493 /*---------------------------------------------------------------------------
5494 Helper functions for Loop versioning
5495 ---------------------------------------------------------------------------*/
5497 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
5498 of 'first'. Both of them are dominated by the 'new_head' basic block. Since
5499 'new_head' was created by splitting 'second's incoming edge, that edge
5500 received phi arguments from split_edge(). Later, an additional edge 'e' was
5501 created to connect 'new_head' and 'first'. This routine now adds, on this
5502 additional edge 'e', the phi args that the new_head-to-second edge received
5503 as part of the edge splitting. */
5506 static void
5507 tree_lv_adjust_loop_header_phi (basic_block first, basic_block second,
5508 basic_block new_head, edge e)
5510 tree phi1, phi2;
5511 edge e2 = find_edge (new_head, second);
5513 /* Because NEW_HEAD has been created by splitting SECOND's incoming
5514 edge, we should always have an edge from NEW_HEAD to SECOND. */
5515 gcc_assert (e2 != NULL);
5517 /* Browse all 'second' basic block phi nodes and add phi args to
5518 edge 'e' for 'first' head. PHI args are always in correct order. */
5520 for (phi2 = phi_nodes (second), phi1 = phi_nodes (first);
5521 phi2 && phi1;
5522 phi2 = PHI_CHAIN (phi2), phi1 = PHI_CHAIN (phi1))
5524 tree def = PHI_ARG_DEF (phi2, e2->dest_idx);
5525 add_phi_arg (phi1, def, e);
5529 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
5530 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
5531 the destination of the ELSE part. */
5532 static void
5533 tree_lv_add_condition_to_bb (basic_block first_head, basic_block second_head,
5534 basic_block cond_bb, void *cond_e)
5536 block_stmt_iterator bsi;
5537 tree goto1 = NULL_TREE;
5538 tree goto2 = NULL_TREE;
5539 tree new_cond_expr = NULL_TREE;
5540 tree cond_expr = (tree) cond_e;
5541 edge e0;
5543 /* Build new conditional expr */
5544 goto1 = build1 (GOTO_EXPR, void_type_node, tree_block_label (first_head));
5545 goto2 = build1 (GOTO_EXPR, void_type_node, tree_block_label (second_head));
5546 new_cond_expr = build3 (COND_EXPR, void_type_node, cond_expr, goto1, goto2);
5548 /* Add new cond in cond_bb. */
5549 bsi = bsi_start (cond_bb);
5550 bsi_insert_after (&bsi, new_cond_expr, BSI_NEW_STMT);
5551 /* Adjust edges appropriately to connect new head with first head
5552 as well as second head. */
5553 e0 = single_succ_edge (cond_bb);
5554 e0->flags &= ~EDGE_FALLTHRU;
5555 e0->flags |= EDGE_FALSE_VALUE;
5558 struct cfg_hooks tree_cfg_hooks = {
5559 "tree",
5560 tree_verify_flow_info,
5561 tree_dump_bb, /* dump_bb */
5562 create_bb, /* create_basic_block */
5563 tree_redirect_edge_and_branch,/* redirect_edge_and_branch */
5564 tree_redirect_edge_and_branch_force,/* redirect_edge_and_branch_force */
5565 remove_bb, /* delete_basic_block */
5566 tree_split_block, /* split_block */
5567 tree_move_block_after, /* move_block_after */
5568 tree_can_merge_blocks_p, /* can_merge_blocks_p */
5569 tree_merge_blocks, /* merge_blocks */
5570 tree_predict_edge, /* predict_edge */
5571 tree_predicted_by_p, /* predicted_by_p */
5572 tree_can_duplicate_bb_p, /* can_duplicate_block_p */
5573 tree_duplicate_bb, /* duplicate_block */
5574 tree_split_edge, /* split_edge */
5575 tree_make_forwarder_block, /* make_forward_block */
5576 NULL, /* tidy_fallthru_edge */
5577 tree_block_ends_with_call_p, /* block_ends_with_call_p */
5578 tree_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
5579 tree_flow_call_edges_add, /* flow_call_edges_add */
5580 tree_execute_on_growing_pred, /* execute_on_growing_pred */
5581 tree_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
5582 tree_duplicate_loop_to_header_edge, /* duplicate loop for trees */
5583 tree_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
5584 tree_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
5585 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
5586 flush_pending_stmts /* flush_pending_stmts */
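/* A minimal sketch (hypothetical) of how this hook table is consumed: a
   tree-level pass installs it with tree_register_cfg_hooks (from
   cfghooks.h/cfghooks.c), after which the generic CFG routines dispatch to
   the functions above.  */
#if 0
static void
example_use_tree_cfg_hooks (edge e)
{
  tree_register_cfg_hooks ();
  split_edge (e);	/* Dispatches to tree_split_edge.  */
}
#endif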
5590 /* Split all critical edges. */
5592 static unsigned int
5593 split_critical_edges (void)
5595 basic_block bb;
5596 edge e;
5597 edge_iterator ei;
5599 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
5600 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
5601 mappings around the calls to split_edge. */
5602 start_recording_case_labels ();
5603 FOR_ALL_BB (bb)
5605 FOR_EACH_EDGE (e, ei, bb->succs)
5606 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
5608 split_edge (e);
5611 end_recording_case_labels ();
5612 return 0;
5615 struct tree_opt_pass pass_split_crit_edges =
5617 "crited", /* name */
5618 NULL, /* gate */
5619 split_critical_edges, /* execute */
5620 NULL, /* sub */
5621 NULL, /* next */
5622 0, /* static_pass_number */
5623 TV_TREE_SPLIT_EDGES, /* tv_id */
5624 PROP_cfg, /* properties required */
5625 PROP_no_crit_edges, /* properties_provided */
5626 0, /* properties_destroyed */
5627 0, /* todo_flags_start */
5628 TODO_dump_func, /* todo_flags_finish */
5629 0 /* letter */
5633 /* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
5634 a temporary, making sure to register it for renaming if necessary,
5635 and finally return the temporary. Put the statements to compute
5636 EXP before the current statement in BSI. */
5638 tree
5639 gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
5641 tree t, new_stmt, orig_stmt;
5643 if (is_gimple_val (exp))
5644 return exp;
5646 t = make_rename_temp (type, NULL);
5647 new_stmt = build2 (MODIFY_EXPR, type, t, exp);
5649 orig_stmt = bsi_stmt (*bsi);
5650 SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
5651 TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
5653 bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
5654 if (in_ssa_p)
5655 mark_new_vars_to_rename (new_stmt);
5657 return t;
5660 /* Build a ternary operation and gimplify it. Emit code before BSI.
5661 Return the gimple_val holding the result. */
5663 tree
5664 gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
5665 tree type, tree a, tree b, tree c)
5667 tree ret;
5669 ret = fold_build3 (code, type, a, b, c);
5670 STRIP_NOPS (ret);
5672 return gimplify_val (bsi, type, ret);
5675 /* Build a binary operation and gimplify it. Emit code before BSI.
5676 Return the gimple_val holding the result. */
5678 tree
5679 gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
5680 tree type, tree a, tree b)
5682 tree ret;
5684 ret = fold_build2 (code, type, a, b);
5685 STRIP_NOPS (ret);
5687 return gimplify_val (bsi, type, ret);
5690 /* Build a unary operation and gimplify it. Emit code before BSI.
5691 Return the gimple_val holding the result. */
5693 tree
5694 gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
5695 tree a)
5697 tree ret;
5699 ret = fold_build1 (code, type, a);
5700 STRIP_NOPS (ret);
5702 return gimplify_val (bsi, type, ret);
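/* A minimal sketch (hypothetical) showing the intended use of the
   gimplify_build* helpers above: emit code computing (A + B) * C before the
   statement at BSI and obtain a GIMPLE value holding the result.  TYPE, A,
   B and C are assumed to be valid GIMPLE operands of type TYPE; the wrapper
   name is illustrative.  */
#if 0
static tree
example_build_sum_times_c (block_stmt_iterator *bsi, tree type,
                           tree a, tree b, tree c)
{
  tree sum = gimplify_build2 (bsi, PLUS_EXPR, type, a, b);
  return gimplify_build2 (bsi, MULT_EXPR, type, sum, c);
}
#endif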
5707 /* Emit return warnings. */
5709 static unsigned int
5710 execute_warn_function_return (void)
5712 #ifdef USE_MAPPED_LOCATION
5713 source_location location;
5714 #else
5715 location_t *locus;
5716 #endif
5717 tree last;
5718 edge e;
5719 edge_iterator ei;
5721 /* If we have a path to EXIT, then we do return. */
5722 if (TREE_THIS_VOLATILE (cfun->decl)
5723 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
5725 #ifdef USE_MAPPED_LOCATION
5726 location = UNKNOWN_LOCATION;
5727 #else
5728 locus = NULL;
5729 #endif
5730 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5732 last = last_stmt (e->src);
5733 if (TREE_CODE (last) == RETURN_EXPR
5734 #ifdef USE_MAPPED_LOCATION
5735 && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
5736 #else
5737 && (locus = EXPR_LOCUS (last)) != NULL)
5738 #endif
5739 break;
5741 #ifdef USE_MAPPED_LOCATION
5742 if (location == UNKNOWN_LOCATION)
5743 location = cfun->function_end_locus;
5744 warning (0, "%H%<noreturn%> function does return", &location);
5745 #else
5746 if (!locus)
5747 locus = &cfun->function_end_locus;
5748 warning (0, "%H%<noreturn%> function does return", locus);
5749 #endif
5752 /* If we see "return;" in some basic block, then we do reach the end
5753 without returning a value. */
5754 else if (warn_return_type
5755 && !TREE_NO_WARNING (cfun->decl)
5756 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
5757 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
5759 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5761 tree last = last_stmt (e->src);
5762 if (TREE_CODE (last) == RETURN_EXPR
5763 && TREE_OPERAND (last, 0) == NULL
5764 && !TREE_NO_WARNING (last))
5766 #ifdef USE_MAPPED_LOCATION
5767 location = EXPR_LOCATION (last);
5768 if (location == UNKNOWN_LOCATION)
5769 location = cfun->function_end_locus;
5770 warning (0, "%Hcontrol reaches end of non-void function", &location);
5771 #else
5772 locus = EXPR_LOCUS (last);
5773 if (!locus)
5774 locus = &cfun->function_end_locus;
5775 warning (0, "%Hcontrol reaches end of non-void function", locus);
5776 #endif
5777 TREE_NO_WARNING (cfun->decl) = 1;
5778 break;
5782 return 0;
5786 /* Given a basic block B which ends with a conditional and has
5787 precisely two successors, determine which of the edges is taken if
5788 the conditional is true and which is taken if the conditional is
5789 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
5791 void
5792 extract_true_false_edges_from_block (basic_block b,
5793 edge *true_edge,
5794 edge *false_edge)
5796 edge e = EDGE_SUCC (b, 0);
5798 if (e->flags & EDGE_TRUE_VALUE)
5800 *true_edge = e;
5801 *false_edge = EDGE_SUCC (b, 1);
5803 else
5805 *false_edge = e;
5806 *true_edge = EDGE_SUCC (b, 1);
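/* A minimal usage sketch (hypothetical).  BB is assumed to end in a
   COND_EXPR with exactly two outgoing edges.  */
#if 0
static void
example_true_false_edges (basic_block bb)
{
  edge true_edge, false_edge;

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  /* TRUE_EDGE->dest is reached when the condition holds,
     FALSE_EDGE->dest when it does not.  */
}
#endif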
5810 struct tree_opt_pass pass_warn_function_return =
5812 NULL, /* name */
5813 NULL, /* gate */
5814 execute_warn_function_return, /* execute */
5815 NULL, /* sub */
5816 NULL, /* next */
5817 0, /* static_pass_number */
5818 0, /* tv_id */
5819 PROP_cfg, /* properties_required */
5820 0, /* properties_provided */
5821 0, /* properties_destroyed */
5822 0, /* todo_flags_start */
5823 0, /* todo_flags_finish */
5824 0 /* letter */
5827 /* Emit noreturn warnings. */
5829 static unsigned int
5830 execute_warn_function_noreturn (void)
5832 if (warn_missing_noreturn
5833 && !TREE_THIS_VOLATILE (cfun->decl)
5834 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
5835 && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl))
5836 warning (OPT_Wmissing_noreturn, "%Jfunction might be possible candidate "
5837 "for attribute %<noreturn%>",
5838 cfun->decl);
5839 return 0;
5842 struct tree_opt_pass pass_warn_function_noreturn =
5844 NULL, /* name */
5845 NULL, /* gate */
5846 execute_warn_function_noreturn, /* execute */
5847 NULL, /* sub */
5848 NULL, /* next */
5849 0, /* static_pass_number */
5850 0, /* tv_id */
5851 PROP_cfg, /* properties_required */
5852 0, /* properties_provided */
5853 0, /* properties_destroyed */
5854 0, /* todo_flags_start */
5855 0, /* todo_flags_finish */
5856 0 /* letter */