Add files that I missed when importing NaCl changes earlier
[gcc/nacl-gcc.git] / gcc / tree-cfg.c
1 /* Control flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007
3 Free Software Foundation, Inc.
4 Contributed by Diego Novillo <dnovillo@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "output.h"
32 #include "flags.h"
33 #include "function.h"
34 #include "expr.h"
35 #include "ggc.h"
36 #include "langhooks.h"
37 #include "diagnostic.h"
38 #include "tree-flow.h"
39 #include "timevar.h"
40 #include "tree-dump.h"
41 #include "tree-pass.h"
42 #include "toplev.h"
43 #include "except.h"
44 #include "cfgloop.h"
45 #include "cfglayout.h"
46 #include "hashtab.h"
47 #include "tree-ssa-propagate.h"
49 /* This file contains functions for building the Control Flow Graph (CFG)
50 for a function tree. */
52 /* Local declarations. */
54 /* Initial capacity for the basic block array. */
55 static const int initial_cfg_capacity = 20;
57    /* This hash table allows us to efficiently look up all CASE_LABEL_EXPRs
58 which use a particular edge. The CASE_LABEL_EXPRs are chained together
59 via their TREE_CHAIN field, which we clear after we're done with the
60 hash table to prevent problems with duplication of SWITCH_EXPRs.
62 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
63 update the case vector in response to edge redirections.
65 Right now this table is set up and torn down at key points in the
66 compilation process. It would be nice if we could make the table
67 more persistent. The key is getting notification of changes to
68 the CFG (particularly edge removal, creation and redirection). */
70 struct edge_to_cases_elt
72 /* The edge itself. Necessary for hashing and equality tests. */
73 edge e;
75 /* The case labels associated with this edge. We link these up via
76 their TREE_CHAIN field, then we wipe out the TREE_CHAIN fields
77 when we destroy the hash table. This prevents problems when copying
78 SWITCH_EXPRs. */
79 tree case_labels;
82 static htab_t edge_to_cases;
84 /* CFG statistics. */
85 struct cfg_stats_d
87 long num_merged_labels;
90 static struct cfg_stats_d cfg_stats;
92 /* Nonzero if we found a computed goto while building basic blocks. */
93 static bool found_computed_goto;
95 /* Basic blocks and flowgraphs. */
96 static basic_block create_bb (void *, void *, basic_block);
97 static void make_blocks (tree);
98 static void factor_computed_gotos (void);
100 /* Edges. */
101 static void make_edges (void);
102 static void make_cond_expr_edges (basic_block);
103 static void make_switch_expr_edges (basic_block);
104 static void make_goto_expr_edges (basic_block);
105 static edge tree_redirect_edge_and_branch (edge, basic_block);
106 static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
107 static unsigned int split_critical_edges (void);
109 /* Various helpers. */
110 static inline bool stmt_starts_bb_p (tree, tree);
111 static int tree_verify_flow_info (void);
112 static void tree_make_forwarder_block (edge);
113 static void tree_cfg2vcg (FILE *);
114 static inline void change_bb_for_stmt (tree t, basic_block bb);
116 /* Flowgraph optimization and cleanup. */
117 static void tree_merge_blocks (basic_block, basic_block);
118 static bool tree_can_merge_blocks_p (basic_block, basic_block);
119 static void remove_bb (basic_block);
120 static edge find_taken_edge_computed_goto (basic_block, tree);
121 static edge find_taken_edge_cond_expr (basic_block, tree);
122 static edge find_taken_edge_switch_expr (basic_block, tree);
123 static tree find_case_label_for_value (tree, tree);
125 void
126 init_empty_tree_cfg (void)
128 /* Initialize the basic block array. */
129 init_flow ();
130 profile_status = PROFILE_ABSENT;
131 n_basic_blocks = NUM_FIXED_BLOCKS;
132 last_basic_block = NUM_FIXED_BLOCKS;
133 basic_block_info = VEC_alloc (basic_block, gc, initial_cfg_capacity);
134 VEC_safe_grow (basic_block, gc, basic_block_info, initial_cfg_capacity);
135 memset (VEC_address (basic_block, basic_block_info), 0,
136 sizeof (basic_block) * initial_cfg_capacity);
138 /* Build a mapping of labels to their associated blocks. */
139 label_to_block_map = VEC_alloc (basic_block, gc, initial_cfg_capacity);
140 VEC_safe_grow (basic_block, gc, label_to_block_map, initial_cfg_capacity);
141 memset (VEC_address (basic_block, label_to_block_map),
142 0, sizeof (basic_block) * initial_cfg_capacity);
144 SET_BASIC_BLOCK (ENTRY_BLOCK, ENTRY_BLOCK_PTR);
145 SET_BASIC_BLOCK (EXIT_BLOCK, EXIT_BLOCK_PTR);
146 ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
147 EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
150 /*---------------------------------------------------------------------------
151 Create basic blocks
152 ---------------------------------------------------------------------------*/
154 /* Entry point to the CFG builder for trees. TP points to the list of
155 statements to be added to the flowgraph. */
157 static void
158 build_tree_cfg (tree *tp)
160 /* Register specific tree functions. */
161 tree_register_cfg_hooks ();
163 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
165 init_empty_tree_cfg ();
167 found_computed_goto = 0;
168 make_blocks (*tp);
170 /* Computed gotos are hell to deal with, especially if there are
171 lots of them with a large number of destinations. So we factor
172 them to a common computed goto location before we build the
173 edge list. After we convert back to normal form, we will un-factor
174 the computed gotos since factoring introduces an unwanted jump. */
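   /* Illustration (a sketch of what factor_computed_gotos below produces,
      using the "gotovar" temporary and the factored label it creates): each
          goto *EXPR;
      is rewritten as
          gotovar = EXPR;  goto <factored_label>;
      and a single shared block
          <factored_label>:  goto *gotovar;
      is appended.  */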
175 if (found_computed_goto)
176 factor_computed_gotos ();
178 /* Make sure there is always at least one block, even if it's empty. */
179 if (n_basic_blocks == NUM_FIXED_BLOCKS)
180 create_empty_bb (ENTRY_BLOCK_PTR);
182 /* Adjust the size of the array. */
183 if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
185 size_t old_size = VEC_length (basic_block, basic_block_info);
186 basic_block *p;
187 VEC_safe_grow (basic_block, gc, basic_block_info, n_basic_blocks);
188 p = VEC_address (basic_block, basic_block_info);
189 memset (&p[old_size], 0,
190 sizeof (basic_block) * (n_basic_blocks - old_size));
193 /* To speed up statement iterator walks, we first purge dead labels. */
194 cleanup_dead_labels ();
196 /* Group case nodes to reduce the number of edges.
197 We do this after cleaning up dead labels because otherwise we miss
198 a lot of obvious case merging opportunities. */
199 group_case_labels ();
201 /* Create the edges of the flowgraph. */
202 make_edges ();
204 /* Debugging dumps. */
206 /* Write the flowgraph to a VCG file. */
208 int local_dump_flags;
209 FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
210 if (vcg_file)
212 tree_cfg2vcg (vcg_file);
213 dump_end (TDI_vcg, vcg_file);
217 #ifdef ENABLE_CHECKING
218 verify_stmts ();
219 #endif
221 /* Dump a textual representation of the flowgraph. */
222 if (dump_file)
223 dump_tree_cfg (dump_file, dump_flags);
226 static unsigned int
227 execute_build_cfg (void)
229 build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
230 return 0;
233 struct tree_opt_pass pass_build_cfg =
235 "cfg", /* name */
236 NULL, /* gate */
237 execute_build_cfg, /* execute */
238 NULL, /* sub */
239 NULL, /* next */
240 0, /* static_pass_number */
241 TV_TREE_CFG, /* tv_id */
242 PROP_gimple_leh, /* properties_required */
243 PROP_cfg, /* properties_provided */
244 0, /* properties_destroyed */
245 0, /* todo_flags_start */
246 TODO_verify_stmts, /* todo_flags_finish */
247 0 /* letter */
250 /* Search the CFG for any computed gotos. If found, factor them to a
251 common computed goto site. Also record the location of that site so
252 that we can un-factor the gotos after we have converted back to
253 normal form. */
255 static void
256 factor_computed_gotos (void)
258 basic_block bb;
259 tree factored_label_decl = NULL;
260 tree var = NULL;
261 tree factored_computed_goto_label = NULL;
262 tree factored_computed_goto = NULL;
264 /* We know there are one or more computed gotos in this function.
265 Examine the last statement in each basic block to see if the block
266 ends with a computed goto. */
268 FOR_EACH_BB (bb)
270 block_stmt_iterator bsi = bsi_last (bb);
271 tree last;
273 if (bsi_end_p (bsi))
274 continue;
275 last = bsi_stmt (bsi);
277 /* Ignore the computed goto we create when we factor the original
278 computed gotos. */
279 if (last == factored_computed_goto)
280 continue;
282 /* If the last statement is a computed goto, factor it. */
283 if (computed_goto_p (last))
285 tree assignment;
287 /* The first time we find a computed goto we need to create
288 the factored goto block and the variable each original
289 	     computed goto will use for its goto destination.  */
290 if (! factored_computed_goto)
292 basic_block new_bb = create_empty_bb (bb);
293 block_stmt_iterator new_bsi = bsi_start (new_bb);
295 /* Create the destination of the factored goto. Each original
296 computed goto will put its desired destination into this
297 variable and jump to the label we create immediately
298 below. */
299 var = create_tmp_var (ptr_type_node, "gotovar");
301 /* Build a label for the new block which will contain the
302 factored computed goto. */
303 factored_label_decl = create_artificial_label ();
304 factored_computed_goto_label
305 = build1 (LABEL_EXPR, void_type_node, factored_label_decl);
306 bsi_insert_after (&new_bsi, factored_computed_goto_label,
307 BSI_NEW_STMT);
309 /* Build our new computed goto. */
310 factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
311 bsi_insert_after (&new_bsi, factored_computed_goto,
312 BSI_NEW_STMT);
315 /* Copy the original computed goto's destination into VAR. */
316 assignment = build2 (MODIFY_EXPR, ptr_type_node,
317 var, GOTO_DESTINATION (last));
318 bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);
320 /* And re-vector the computed goto to the new destination. */
321 GOTO_DESTINATION (last) = factored_label_decl;
327 /* Build a flowgraph for the statement_list STMT_LIST. */
329 static void
330 make_blocks (tree stmt_list)
332 tree_stmt_iterator i = tsi_start (stmt_list);
333 tree stmt = NULL;
334 bool start_new_block = true;
335 bool first_stmt_of_list = true;
336 basic_block bb = ENTRY_BLOCK_PTR;
338 while (!tsi_end_p (i))
340 tree prev_stmt;
342 prev_stmt = stmt;
343 stmt = tsi_stmt (i);
345 /* If the statement starts a new basic block or if we have determined
346 in a previous pass that we need to create a new block for STMT, do
347 so now. */
348 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
350 if (!first_stmt_of_list)
351 stmt_list = tsi_split_statement_list_before (&i);
352 bb = create_basic_block (stmt_list, NULL, bb);
353 start_new_block = false;
356 /* Now add STMT to BB and create the subgraphs for special statement
357 codes. */
358 set_bb_for_stmt (stmt, bb);
360 if (computed_goto_p (stmt))
361 found_computed_goto = true;
363 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
364 next iteration. */
365 if (stmt_ends_bb_p (stmt))
366 start_new_block = true;
368 tsi_next (&i);
369 first_stmt_of_list = false;
374 /* Create and return a new empty basic block after bb AFTER. */
376 static basic_block
377 create_bb (void *h, void *e, basic_block after)
379 basic_block bb;
381 gcc_assert (!e);
383 /* Create and initialize a new basic block. Since alloc_block uses
384 ggc_alloc_cleared to allocate a basic block, we do not have to
385 clear the newly allocated basic block here. */
386 bb = alloc_block ();
388 bb->index = last_basic_block;
389 bb->flags = BB_NEW;
390 bb->stmt_list = h ? (tree) h : alloc_stmt_list ();
392 /* Add the new block to the linked list of blocks. */
393 link_block (bb, after);
395 /* Grow the basic block array if needed. */
396 if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
398 size_t old_size = VEC_length (basic_block, basic_block_info);
399 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
400 basic_block *p;
401 VEC_safe_grow (basic_block, gc, basic_block_info, new_size);
402 p = VEC_address (basic_block, basic_block_info);
403 memset (&p[old_size], 0, sizeof (basic_block) * (new_size - old_size));
406 /* Add the newly created block to the array. */
407 SET_BASIC_BLOCK (last_basic_block, bb);
409 n_basic_blocks++;
410 last_basic_block++;
412 return bb;
416 /*---------------------------------------------------------------------------
417 Edge creation
418 ---------------------------------------------------------------------------*/
420 /* Fold COND_EXPR_COND of each COND_EXPR. */
422 void
423 fold_cond_expr_cond (void)
425 basic_block bb;
427 FOR_EACH_BB (bb)
429 tree stmt = last_stmt (bb);
431 if (stmt
432 && TREE_CODE (stmt) == COND_EXPR)
434 tree cond;
435 bool zerop, onep;
437 fold_defer_overflow_warnings ();
438 cond = fold (COND_EXPR_COND (stmt));
439 zerop = integer_zerop (cond);
440 onep = integer_onep (cond);
441 fold_undefer_overflow_warnings (((zerop || onep)
442 && !TREE_NO_WARNING (stmt)),
443 stmt,
444 WARN_STRICT_OVERFLOW_CONDITIONAL);
445 if (zerop)
446 COND_EXPR_COND (stmt) = boolean_false_node;
447 else if (onep)
448 COND_EXPR_COND (stmt) = boolean_true_node;
453 /* Join all the blocks in the flowgraph. */
455 static void
456 make_edges (void)
458 basic_block bb;
459 struct omp_region *cur_region = NULL;
461 /* Create an edge from entry to the first block with executable
462 statements in it. */
463 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);
465 /* Traverse the basic block array placing edges. */
466 FOR_EACH_BB (bb)
468 tree last = last_stmt (bb);
469 bool fallthru;
471 if (last)
473 enum tree_code code = TREE_CODE (last);
474 switch (code)
476 case GOTO_EXPR:
477 make_goto_expr_edges (bb);
478 fallthru = false;
479 break;
480 case RETURN_EXPR:
481 make_edge (bb, EXIT_BLOCK_PTR, 0);
482 fallthru = false;
483 break;
484 case COND_EXPR:
485 make_cond_expr_edges (bb);
486 fallthru = false;
487 break;
488 case SWITCH_EXPR:
489 make_switch_expr_edges (bb);
490 fallthru = false;
491 break;
492 case RESX_EXPR:
493 make_eh_edges (last);
494 fallthru = false;
495 break;
497 case CALL_EXPR:
498 /* If this function receives a nonlocal goto, then we need to
499 make edges from this call site to all the nonlocal goto
500 handlers. */
501 if (tree_can_make_abnormal_goto (last))
502 make_abnormal_goto_edges (bb, true);
504 /* If this statement has reachable exception handlers, then
505 create abnormal edges to them. */
506 make_eh_edges (last);
508 /* Some calls are known not to return. */
509 fallthru = !(call_expr_flags (last) & ECF_NORETURN);
510 break;
512 case MODIFY_EXPR:
513 if (is_ctrl_altering_stmt (last))
515 /* A MODIFY_EXPR may have a CALL_EXPR on its RHS and the
516 CALL_EXPR may have an abnormal edge. Search the RHS for
517 this case and create any required edges. */
518 if (tree_can_make_abnormal_goto (last))
519 make_abnormal_goto_edges (bb, true);
521 make_eh_edges (last);
523 fallthru = true;
524 break;
526 case OMP_PARALLEL:
527 case OMP_FOR:
528 case OMP_SINGLE:
529 case OMP_MASTER:
530 case OMP_ORDERED:
531 case OMP_CRITICAL:
532 case OMP_SECTION:
533 cur_region = new_omp_region (bb, code, cur_region);
534 fallthru = true;
535 break;
537 case OMP_SECTIONS:
538 cur_region = new_omp_region (bb, code, cur_region);
539 fallthru = false;
540 break;
542 case OMP_RETURN:
543 /* In the case of an OMP_SECTION, the edge will go somewhere
544 other than the next block. This will be created later. */
545 cur_region->exit = bb;
546 fallthru = cur_region->type != OMP_SECTION;
547 cur_region = cur_region->outer;
548 break;
550 case OMP_CONTINUE:
551 cur_region->cont = bb;
552 switch (cur_region->type)
554 case OMP_FOR:
555 	      /* ??? Technically there should be some sort of loopback
556 edge here, but it goes to a block that doesn't exist yet,
557 and without it, updating the ssa form would be a real
558 bear. Fortunately, we don't yet do ssa before expanding
559 these nodes. */
560 break;
562 case OMP_SECTIONS:
563 /* Wire up the edges into and out of the nested sections. */
564 /* ??? Similarly wrt loopback. */
566 struct omp_region *i;
567 for (i = cur_region->inner; i ; i = i->next)
569 gcc_assert (i->type == OMP_SECTION);
570 make_edge (cur_region->entry, i->entry, 0);
571 make_edge (i->exit, bb, EDGE_FALLTHRU);
574 break;
576 default:
577 gcc_unreachable ();
579 fallthru = true;
580 break;
582 default:
583 gcc_assert (!stmt_ends_bb_p (last));
584 fallthru = true;
587 else
588 fallthru = true;
590 if (fallthru)
591 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
594 if (root_omp_region)
595 free_omp_regions ();
597 /* Fold COND_EXPR_COND of each COND_EXPR. */
598 fold_cond_expr_cond ();
600 /* Clean up the graph and warn for unreachable code. */
601 cleanup_tree_cfg ();
605 /* Create the edges for a COND_EXPR starting at block BB.
606 At this point, both clauses must contain only simple gotos. */
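   /* That is, the gimplified form  if (COND) goto <then_label>; else goto <else_label>;
      where the destinations of the two GOTO_EXPRs give THEN_BB and ELSE_BB below.  */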
608 static void
609 make_cond_expr_edges (basic_block bb)
611 tree entry = last_stmt (bb);
612 basic_block then_bb, else_bb;
613 tree then_label, else_label;
614 edge e;
616 gcc_assert (entry);
617 gcc_assert (TREE_CODE (entry) == COND_EXPR);
619 /* Entry basic blocks for each component. */
620 then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
621 else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
622 then_bb = label_to_block (then_label);
623 else_bb = label_to_block (else_label);
625 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
626 #ifdef USE_MAPPED_LOCATION
627 e->goto_locus = EXPR_LOCATION (COND_EXPR_THEN (entry));
628 #else
629 e->goto_locus = EXPR_LOCUS (COND_EXPR_THEN (entry));
630 #endif
631 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
632 if (e)
634 #ifdef USE_MAPPED_LOCATION
635 e->goto_locus = EXPR_LOCATION (COND_EXPR_ELSE (entry));
636 #else
637 e->goto_locus = EXPR_LOCUS (COND_EXPR_ELSE (entry));
638 #endif
642 /* Hashing routine for EDGE_TO_CASES. */
644 static hashval_t
645 edge_to_cases_hash (const void *p)
647 edge e = ((struct edge_to_cases_elt *)p)->e;
649 /* Hash on the edge itself (which is a pointer). */
650 return htab_hash_pointer (e);
653 /* Equality routine for EDGE_TO_CASES; edges are unique, so testing
654 for equality is just a pointer comparison. */
656 static int
657 edge_to_cases_eq (const void *p1, const void *p2)
659 edge e1 = ((struct edge_to_cases_elt *)p1)->e;
660 edge e2 = ((struct edge_to_cases_elt *)p2)->e;
662 return e1 == e2;
665 /* Called for each element in the hash table (P) as we delete the
666 edge to cases hash table.
668 Clear all the TREE_CHAINs to prevent problems with copying of
669 SWITCH_EXPRs and structure sharing rules, then free the hash table
670 element. */
672 static void
673 edge_to_cases_cleanup (void *p)
675 struct edge_to_cases_elt *elt = (struct edge_to_cases_elt *) p;
676 tree t, next;
678 for (t = elt->case_labels; t; t = next)
680 next = TREE_CHAIN (t);
681 TREE_CHAIN (t) = NULL;
683 free (p);
686 /* Start recording information mapping edges to case labels. */
688 void
689 start_recording_case_labels (void)
691 gcc_assert (edge_to_cases == NULL);
693 edge_to_cases = htab_create (37,
694 edge_to_cases_hash,
695 edge_to_cases_eq,
696 edge_to_cases_cleanup);
699 /* Return nonzero if we are recording information for case labels. */
701 static bool
702 recording_case_labels_p (void)
704 return (edge_to_cases != NULL);
707 /* Stop recording information mapping edges to case labels and
708 remove any information we have recorded. */
709 void
710 end_recording_case_labels (void)
712 htab_delete (edge_to_cases);
713 edge_to_cases = NULL;
716 /* Record that CASE_LABEL (a CASE_LABEL_EXPR) references edge E. */
718 static void
719 record_switch_edge (edge e, tree case_label)
721 struct edge_to_cases_elt *elt;
722 void **slot;
724 /* Build a hash table element so we can see if E is already
725 in the table. */
726 elt = XNEW (struct edge_to_cases_elt);
727 elt->e = e;
728 elt->case_labels = case_label;
730 slot = htab_find_slot (edge_to_cases, elt, INSERT);
732 if (*slot == NULL)
734 /* E was not in the hash table. Install E into the hash table. */
735 *slot = (void *)elt;
737 else
739 /* E was already in the hash table. Free ELT as we do not need it
740 anymore. */
741 free (elt);
743 /* Get the entry stored in the hash table. */
744 elt = (struct edge_to_cases_elt *) *slot;
746 /* Add it to the chain of CASE_LABEL_EXPRs referencing E. */
747 TREE_CHAIN (case_label) = elt->case_labels;
748 elt->case_labels = case_label;
752 /* If we are inside a {start,end}_recording_cases block, then return
753 a chain of CASE_LABEL_EXPRs from T which reference E.
755 Otherwise return NULL. */
757 static tree
758 get_cases_for_edge (edge e, tree t)
760 struct edge_to_cases_elt elt, *elt_p;
761 void **slot;
762 size_t i, n;
763 tree vec;
765 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
766 chains available. Return NULL so the caller can detect this case. */
767 if (!recording_case_labels_p ())
768 return NULL;
770 restart:
771 elt.e = e;
772 elt.case_labels = NULL;
773 slot = htab_find_slot (edge_to_cases, &elt, NO_INSERT);
775 if (slot)
777 elt_p = (struct edge_to_cases_elt *)*slot;
778 return elt_p->case_labels;
781 /* If we did not find E in the hash table, then this must be the first
782 time we have been queried for information about E & T. Add all the
783 elements from T to the hash table then perform the query again. */
785 vec = SWITCH_LABELS (t);
786 n = TREE_VEC_LENGTH (vec);
787 for (i = 0; i < n; i++)
789 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
790 basic_block label_bb = label_to_block (lab);
791 record_switch_edge (find_edge (e->src, label_bb), TREE_VEC_ELT (vec, i));
793 goto restart;
796 /* Create the edges for a SWITCH_EXPR starting at block BB.
797 At this point, the switch body has been lowered and the
798 SWITCH_LABELS filled in, so this is in effect a multi-way branch. */
800 static void
801 make_switch_expr_edges (basic_block bb)
803 tree entry = last_stmt (bb);
804 size_t i, n;
805 tree vec;
807 vec = SWITCH_LABELS (entry);
808 n = TREE_VEC_LENGTH (vec);
810 for (i = 0; i < n; ++i)
812 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
813 basic_block label_bb = label_to_block (lab);
814 make_edge (bb, label_bb, 0);
819 /* Return the basic block holding label DEST. */
821 basic_block
822 label_to_block_fn (struct function *ifun, tree dest)
824 int uid = LABEL_DECL_UID (dest);
826 /* We would die hard when faced by an undefined label. Emit a label to
827 the very first basic block. This will hopefully make even the dataflow
828 and undefined variable warnings quite right. */
829 if ((errorcount || sorrycount) && uid < 0)
831 block_stmt_iterator bsi =
832 bsi_start (BASIC_BLOCK (NUM_FIXED_BLOCKS));
833 tree stmt;
835 stmt = build1 (LABEL_EXPR, void_type_node, dest);
836 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
837 uid = LABEL_DECL_UID (dest);
839 if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
840 <= (unsigned int) uid)
841 return NULL;
842 return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
845 /* Create edges for an abnormal goto statement at block BB. If FOR_CALL
846 is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */
848 void
849 make_abnormal_goto_edges (basic_block bb, bool for_call)
851 basic_block target_bb;
852 block_stmt_iterator bsi;
854 FOR_EACH_BB (target_bb)
855 for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
857 tree target = bsi_stmt (bsi);
859 if (TREE_CODE (target) != LABEL_EXPR)
860 break;
862 target = LABEL_EXPR_LABEL (target);
864 /* Make an edge to every label block that has been marked as a
865 potential target for a computed goto or a non-local goto. */
866 if ((FORCED_LABEL (target) && !for_call)
867 || (DECL_NONLOCAL (target) && for_call))
869 make_edge (bb, target_bb, EDGE_ABNORMAL);
870 break;
875 /* Create edges for a goto statement at block BB. */
877 static void
878 make_goto_expr_edges (basic_block bb)
880 block_stmt_iterator last = bsi_last (bb);
881 tree goto_t = bsi_stmt (last);
883 /* A simple GOTO creates normal edges. */
884 if (simple_goto_p (goto_t))
886 tree dest = GOTO_DESTINATION (goto_t);
887 edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
888 #ifdef USE_MAPPED_LOCATION
889 e->goto_locus = EXPR_LOCATION (goto_t);
890 #else
891 e->goto_locus = EXPR_LOCUS (goto_t);
892 #endif
893 bsi_remove (&last, true);
894 return;
897 /* A computed GOTO creates abnormal edges. */
898 make_abnormal_goto_edges (bb, false);
902 /*---------------------------------------------------------------------------
903 Flowgraph analysis
904 ---------------------------------------------------------------------------*/
906 /* Clean up useless labels in basic blocks.  This is something we wish
907 to do early because it allows us to group case labels before creating
908 the edges for the CFG, and it speeds up block statement iterators in
909 all passes later on.
910    We only run this pass once; running it more than once is probably not
911 profitable. */
913 /* A map from basic block index to the leading label of that block. */
914 static tree *label_for_bb;
916 /* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
917 static void
918 update_eh_label (struct eh_region *region)
920 tree old_label = get_eh_region_tree_label (region);
921 if (old_label)
923 tree new_label;
924 basic_block bb = label_to_block (old_label);
926 /* ??? After optimizing, there may be EH regions with labels
927 that have already been removed from the function body, so
928 there is no basic block for them. */
929 if (! bb)
930 return;
932 new_label = label_for_bb[bb->index];
933 set_eh_region_tree_label (region, new_label);
937 /* Given LABEL return the first label in the same basic block. */
938 static tree
939 main_block_label (tree label)
941 basic_block bb = label_to_block (label);
943   /* label_to_block may have inserted an undefined label into the chain.  */
944 if (!label_for_bb[bb->index])
945 label_for_bb[bb->index] = label;
946 return label_for_bb[bb->index];
949 /* Clean up redundant labels.  This is a three-step process:
950 1) Find the leading label for each block.
951 2) Redirect all references to labels to the leading labels.
952      3) Clean up all useless labels.  */
954 void
955 cleanup_dead_labels (void)
957 basic_block bb;
958 label_for_bb = XCNEWVEC (tree, last_basic_block);
960 /* Find a suitable label for each block. We use the first user-defined
961 label if there is one, or otherwise just the first label we see. */
962 FOR_EACH_BB (bb)
964 block_stmt_iterator i;
966 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
968 tree label, stmt = bsi_stmt (i);
970 if (TREE_CODE (stmt) != LABEL_EXPR)
971 break;
973 label = LABEL_EXPR_LABEL (stmt);
975 /* If we have not yet seen a label for the current block,
976 remember this one and see if there are more labels. */
977 if (! label_for_bb[bb->index])
979 label_for_bb[bb->index] = label;
980 continue;
983 /* If we did see a label for the current block already, but it
984 is an artificially created label, replace it if the current
985 label is a user defined label. */
986 if (! DECL_ARTIFICIAL (label)
987 && DECL_ARTIFICIAL (label_for_bb[bb->index]))
989 label_for_bb[bb->index] = label;
990 break;
995 /* Now redirect all jumps/branches to the selected label.
996 First do so for each block ending in a control statement. */
997 FOR_EACH_BB (bb)
999 tree stmt = last_stmt (bb);
1000 if (!stmt)
1001 continue;
1003 switch (TREE_CODE (stmt))
1005 case COND_EXPR:
1007 tree true_branch, false_branch;
1009 true_branch = COND_EXPR_THEN (stmt);
1010 false_branch = COND_EXPR_ELSE (stmt);
1012 GOTO_DESTINATION (true_branch)
1013 = main_block_label (GOTO_DESTINATION (true_branch));
1014 GOTO_DESTINATION (false_branch)
1015 = main_block_label (GOTO_DESTINATION (false_branch));
1017 break;
1020 case SWITCH_EXPR:
1022 size_t i;
1023 tree vec = SWITCH_LABELS (stmt);
1024 size_t n = TREE_VEC_LENGTH (vec);
1026 /* Replace all destination labels. */
1027 for (i = 0; i < n; ++i)
1029 tree elt = TREE_VEC_ELT (vec, i);
1030 tree label = main_block_label (CASE_LABEL (elt));
1031 CASE_LABEL (elt) = label;
1033 break;
1036 /* We have to handle GOTO_EXPRs until they're removed, and we don't
1037 remove them until after we've created the CFG edges. */
1038 case GOTO_EXPR:
1039 if (! computed_goto_p (stmt))
1041 GOTO_DESTINATION (stmt)
1042 = main_block_label (GOTO_DESTINATION (stmt));
1043 break;
1046 default:
1047 break;
1051 for_each_eh_region (update_eh_label);
1053 /* Finally, purge dead labels. All user-defined labels and labels that
1054 can be the target of non-local gotos and labels which have their
1055 address taken are preserved. */
1056 FOR_EACH_BB (bb)
1058 block_stmt_iterator i;
1059 tree label_for_this_bb = label_for_bb[bb->index];
1061 if (! label_for_this_bb)
1062 continue;
1064 for (i = bsi_start (bb); !bsi_end_p (i); )
1066 tree label, stmt = bsi_stmt (i);
1068 if (TREE_CODE (stmt) != LABEL_EXPR)
1069 break;
1071 label = LABEL_EXPR_LABEL (stmt);
1073 if (label == label_for_this_bb
1074 || ! DECL_ARTIFICIAL (label)
1075 || DECL_NONLOCAL (label)
1076 || FORCED_LABEL (label))
1077 bsi_next (&i);
1078 else
1079 bsi_remove (&i, true);
1083 free (label_for_bb);
1086 /* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1087 and scan the sorted vector of cases. Combine the ones jumping to the
1088 same label.
1089 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1091 void
1092 group_case_labels (void)
1094 basic_block bb;
1096 FOR_EACH_BB (bb)
1098 tree stmt = last_stmt (bb);
1099 if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
1101 tree labels = SWITCH_LABELS (stmt);
1102 int old_size = TREE_VEC_LENGTH (labels);
1103 int i, j, new_size = old_size;
1104 tree default_case = TREE_VEC_ELT (labels, old_size - 1);
1105 tree default_label;
1107 /* The default label is always the last case in a switch
1108 statement after gimplification. */
1109 default_label = CASE_LABEL (default_case);
1111 /* Look for possible opportunities to merge cases.
1112 Ignore the last element of the label vector because it
1113 must be the default case. */
1114 i = 0;
1115 while (i < old_size - 1)
1117 tree base_case, base_label, base_high;
1118 base_case = TREE_VEC_ELT (labels, i);
1120 gcc_assert (base_case);
1121 base_label = CASE_LABEL (base_case);
1123 /* Discard cases that have the same destination as the
1124 default case. */
1125 if (base_label == default_label)
1127 TREE_VEC_ELT (labels, i) = NULL_TREE;
1128 i++;
1129 new_size--;
1130 continue;
1133 base_high = CASE_HIGH (base_case) ?
1134 CASE_HIGH (base_case) : CASE_LOW (base_case);
1135 i++;
1136 /* Try to merge case labels. Break out when we reach the end
1137 of the label vector or when we cannot merge the next case
1138 label with the current one. */
1139 while (i < old_size - 1)
1141 tree merge_case = TREE_VEC_ELT (labels, i);
1142 tree merge_label = CASE_LABEL (merge_case);
1143 tree t = int_const_binop (PLUS_EXPR, base_high,
1144 integer_one_node, 1);
1146 /* Merge the cases if they jump to the same place,
1147 and their ranges are consecutive. */
1148 if (merge_label == base_label
1149 && tree_int_cst_equal (CASE_LOW (merge_case), t))
1151 base_high = CASE_HIGH (merge_case) ?
1152 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1153 CASE_HIGH (base_case) = base_high;
1154 TREE_VEC_ELT (labels, i) = NULL_TREE;
1155 new_size--;
1156 i++;
1158 else
1159 break;
1163 /* Compress the case labels in the label vector, and adjust the
1164 length of the vector. */
1165 for (i = 0, j = 0; i < new_size; i++)
1167 while (! TREE_VEC_ELT (labels, j))
1168 j++;
1169 TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
1171 TREE_VEC_LENGTH (labels) = new_size;
1176 /* Checks whether we can merge block B into block A. */
1178 static bool
1179 tree_can_merge_blocks_p (basic_block a, basic_block b)
1181 tree stmt;
1182 block_stmt_iterator bsi;
1183 tree phi;
1185 if (!single_succ_p (a))
1186 return false;
1188 if (single_succ_edge (a)->flags & EDGE_ABNORMAL)
1189 return false;
1191 if (single_succ (a) != b)
1192 return false;
1194 if (!single_pred_p (b))
1195 return false;
1197 if (b == EXIT_BLOCK_PTR)
1198 return false;
1200 /* If A ends by a statement causing exceptions or something similar, we
1201 cannot merge the blocks. */
1202 stmt = last_stmt (a);
1203 if (stmt && stmt_ends_bb_p (stmt))
1204 return false;
1206 /* Do not allow a block with only a non-local label to be merged. */
1207 if (stmt && TREE_CODE (stmt) == LABEL_EXPR
1208 && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
1209 return false;
1211 /* It must be possible to eliminate all phi nodes in B. If ssa form
1212 is not up-to-date, we cannot eliminate any phis. */
1213 phi = phi_nodes (b);
1214 if (phi)
1216 if (need_ssa_update_p ())
1217 return false;
1219 for (; phi; phi = PHI_CHAIN (phi))
1220 if (!is_gimple_reg (PHI_RESULT (phi))
1221 && !may_propagate_copy (PHI_RESULT (phi), PHI_ARG_DEF (phi, 0)))
1222 return false;
1225 /* Do not remove user labels. */
1226 for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
1228 stmt = bsi_stmt (bsi);
1229 if (TREE_CODE (stmt) != LABEL_EXPR)
1230 break;
1231 if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
1232 return false;
1235 /* Protect the loop latches. */
1236 if (current_loops
1237 && b->loop_father->latch == b)
1238 return false;
1240 return true;
1243 /* Replaces all uses of NAME by VAL. */
1245 void
1246 replace_uses_by (tree name, tree val)
1248 imm_use_iterator imm_iter;
1249 use_operand_p use;
1250 tree stmt;
1251 edge e;
1252 unsigned i;
1255 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1257 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1259 replace_exp (use, val);
1261 if (TREE_CODE (stmt) == PHI_NODE)
1263 e = PHI_ARG_EDGE (stmt, PHI_ARG_INDEX_FROM_USE (use));
1264 if (e->flags & EDGE_ABNORMAL)
1266 /* This can only occur for virtual operands, since
1267 		 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
1268 would prevent replacement. */
1269 gcc_assert (!is_gimple_reg (name));
1270 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1274 if (TREE_CODE (stmt) != PHI_NODE)
1276 tree rhs;
1278 fold_stmt_inplace (stmt);
1279 rhs = get_rhs (stmt);
1280 if (TREE_CODE (rhs) == ADDR_EXPR)
1281 recompute_tree_invariant_for_addr_expr (rhs);
1283 maybe_clean_or_replace_eh_stmt (stmt, stmt);
1284 mark_new_vars_to_rename (stmt);
1288 gcc_assert (num_imm_uses (name) == 0);
1290 /* Also update the trees stored in loop structures. */
1291 if (current_loops)
1293 struct loop *loop;
1295 for (i = 0; i < current_loops->num; i++)
1297 loop = current_loops->parray[i];
1298 if (loop)
1299 substitute_in_loop_info (loop, name, val);
1304 /* Merge block B into block A. */
1306 static void
1307 tree_merge_blocks (basic_block a, basic_block b)
1309 block_stmt_iterator bsi;
1310 tree_stmt_iterator last;
1311 tree phi;
1313 if (dump_file)
1314 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1316 /* Remove all single-valued PHI nodes from block B of the form
1317 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1318 bsi = bsi_last (a);
1319 for (phi = phi_nodes (b); phi; phi = phi_nodes (b))
1321 tree def = PHI_RESULT (phi), use = PHI_ARG_DEF (phi, 0);
1322 tree copy;
1323 bool may_replace_uses = may_propagate_copy (def, use);
1325 /* In case we have loops to care about, do not propagate arguments of
1326 loop closed ssa phi nodes. */
1327 if (current_loops
1328 && is_gimple_reg (def)
1329 && TREE_CODE (use) == SSA_NAME
1330 && a->loop_father != b->loop_father)
1331 may_replace_uses = false;
1333 if (!may_replace_uses)
1335 gcc_assert (is_gimple_reg (def));
1337 /* Note that just emitting the copies is fine -- there is no problem
1338 with ordering of phi nodes. This is because A is the single
1339 predecessor of B, therefore results of the phi nodes cannot
1340 appear as arguments of the phi nodes. */
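	  /* Illustration: a node such as  x_1 = PHI <y_2(A)>  in B becomes the
	     copy  x_1 = y_2;  emitted at the end of A when its uses cannot
	     simply be rewritten to Y_2 directly.  */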
1341 copy = build2 (MODIFY_EXPR, void_type_node, def, use);
1342 bsi_insert_after (&bsi, copy, BSI_NEW_STMT);
1343 SET_PHI_RESULT (phi, NULL_TREE);
1344 SSA_NAME_DEF_STMT (def) = copy;
1346 else
1347 replace_uses_by (def, use);
1349 remove_phi_node (phi, NULL);
1352 /* Ensure that B follows A. */
1353 move_block_after (b, a);
1355 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1356 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1358 /* Remove labels from B and set bb_for_stmt to A for other statements. */
1359 for (bsi = bsi_start (b); !bsi_end_p (bsi);)
1361 if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
1363 tree label = bsi_stmt (bsi);
1365 bsi_remove (&bsi, false);
1366 /* Now that we can thread computed gotos, we might have
1367 	     a situation where we have a forced label in block B.
1368 However, the label at the start of block B might still be
1369 used in other ways (think about the runtime checking for
1370 	     Fortran assigned gotos).  So we cannot just delete the
1371 label. Instead we move the label to the start of block A. */
1372 if (FORCED_LABEL (LABEL_EXPR_LABEL (label)))
1374 block_stmt_iterator dest_bsi = bsi_start (a);
1375 bsi_insert_before (&dest_bsi, label, BSI_NEW_STMT);
1378 else
1380 change_bb_for_stmt (bsi_stmt (bsi), a);
1381 bsi_next (&bsi);
1385 /* Merge the chains. */
1386 last = tsi_last (a->stmt_list);
1387 tsi_link_after (&last, b->stmt_list, TSI_NEW_STMT);
1388 b->stmt_list = NULL;
1392 /* Return the one of two successors of BB that is not reached by a
1393    complex edge, if there is one.  Else, return BB.  We use
1394 this in optimizations that use post-dominators for their heuristics,
1395 to catch the cases in C++ where function calls are involved. */
1397 basic_block
1398 single_noncomplex_succ (basic_block bb)
1400 edge e0, e1;
1401 if (EDGE_COUNT (bb->succs) != 2)
1402 return bb;
1404 e0 = EDGE_SUCC (bb, 0);
1405 e1 = EDGE_SUCC (bb, 1);
1406 if (e0->flags & EDGE_COMPLEX)
1407 return e1->dest;
1408 if (e1->flags & EDGE_COMPLEX)
1409 return e0->dest;
1411 return bb;
1415 /* Walk the function tree removing unnecessary statements.
1417 * Empty statement nodes are removed
1419 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1421 * Unnecessary COND_EXPRs are removed
1423 * Some unnecessary BIND_EXPRs are removed
1425 Clearly more work could be done. The trick is doing the analysis
1426 and removal fast enough to be a net improvement in compile times.
1428    Note that when we remove a control structure such as a COND_EXPR,
1429 BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1430 to ensure we eliminate all the useless code. */
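   For instance (a sketch): once both arms of a COND_EXPR are emptied, the
   COND_EXPR itself becomes an empty statement, which may in turn make an
   enclosing BIND_EXPR or TRY block removable on the next iteration.  */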
1432 struct rus_data
1434   tree *last_goto;	/* Last GOTO_EXPR seen; deleted if the label it jumps
			   to turns out to follow immediately.  */
1435   bool repeat;		/* True if another pass over the function is needed.  */
1436   bool may_throw;	/* True if the statements scanned so far may throw.  */
1437   bool may_branch;	/* True if the statements scanned so far may branch.  */
1438   bool has_label;	/* True if a LABEL_EXPR was seen.  */
1441 static void remove_useless_stmts_1 (tree *, struct rus_data *);
1443 static bool
1444 remove_useless_stmts_warn_notreached (tree stmt)
1446 if (EXPR_HAS_LOCATION (stmt))
1448 location_t loc = EXPR_LOCATION (stmt);
1449 if (LOCATION_LINE (loc) > 0)
1451 warning (0, "%Hwill never be executed", &loc);
1452 return true;
1456 switch (TREE_CODE (stmt))
1458 case STATEMENT_LIST:
1460 tree_stmt_iterator i;
1461 for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
1462 if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
1463 return true;
1465 break;
1467 case COND_EXPR:
1468 if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
1469 return true;
1470 if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
1471 return true;
1472 if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
1473 return true;
1474 break;
1476 case TRY_FINALLY_EXPR:
1477 case TRY_CATCH_EXPR:
1478 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
1479 return true;
1480 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
1481 return true;
1482 break;
1484 case CATCH_EXPR:
1485 return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
1486 case EH_FILTER_EXPR:
1487 return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
1488 case BIND_EXPR:
1489 return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));
1491 default:
1492 /* Not a live container. */
1493 break;
1496 return false;
1499 static void
1500 remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
1502 tree then_clause, else_clause, cond;
1503 bool save_has_label, then_has_label, else_has_label;
1505 save_has_label = data->has_label;
1506 data->has_label = false;
1507 data->last_goto = NULL;
1509 remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);
1511 then_has_label = data->has_label;
1512 data->has_label = false;
1513 data->last_goto = NULL;
1515 remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);
1517 else_has_label = data->has_label;
1518 data->has_label = save_has_label | then_has_label | else_has_label;
1520 then_clause = COND_EXPR_THEN (*stmt_p);
1521 else_clause = COND_EXPR_ELSE (*stmt_p);
1522 cond = fold (COND_EXPR_COND (*stmt_p));
1524 /* If neither arm does anything at all, we can remove the whole IF. */
1525 if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
1527 *stmt_p = build_empty_stmt ();
1528 data->repeat = true;
1531 /* If there are no reachable statements in an arm, then we can
1532 zap the entire conditional. */
1533 else if (integer_nonzerop (cond) && !else_has_label)
1535 if (warn_notreached)
1536 remove_useless_stmts_warn_notreached (else_clause);
1537 *stmt_p = then_clause;
1538 data->repeat = true;
1540 else if (integer_zerop (cond) && !then_has_label)
1542 if (warn_notreached)
1543 remove_useless_stmts_warn_notreached (then_clause);
1544 *stmt_p = else_clause;
1545 data->repeat = true;
1548 /* Check a couple of simple things on then/else with single stmts. */
1549 else
1551 tree then_stmt = expr_only (then_clause);
1552 tree else_stmt = expr_only (else_clause);
1554 /* Notice branches to a common destination. */
1555 if (then_stmt && else_stmt
1556 && TREE_CODE (then_stmt) == GOTO_EXPR
1557 && TREE_CODE (else_stmt) == GOTO_EXPR
1558 && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
1560 *stmt_p = then_stmt;
1561 data->repeat = true;
1564 /* If the THEN/ELSE clause merely assigns a value to a variable or
1565 parameter which is already known to contain that value, then
1566 remove the useless THEN/ELSE clause. */
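      /* Two instances handled below (sketch): in  if (x) ...; else x = 0;
	 the ELSE arm is dropped because X is already zero on that path, and
	 in  if (x == 5) x = 5; else ...;  the THEN arm is dropped.  */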
1567 else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1569 if (else_stmt
1570 && TREE_CODE (else_stmt) == MODIFY_EXPR
1571 && TREE_OPERAND (else_stmt, 0) == cond
1572 && integer_zerop (TREE_OPERAND (else_stmt, 1)))
1573 COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
1575 else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
1576 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1577 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1578 && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
1580 tree stmt = (TREE_CODE (cond) == EQ_EXPR
1581 ? then_stmt : else_stmt);
1582 tree *location = (TREE_CODE (cond) == EQ_EXPR
1583 ? &COND_EXPR_THEN (*stmt_p)
1584 : &COND_EXPR_ELSE (*stmt_p));
1586 if (stmt
1587 && TREE_CODE (stmt) == MODIFY_EXPR
1588 && TREE_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
1589 && TREE_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
1590 *location = alloc_stmt_list ();
1594 /* Protect GOTOs in the arm of COND_EXPRs from being removed. They
1595 would be re-introduced during lowering. */
1596 data->last_goto = NULL;
1600 static void
1601 remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
1603 bool save_may_branch, save_may_throw;
1604 bool this_may_branch, this_may_throw;
1606 /* Collect may_branch and may_throw information for the body only. */
1607 save_may_branch = data->may_branch;
1608 save_may_throw = data->may_throw;
1609 data->may_branch = false;
1610 data->may_throw = false;
1611 data->last_goto = NULL;
1613 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1615 this_may_branch = data->may_branch;
1616 this_may_throw = data->may_throw;
1617 data->may_branch |= save_may_branch;
1618 data->may_throw |= save_may_throw;
1619 data->last_goto = NULL;
1621 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1623 /* If the body is empty, then we can emit the FINALLY block without
1624 the enclosing TRY_FINALLY_EXPR. */
1625 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
1627 *stmt_p = TREE_OPERAND (*stmt_p, 1);
1628 data->repeat = true;
1631 /* If the handler is empty, then we can emit the TRY block without
1632 the enclosing TRY_FINALLY_EXPR. */
1633 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1635 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1636 data->repeat = true;
1639   /* If the body neither throws nor branches, then we can safely
1640 string the TRY and FINALLY blocks together. */
1641 else if (!this_may_branch && !this_may_throw)
1643 tree stmt = *stmt_p;
1644 *stmt_p = TREE_OPERAND (stmt, 0);
1645 append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
1646 data->repeat = true;
1651 static void
1652 remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
1654 bool save_may_throw, this_may_throw;
1655 tree_stmt_iterator i;
1656 tree stmt;
1658 /* Collect may_throw information for the body only. */
1659 save_may_throw = data->may_throw;
1660 data->may_throw = false;
1661 data->last_goto = NULL;
1663 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1665 this_may_throw = data->may_throw;
1666 data->may_throw = save_may_throw;
1668 /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
1669 if (!this_may_throw)
1671 if (warn_notreached)
1672 remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
1673 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1674 data->repeat = true;
1675 return;
1678 /* Process the catch clause specially. We may be able to tell that
1679 no exceptions propagate past this point. */
1681 this_may_throw = true;
1682 i = tsi_start (TREE_OPERAND (*stmt_p, 1));
1683 stmt = tsi_stmt (i);
1684 data->last_goto = NULL;
1686 switch (TREE_CODE (stmt))
1688 case CATCH_EXPR:
1689 for (; !tsi_end_p (i); tsi_next (&i))
1691 stmt = tsi_stmt (i);
1692 /* If we catch all exceptions, then the body does not
1693 propagate exceptions past this point. */
1694 if (CATCH_TYPES (stmt) == NULL)
1695 this_may_throw = false;
1696 data->last_goto = NULL;
1697 remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
1699 break;
1701 case EH_FILTER_EXPR:
1702 if (EH_FILTER_MUST_NOT_THROW (stmt))
1703 this_may_throw = false;
1704 else if (EH_FILTER_TYPES (stmt) == NULL)
1705 this_may_throw = false;
1706 remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
1707 break;
1709 default:
1710 /* Otherwise this is a cleanup. */
1711 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1713 /* If the cleanup is empty, then we can emit the TRY block without
1714 the enclosing TRY_CATCH_EXPR. */
1715 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1717 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1718 data->repeat = true;
1720 break;
1722 data->may_throw |= this_may_throw;
1726 static void
1727 remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
1729 tree block;
1731 /* First remove anything underneath the BIND_EXPR. */
1732 remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);
1734 /* If the BIND_EXPR has no variables, then we can pull everything
1735 up one level and remove the BIND_EXPR, unless this is the toplevel
1736 BIND_EXPR for the current function or an inlined function.
1738 When this situation occurs we will want to apply this
1739 optimization again. */
1740 block = BIND_EXPR_BLOCK (*stmt_p);
1741 if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
1742 && *stmt_p != DECL_SAVED_TREE (current_function_decl)
1743 && (! block
1744 || ! BLOCK_ABSTRACT_ORIGIN (block)
1745 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1746 != FUNCTION_DECL)))
1748 *stmt_p = BIND_EXPR_BODY (*stmt_p);
1749 data->repeat = true;
1754 static void
1755 remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
1757 tree dest = GOTO_DESTINATION (*stmt_p);
1759 data->may_branch = true;
1760 data->last_goto = NULL;
1762 /* Record the last goto expr, so that we can delete it if unnecessary. */
1763 if (TREE_CODE (dest) == LABEL_DECL)
1764 data->last_goto = stmt_p;
1768 static void
1769 remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
1771 tree label = LABEL_EXPR_LABEL (*stmt_p);
1773 data->has_label = true;
1775 /* We do want to jump across non-local label receiver code. */
1776 if (DECL_NONLOCAL (label))
1777 data->last_goto = NULL;
1779 else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
1781 *data->last_goto = build_empty_stmt ();
1782 data->repeat = true;
1785 /* ??? Add something here to delete unused labels. */
1789 /* If CALL calls a "const" or "pure" function, clear TREE_SIDE_EFFECTS on the
1790    CALL_EXPR.  This allows us to eliminate redundant or useless
1791    calls to "const" functions.
1793    The gimplifier already does the same operation, but we may notice functions
1794    being const or pure only after their calls have been gimplified, so we need
1795    to update the flags.  */
1797 static void
1798 update_call_expr_flags (tree call)
1800 tree decl = get_callee_fndecl (call);
1801 if (!decl)
1802 return;
1803 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1804 TREE_SIDE_EFFECTS (call) = 0;
1805 if (TREE_NOTHROW (decl))
1806 TREE_NOTHROW (call) = 1;
1810 /* T is a CALL_EXPR.  Set current_function_calls_* flags.  */
1812 void
1813 notice_special_calls (tree t)
1815 int flags = call_expr_flags (t);
1817 if (flags & ECF_MAY_BE_ALLOCA)
1818 current_function_calls_alloca = true;
1819 if (flags & ECF_RETURNS_TWICE)
1820 current_function_calls_setjmp = true;
1824 /* Clear flags set by notice_special_calls. Used by dead code removal
1825 to update the flags. */
1827 void
1828 clear_special_calls (void)
1830 current_function_calls_alloca = false;
1831 current_function_calls_setjmp = false;
1835 static void
1836 remove_useless_stmts_1 (tree *tp, struct rus_data *data)
1838 tree t = *tp, op;
1840 switch (TREE_CODE (t))
1842 case COND_EXPR:
1843 remove_useless_stmts_cond (tp, data);
1844 break;
1846 case TRY_FINALLY_EXPR:
1847 remove_useless_stmts_tf (tp, data);
1848 break;
1850 case TRY_CATCH_EXPR:
1851 remove_useless_stmts_tc (tp, data);
1852 break;
1854 case BIND_EXPR:
1855 remove_useless_stmts_bind (tp, data);
1856 break;
1858 case GOTO_EXPR:
1859 remove_useless_stmts_goto (tp, data);
1860 break;
1862 case LABEL_EXPR:
1863 remove_useless_stmts_label (tp, data);
1864 break;
1866 case RETURN_EXPR:
1867 fold_stmt (tp);
1868 data->last_goto = NULL;
1869 data->may_branch = true;
1870 break;
1872 case CALL_EXPR:
1873 fold_stmt (tp);
1874 data->last_goto = NULL;
1875 notice_special_calls (t);
1876 update_call_expr_flags (t);
1877 if (tree_could_throw_p (t))
1878 data->may_throw = true;
1879 break;
1881 case MODIFY_EXPR:
1882 data->last_goto = NULL;
1883 fold_stmt (tp);
1884 op = get_call_expr_in (t);
1885 if (op)
1887 update_call_expr_flags (op);
1888 notice_special_calls (op);
1890 if (tree_could_throw_p (t))
1891 data->may_throw = true;
1892 break;
1894 case STATEMENT_LIST:
1896 tree_stmt_iterator i = tsi_start (t);
1897 while (!tsi_end_p (i))
1899 t = tsi_stmt (i);
1900 if (IS_EMPTY_STMT (t))
1902 tsi_delink (&i);
1903 continue;
1906 remove_useless_stmts_1 (tsi_stmt_ptr (i), data);
1908 t = tsi_stmt (i);
1909 if (TREE_CODE (t) == STATEMENT_LIST)
1911 tsi_link_before (&i, t, TSI_SAME_STMT);
1912 tsi_delink (&i);
1914 else
1915 tsi_next (&i);
1918 break;
1919 case ASM_EXPR:
1920 fold_stmt (tp);
1921 data->last_goto = NULL;
1922 break;
1924 default:
1925 data->last_goto = NULL;
1926 break;
1930 static unsigned int
1931 remove_useless_stmts (void)
1933 struct rus_data data;
1935 clear_special_calls ();
1939 memset (&data, 0, sizeof (data));
1940 remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
1942 while (data.repeat);
1943 return 0;
1947 struct tree_opt_pass pass_remove_useless_stmts =
1949 "useless", /* name */
1950 NULL, /* gate */
1951 remove_useless_stmts, /* execute */
1952 NULL, /* sub */
1953 NULL, /* next */
1954 0, /* static_pass_number */
1955 0, /* tv_id */
1956 PROP_gimple_any, /* properties_required */
1957 0, /* properties_provided */
1958 0, /* properties_destroyed */
1959 0, /* todo_flags_start */
1960 TODO_dump_func, /* todo_flags_finish */
1961 0 /* letter */
1964 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1966 static void
1967 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1969 tree phi;
1971 /* Since this block is no longer reachable, we can just delete all
1972 of its PHI nodes. */
1973 phi = phi_nodes (bb);
1974 while (phi)
1976 tree next = PHI_CHAIN (phi);
1977 remove_phi_node (phi, NULL_TREE);
1978 phi = next;
1981 /* Remove edges to BB's successors. */
1982 while (EDGE_COUNT (bb->succs) > 0)
1983 remove_edge (EDGE_SUCC (bb, 0));
1987 /* Remove statements of basic block BB. */
1989 static void
1990 remove_bb (basic_block bb)
1992 block_stmt_iterator i;
1993 #ifdef USE_MAPPED_LOCATION
1994 source_location loc = UNKNOWN_LOCATION;
1995 #else
1996 source_locus loc = 0;
1997 #endif
1999 if (dump_file)
2001 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2002 if (dump_flags & TDF_DETAILS)
2004 dump_bb (bb, dump_file, 0);
2005 fprintf (dump_file, "\n");
2009 /* If we remove the header or the latch of a loop, mark the loop for
2010 removal by setting its header and latch to NULL. */
2011 if (current_loops)
2013 struct loop *loop = bb->loop_father;
2015 if (loop->latch == bb
2016 || loop->header == bb)
2018 loop->latch = NULL;
2019 loop->header = NULL;
2021 /* Also clean up the information associated with the loop. Updating
2022 it would waste time. More importantly, it may refer to ssa
2023 	     names that were defined in other removed basic blocks -- these
2024 ssa names are now removed and invalid. */
2025 free_numbers_of_iterations_estimates_loop (loop);
2029 /* Remove all the instructions in the block. */
2030 for (i = bsi_start (bb); !bsi_end_p (i);)
2032 tree stmt = bsi_stmt (i);
2033 if (TREE_CODE (stmt) == LABEL_EXPR
2034 && (FORCED_LABEL (LABEL_EXPR_LABEL (stmt))
2035 || DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt))))
2037 basic_block new_bb;
2038 block_stmt_iterator new_bsi;
2040 /* A non-reachable non-local label may still be referenced.
2041 But it no longer needs to carry the extra semantics of
2042 non-locality. */
2043 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
2045 DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)) = 0;
2046 FORCED_LABEL (LABEL_EXPR_LABEL (stmt)) = 1;
2049 new_bb = bb->prev_bb;
2050 new_bsi = bsi_start (new_bb);
2051 bsi_remove (&i, false);
2052 bsi_insert_before (&new_bsi, stmt, BSI_NEW_STMT);
2054 else
2056 /* Release SSA definitions if we are in SSA. Note that we
2057 may be called when not in SSA. For example,
2058 final_cleanup calls this function via
2059 cleanup_tree_cfg. */
2060 if (in_ssa_p)
2061 release_defs (stmt);
2063 bsi_remove (&i, true);
2066 /* Don't warn for removed gotos. Gotos are often removed due to
2067 jump threading, thus resulting in bogus warnings. Not great,
2068 since this way we lose warnings for gotos in the original
2069 program that are indeed unreachable. */
2070 if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
2072 #ifdef USE_MAPPED_LOCATION
2073 if (EXPR_HAS_LOCATION (stmt))
2074 loc = EXPR_LOCATION (stmt);
2075 #else
2076 source_locus t;
2077 t = EXPR_LOCUS (stmt);
2078 if (t && LOCATION_LINE (*t) > 0)
2079 loc = t;
2080 #endif
2084   /* If requested, give a warning that the first statement in the
2085      block is unreachable.  The loop above walks the statements in
2086      order and records a location only the first time, so LOC is the
2087      location of the earliest non-GOTO statement in the block that has one.  */
2088 #ifdef USE_MAPPED_LOCATION
2089 if (loc > BUILTINS_LOCATION)
2090 warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
2091 #else
2092 if (loc)
2093 warning (OPT_Wunreachable_code, "%Hwill never be executed", loc);
2094 #endif
2096 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2100 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2101 predicate VAL, return the edge that will be taken out of the block.
2102 If VAL does not match a unique edge, NULL is returned. */
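/* For illustration, a pass that has folded the controlling predicate of
   BB to a constant might ask which successor survives (a sketch, using
   only functions from this file):

     edge taken = find_taken_edge (bb, integer_one_node);

   For a COND_EXPR ending BB this yields the EDGE_TRUE_VALUE successor;
   for a VAL that is not a gimple invariant the function returns NULL
   and both successors must be kept.  */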
2104 edge
2105 find_taken_edge (basic_block bb, tree val)
2107 tree stmt;
2109 stmt = last_stmt (bb);
2111 gcc_assert (stmt);
2112 gcc_assert (is_ctrl_stmt (stmt));
2113 gcc_assert (val);
2115 if (! is_gimple_min_invariant (val))
2116 return NULL;
2118 if (TREE_CODE (stmt) == COND_EXPR)
2119 return find_taken_edge_cond_expr (bb, val);
2121 if (TREE_CODE (stmt) == SWITCH_EXPR)
2122 return find_taken_edge_switch_expr (bb, val);
2124 if (computed_goto_p (stmt))
2126 /* Only optimize if the argument is a label; if the argument is
2127 not a label then we cannot construct a proper CFG.
2129 It may be the case that we only need to allow the LABEL_REF to
2130 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2131 appear inside a LABEL_EXPR just to be safe. */
2132 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2133 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2134 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2135 return NULL;
2138 gcc_unreachable ();
2141 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2142 statement, determine which of the outgoing edges will be taken out of the
2143 block. Return NULL if either edge may be taken. */
2145 static edge
2146 find_taken_edge_computed_goto (basic_block bb, tree val)
2148 basic_block dest;
2149 edge e = NULL;
2151 dest = label_to_block (val);
2152 if (dest)
2154 e = find_edge (bb, dest);
2155 gcc_assert (e != NULL);
2158 return e;
2161 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2162 statement, determine which of the two edges will be taken out of the
2163 block. Return NULL if either edge may be taken. */
2165 static edge
2166 find_taken_edge_cond_expr (basic_block bb, tree val)
2168 edge true_edge, false_edge;
2170 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2172 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2173 return (zero_p (val) ? false_edge : true_edge);
2176 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2177 statement, determine which edge will be taken out of the block. Return
2178 NULL if any edge may be taken. */
2180 static edge
2181 find_taken_edge_switch_expr (basic_block bb, tree val)
2183 tree switch_expr, taken_case;
2184 basic_block dest_bb;
2185 edge e;
2187 switch_expr = last_stmt (bb);
2188 taken_case = find_case_label_for_value (switch_expr, val);
2189 dest_bb = label_to_block (CASE_LABEL (taken_case));
2191 e = find_edge (bb, dest_bb);
2192 gcc_assert (e);
2193 return e;
2197 /* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
2198 We can make optimal use here of the fact that the case labels are
2199 sorted: We can do a binary search for a case matching VAL. */
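/* For example, with a sorted label vector { case 1:, case 5 ... 9:,
   default: } and VAL == 7, the probe lands on the "5 ... 9" entry,
   whose cached CASE_LOW comparison is <= 0 and whose CASE_HIGH compares
   >= 0 against VAL, so that range label is returned; VAL == 3 matches
   no label and the trailing default case is returned instead.  */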
2201 static tree
2202 find_case_label_for_value (tree switch_expr, tree val)
2204 tree vec = SWITCH_LABELS (switch_expr);
2205 size_t low, high, n = TREE_VEC_LENGTH (vec);
2206 tree default_case = TREE_VEC_ELT (vec, n - 1);
2208 for (low = -1, high = n - 1; high - low > 1; )
2210 size_t i = (high + low) / 2;
2211 tree t = TREE_VEC_ELT (vec, i);
2212 int cmp;
2214 /* Cache the result of comparing CASE_LOW and val. */
2215 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2217 if (cmp > 0)
2218 high = i;
2219 else
2220 low = i;
2222 if (CASE_HIGH (t) == NULL)
2224 /* A single-valued case label. */
2225 if (cmp == 0)
2226 return t;
2228 else
2230 /* A case range. We can only handle integer ranges. */
2231 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2232 return t;
2236 return default_case;
2242 /*---------------------------------------------------------------------------
2243 Debugging functions
2244 ---------------------------------------------------------------------------*/
2246 /* Dump tree-specific information of block BB to file OUTF. */
2248 void
2249 tree_dump_bb (basic_block bb, FILE *outf, int indent)
2251 dump_generic_bb (outf, bb, indent, TDF_VOPS);
2255 /* Dump a basic block on stderr. */
2257 void
2258 debug_tree_bb (basic_block bb)
2260 dump_bb (bb, stderr, 0);
2264 /* Dump basic block with index N on stderr. */
2266 basic_block
2267 debug_tree_bb_n (int n)
2269 debug_tree_bb (BASIC_BLOCK (n));
2270 return BASIC_BLOCK (n);
2274 /* Dump the CFG on stderr.
2276 FLAGS are the same as those used by the tree dumping functions
2277 (see TDF_* in tree-pass.h). */
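/* Most conveniently called from the debugger, e.g.

     (gdb) call debug_tree_cfg (TDF_DETAILS)

   which prints the function name, the block/edge counts and a brief
   CFG dump before the per-block listing.  */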
2279 void
2280 debug_tree_cfg (int flags)
2282 dump_tree_cfg (stderr, flags);
2286 /* Dump the program showing basic block boundaries on the given FILE.
2288 FLAGS are the same as those used by the tree dumping functions (see TDF_* in
2289 tree.h). */
2291 void
2292 dump_tree_cfg (FILE *file, int flags)
2294 if (flags & TDF_DETAILS)
2296 const char *funcname
2297 = lang_hooks.decl_printable_name (current_function_decl, 2);
2299 fputc ('\n', file);
2300 fprintf (file, ";; Function %s\n\n", funcname);
2301 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2302 n_basic_blocks, n_edges, last_basic_block);
2304 brief_dump_cfg (file);
2305 fprintf (file, "\n");
2308 if (flags & TDF_STATS)
2309 dump_cfg_stats (file);
2311 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2315 /* Dump CFG statistics on FILE. */
2317 void
2318 dump_cfg_stats (FILE *file)
2320 static long max_num_merged_labels = 0;
2321 unsigned long size, total = 0;
2322 long num_edges;
2323 basic_block bb;
2324 const char * const fmt_str = "%-30s%-13s%12s\n";
2325 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2326 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2327 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2328 const char *funcname
2329 = lang_hooks.decl_printable_name (current_function_decl, 2);
2332 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2334 fprintf (file, "---------------------------------------------------------\n");
2335 fprintf (file, fmt_str, "", " Number of ", "Memory");
2336 fprintf (file, fmt_str, "", " instances ", "used ");
2337 fprintf (file, "---------------------------------------------------------\n");
2339 size = n_basic_blocks * sizeof (struct basic_block_def);
2340 total += size;
2341 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2342 SCALE (size), LABEL (size));
2344 num_edges = 0;
2345 FOR_EACH_BB (bb)
2346 num_edges += EDGE_COUNT (bb->succs);
2347 size = num_edges * sizeof (struct edge_def);
2348 total += size;
2349 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2351 fprintf (file, "---------------------------------------------------------\n");
2352 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2353 LABEL (total));
2354 fprintf (file, "---------------------------------------------------------\n");
2355 fprintf (file, "\n");
2357 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2358 max_num_merged_labels = cfg_stats.num_merged_labels;
2360 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2361 cfg_stats.num_merged_labels, max_num_merged_labels);
2363 fprintf (file, "\n");
2367 /* Dump CFG statistics on stderr. Keep extern so that it's always
2368 linked in the final executable. */
2370 void
2371 debug_cfg_stats (void)
2373 dump_cfg_stats (stderr);
2377 /* Dump the flowgraph to a .vcg FILE. */
2379 static void
2380 tree_cfg2vcg (FILE *file)
2382 edge e;
2383 edge_iterator ei;
2384 basic_block bb;
2385 const char *funcname
2386 = lang_hooks.decl_printable_name (current_function_decl, 2);
2388 /* Write the file header. */
2389 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2390 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2391 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2393 /* Write blocks and edges. */
2394 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2396 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2397 e->dest->index);
2399 if (e->flags & EDGE_FAKE)
2400 fprintf (file, " linestyle: dotted priority: 10");
2401 else
2402 fprintf (file, " linestyle: solid priority: 100");
2404 fprintf (file, " }\n");
2406 fputc ('\n', file);
2408 FOR_EACH_BB (bb)
2410 enum tree_code head_code, end_code;
2411 const char *head_name, *end_name;
2412 int head_line = 0;
2413 int end_line = 0;
2414 tree first = first_stmt (bb);
2415 tree last = last_stmt (bb);
2417 if (first)
2419 head_code = TREE_CODE (first);
2420 head_name = tree_code_name[head_code];
2421 head_line = get_lineno (first);
2423 else
2424 head_name = "no-statement";
2426 if (last)
2428 end_code = TREE_CODE (last);
2429 end_name = tree_code_name[end_code];
2430 end_line = get_lineno (last);
2432 else
2433 end_name = "no-statement";
2435 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2436 bb->index, bb->index, head_name, head_line, end_name,
2437 end_line);
2439 FOR_EACH_EDGE (e, ei, bb->succs)
2441 if (e->dest == EXIT_BLOCK_PTR)
2442 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2443 else
2444 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2446 if (e->flags & EDGE_FAKE)
2447 fprintf (file, " priority: 10 linestyle: dotted");
2448 else
2449 fprintf (file, " priority: 100 linestyle: solid");
2451 fprintf (file, " }\n");
2454 if (bb->next_bb != EXIT_BLOCK_PTR)
2455 fputc ('\n', file);
2458 fputs ("}\n\n", file);
2463 /*---------------------------------------------------------------------------
2464 Miscellaneous helpers
2465 ---------------------------------------------------------------------------*/
2467 /* Return true if T represents a stmt that always transfers control. */
2469 bool
2470 is_ctrl_stmt (tree t)
2472 return (TREE_CODE (t) == COND_EXPR
2473 || TREE_CODE (t) == SWITCH_EXPR
2474 || TREE_CODE (t) == GOTO_EXPR
2475 || TREE_CODE (t) == RETURN_EXPR
2476 || TREE_CODE (t) == RESX_EXPR);
2480 /* Return true if T is a statement that may alter the flow of control
2481 (e.g., a call to a non-returning function). */
2483 bool
2484 is_ctrl_altering_stmt (tree t)
2486 tree call;
2488 gcc_assert (t);
2489 call = get_call_expr_in (t);
2490 if (call)
2492 /* A non-pure/const CALL_EXPR alters flow control if the current
2493 function has nonlocal labels. */
2494 if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
2495 return true;
2497 /* A CALL_EXPR also alters control flow if it does not return. */
2498 if (call_expr_flags (call) & ECF_NORETURN)
2499 return true;
2502 /* OpenMP directives alter control flow. */
2503 if (OMP_DIRECTIVE_P (t))
2504 return true;
2506 /* If a statement can throw, it alters control flow. */
2507 return tree_can_throw_internal (t);
2511 /* Return true if T is a computed goto. */
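/* At the source level this corresponds to the GNU label-as-value
   extension, e.g.

     void *targets[2] = { &&lab_a, &&lab_b };
     goto *targets[i];

   where GOTO_DESTINATION is an arbitrary expression rather than a
   LABEL_DECL; a plain "goto lab_a;" is a simple goto, handled by
   simple_goto_p below.  */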
2513 bool
2514 computed_goto_p (tree t)
2516 return (TREE_CODE (t) == GOTO_EXPR
2517 && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
2521 /* Return true if T is a simple local goto. */
2523 bool
2524 simple_goto_p (tree t)
2526 return (TREE_CODE (t) == GOTO_EXPR
2527 && TREE_CODE (GOTO_DESTINATION (t)) == LABEL_DECL);
2531 /* Return true if T can make an abnormal transfer of control flow.
2532 Transfers of control flow associated with EH are excluded. */
2534 bool
2535 tree_can_make_abnormal_goto (tree t)
2537 if (computed_goto_p (t))
2538 return true;
2539 if (TREE_CODE (t) == MODIFY_EXPR)
2540 t = TREE_OPERAND (t, 1);
2541 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2542 t = TREE_OPERAND (t, 0);
2543 if (TREE_CODE (t) == CALL_EXPR)
2544 return TREE_SIDE_EFFECTS (t) && current_function_has_nonlocal_label;
2545 return false;
2549 /* Return true if T should start a new basic block. PREV_T is the
2550 statement preceding T. It is used when T is a label or a case label.
2551 Labels should only start a new basic block if their previous statement
2552 wasn't a label. Otherwise, a sequence of labels would generate
2553 unnecessary basic blocks that only contain a single label. */
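/* For example, in

     L1:
     L2:
       x = 1;

   only L1 opens a new basic block; L2 is merged into it and counted in
   cfg_stats.num_merged_labels, unless L2 is a nonlocal or forced label,
   in which case it still starts a block of its own.  */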
2555 static inline bool
2556 stmt_starts_bb_p (tree t, tree prev_t)
2558 if (t == NULL_TREE)
2559 return false;
2561 /* LABEL_EXPRs start a new basic block only if the preceding
2562 statement wasn't a label of the same type. This prevents the
2563 creation of consecutive blocks that have nothing but a single
2564 label. */
2565 if (TREE_CODE (t) == LABEL_EXPR)
2567 /* Nonlocal and computed GOTO targets always start a new block. */
2568 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
2569 || FORCED_LABEL (LABEL_EXPR_LABEL (t)))
2570 return true;
2572 if (prev_t && TREE_CODE (prev_t) == LABEL_EXPR)
2574 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
2575 return true;
2577 cfg_stats.num_merged_labels++;
2578 return false;
2580 else
2581 return true;
2584 return false;
2588 /* Return true if T should end a basic block. */
2590 bool
2591 stmt_ends_bb_p (tree t)
2593 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2597 /* Add gotos that used to be represented implicitly in the CFG. */
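/* For example, a block whose fallthru successor is not the next block
   in the chain gets an explicit

     goto <label of the successor>;

   appended and its edge loses EDGE_FALLTHRU, while a goto in a
   COND_EXPR arm that would only fall through to the next block is
   replaced by an empty statement.  */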
2599 void
2600 disband_implicit_edges (void)
2602 basic_block bb;
2603 block_stmt_iterator last;
2604 edge e;
2605 edge_iterator ei;
2606 tree stmt, label;
2608 FOR_EACH_BB (bb)
2610 last = bsi_last (bb);
2611 stmt = last_stmt (bb);
2613 if (stmt && TREE_CODE (stmt) == COND_EXPR)
2615 /* Remove superfluous gotos from COND_EXPR branches. Moved
2616 from cfg_remove_useless_stmts here since it violates the
2617 invariants for tree--cfg correspondence and thus fits better
2618 here where we do it anyway. */
2619 e = find_edge (bb, bb->next_bb);
2620 if (e)
2622 if (e->flags & EDGE_TRUE_VALUE)
2623 COND_EXPR_THEN (stmt) = build_empty_stmt ();
2624 else if (e->flags & EDGE_FALSE_VALUE)
2625 COND_EXPR_ELSE (stmt) = build_empty_stmt ();
2626 else
2627 gcc_unreachable ();
2628 e->flags |= EDGE_FALLTHRU;
2631 continue;
2634 if (stmt && TREE_CODE (stmt) == RETURN_EXPR)
2636 /* Remove the RETURN_EXPR if we may fall through to the exit
2637 instead. */
2638 gcc_assert (single_succ_p (bb));
2639 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
2641 if (bb->next_bb == EXIT_BLOCK_PTR
2642 && !TREE_OPERAND (stmt, 0))
2644 bsi_remove (&last, true);
2645 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
2647 continue;
2650 /* There can be no fallthru edge if the last statement is a control
2651 one. */
2652 if (stmt && is_ctrl_stmt (stmt))
2653 continue;
2655 /* Find a fallthru edge and emit the goto if necessary. */
2656 FOR_EACH_EDGE (e, ei, bb->succs)
2657 if (e->flags & EDGE_FALLTHRU)
2658 break;
2660 if (!e || e->dest == bb->next_bb)
2661 continue;
2663 gcc_assert (e->dest != EXIT_BLOCK_PTR);
2664 label = tree_block_label (e->dest);
2666 stmt = build1 (GOTO_EXPR, void_type_node, label);
2667 #ifdef USE_MAPPED_LOCATION
2668 SET_EXPR_LOCATION (stmt, e->goto_locus);
2669 #else
2670 SET_EXPR_LOCUS (stmt, e->goto_locus);
2671 #endif
2672 bsi_insert_after (&last, stmt, BSI_NEW_STMT);
2673 e->flags &= ~EDGE_FALLTHRU;
2677 /* Remove block annotations and other data structures. */
2679 void
2680 delete_tree_cfg_annotations (void)
2682 label_to_block_map = NULL;
2686 /* Return the first statement in basic block BB. */
2688 tree
2689 first_stmt (basic_block bb)
2691 block_stmt_iterator i = bsi_start (bb);
2692 return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
2696 /* Return the last statement in basic block BB. */
2698 tree
2699 last_stmt (basic_block bb)
2701 block_stmt_iterator b = bsi_last (bb);
2702 return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
2706 /* Return a pointer to the last statement in block BB. */
2708 tree *
2709 last_stmt_ptr (basic_block bb)
2711 block_stmt_iterator last = bsi_last (bb);
2712 return !bsi_end_p (last) ? bsi_stmt_ptr (last) : NULL;
2716 /* Return the last statement of an otherwise empty block. Return NULL
2717 if the block is totally empty, or if it contains more than one
2718 statement. */
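/* For example, a block containing

     L5:
       return x;

   yields the RETURN_EXPR, whereas a block with two executable
   statements, or an empty block, yields NULL_TREE.  */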
2720 tree
2721 last_and_only_stmt (basic_block bb)
2723 block_stmt_iterator i = bsi_last (bb);
2724 tree last, prev;
2726 if (bsi_end_p (i))
2727 return NULL_TREE;
2729 last = bsi_stmt (i);
2730 bsi_prev (&i);
2731 if (bsi_end_p (i))
2732 return last;
2734 /* Empty statements should no longer appear in the instruction stream.
2735 Everything that might have appeared before should be deleted by
2736 remove_useless_stmts, and the optimizers should just use bsi_remove
2737 instead of smashing statements with build_empty_stmt.
2739 Thus the only thing that should appear here in a block containing
2740 one executable statement is a label. */
2741 prev = bsi_stmt (i);
2742 if (TREE_CODE (prev) == LABEL_EXPR)
2743 return last;
2744 else
2745 return NULL_TREE;
2749 /* Mark BB as the basic block holding statement T. */
2751 void
2752 set_bb_for_stmt (tree t, basic_block bb)
2754 if (TREE_CODE (t) == PHI_NODE)
2755 PHI_BB (t) = bb;
2756 else if (TREE_CODE (t) == STATEMENT_LIST)
2758 tree_stmt_iterator i;
2759 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2760 set_bb_for_stmt (tsi_stmt (i), bb);
2762 else
2764 stmt_ann_t ann = get_stmt_ann (t);
2765 ann->bb = bb;
2767 /* If the statement is a label, add the label to block-to-labels map
2768 so that we can speed up edge creation for GOTO_EXPRs. */
2769 if (TREE_CODE (t) == LABEL_EXPR)
2771 int uid;
2773 t = LABEL_EXPR_LABEL (t);
2774 uid = LABEL_DECL_UID (t);
2775 if (uid == -1)
2777 unsigned old_len = VEC_length (basic_block, label_to_block_map);
2778 LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
2779 if (old_len <= (unsigned) uid)
2781 basic_block *addr;
2782 unsigned new_len = 3 * uid / 2;
2784 VEC_safe_grow (basic_block, gc, label_to_block_map,
2785 new_len);
2786 addr = VEC_address (basic_block, label_to_block_map);
2787 memset (&addr[old_len],
2788 0, sizeof (basic_block) * (new_len - old_len));
2791 else
2792 /* We're moving an existing label. Make sure that we've
2793 removed it from the old block. */
2794 gcc_assert (!bb
2795 || !VEC_index (basic_block, label_to_block_map, uid));
2796 VEC_replace (basic_block, label_to_block_map, uid, bb);
2801 /* Faster version of set_bb_for_stmt that assumes the statement is being moved
2802 from one basic block to another.
2803 For BB splitting we can run into a quadratic case, so performance is quite
2804 important and, knowing that the tables are big enough, change_bb_for_stmt
2805 can be inlined as a leaf function. */
2806 static inline void
2807 change_bb_for_stmt (tree t, basic_block bb)
2809 get_stmt_ann (t)->bb = bb;
2810 if (TREE_CODE (t) == LABEL_EXPR)
2811 VEC_replace (basic_block, label_to_block_map,
2812 LABEL_DECL_UID (LABEL_EXPR_LABEL (t)), bb);
2815 /* Find the iterator for STMT. */
2817 extern block_stmt_iterator
2818 bsi_for_stmt (tree stmt)
2820 block_stmt_iterator bsi;
2822 for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
2823 if (bsi_stmt (bsi) == stmt)
2824 return bsi;
2826 gcc_unreachable ();
2829 /* Mark statement T as modified, and update it. */
2830 static inline void
2831 update_modified_stmts (tree t)
2833 if (TREE_CODE (t) == STATEMENT_LIST)
2835 tree_stmt_iterator i;
2836 tree stmt;
2837 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2839 stmt = tsi_stmt (i);
2840 update_stmt_if_modified (stmt);
2843 else
2844 update_stmt_if_modified (t);
2847 /* Insert statement (or statement list) T before the statement
2848 pointed-to by iterator I. M specifies how to update iterator I
2849 after insertion (see enum bsi_iterator_update). */
2851 void
2852 bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2854 set_bb_for_stmt (t, i->bb);
2855 update_modified_stmts (t);
2856 tsi_link_before (&i->tsi, t, m);
2860 /* Insert statement (or statement list) T after the statement
2861 pointed-to by iterator I. M specifies how to update iterator I
2862 after insertion (see enum bsi_iterator_update). */
2864 void
2865 bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2867 set_bb_for_stmt (t, i->bb);
2868 update_modified_stmts (t);
2869 tsi_link_after (&i->tsi, t, m);
2873 /* Remove the statement pointed to by iterator I. The iterator is updated
2874 to the next statement.
2876 When REMOVE_EH_INFO is true we remove the statement pointed to by
2877 iterator I from the EH tables. Otherwise we do not modify the EH
2878 tables.
2880 Generally, REMOVE_EH_INFO should be true when the statement is going to
2881 be removed from the IL and not reinserted elsewhere. */
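/* For instance, a cleanup that deletes a dead statement for good would
   use

     bsi_remove (&bsi, true);

   while the move helpers below pass REMOVE_EH_INFO as false, since the
   statement is immediately re-linked elsewhere and must keep its EH
   region.  */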
2883 void
2884 bsi_remove (block_stmt_iterator *i, bool remove_eh_info)
2886 tree t = bsi_stmt (*i);
2887 set_bb_for_stmt (t, NULL);
2888 delink_stmt_imm_use (t);
2889 tsi_delink (&i->tsi);
2890 mark_stmt_modified (t);
2891 if (remove_eh_info)
2892 remove_stmt_from_eh_region (t);
2896 /* Move the statement at FROM so it comes right after the statement at TO. */
2898 void
2899 bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
2901 tree stmt = bsi_stmt (*from);
2902 bsi_remove (from, false);
2903 bsi_insert_after (to, stmt, BSI_SAME_STMT);
2907 /* Move the statement at FROM so it comes right before the statement at TO. */
2909 void
2910 bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
2912 tree stmt = bsi_stmt (*from);
2913 bsi_remove (from, false);
2914 bsi_insert_before (to, stmt, BSI_SAME_STMT);
2918 /* Move the statement at FROM to the end of basic block BB. */
2920 void
2921 bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
2923 block_stmt_iterator last = bsi_last (bb);
2925 /* Have to check bsi_end_p because it could be an empty block. */
2926 if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
2927 bsi_move_before (from, &last);
2928 else
2929 bsi_move_after (from, &last);
2933 /* Replace the contents of the statement pointed to by iterator BSI
2934 with STMT. If UPDATE_EH_INFO is true, the exception handling
2935 information of the original statement is moved to the new statement. */
2937 void
2938 bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool update_eh_info)
2940 int eh_region;
2941 tree orig_stmt = bsi_stmt (*bsi);
2943 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
2944 set_bb_for_stmt (stmt, bsi->bb);
2946 /* Preserve EH region information from the original statement, if
2947 requested by the caller. */
2948 if (update_eh_info)
2950 eh_region = lookup_stmt_eh_region (orig_stmt);
2951 if (eh_region >= 0)
2953 remove_stmt_from_eh_region (orig_stmt);
2954 add_stmt_to_eh_region (stmt, eh_region);
2958 delink_stmt_imm_use (orig_stmt);
2959 *bsi_stmt_ptr (*bsi) = stmt;
2960 mark_stmt_modified (stmt);
2961 update_modified_stmts (stmt);
2965 /* Insert the statement pointed-to by BSI into edge E. Every attempt
2966 is made to place the statement in an existing basic block, but
2967 sometimes that isn't possible. When it isn't possible, the edge is
2968 split and the statement is added to the new block.
2970 In all cases, the returned *BSI points to the correct location. The
2971 return value is true if insertion should be done after the location,
2972 or false if it should be done before the location. If a new basic block
2973 has to be created, it is stored in *NEW_BB. */
2975 static bool
2976 tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
2977 basic_block *new_bb)
2979 basic_block dest, src;
2980 tree tmp;
2982 dest = e->dest;
2983 restart:
2985 /* If the destination has one predecessor which has no PHI nodes,
2986 insert there. Except for the exit block.
2988 The requirement for no PHI nodes could be relaxed. Basically we
2989 would have to examine the PHIs to prove that none of them used
2990 the value set by the statement we want to insert on E. That
2991 hardly seems worth the effort. */
2992 if (single_pred_p (dest)
2993 && ! phi_nodes (dest)
2994 && dest != EXIT_BLOCK_PTR)
2996 *bsi = bsi_start (dest);
2997 if (bsi_end_p (*bsi))
2998 return true;
3000 /* Make sure we insert after any leading labels. */
3001 tmp = bsi_stmt (*bsi);
3002 while (TREE_CODE (tmp) == LABEL_EXPR)
3004 bsi_next (bsi);
3005 if (bsi_end_p (*bsi))
3006 break;
3007 tmp = bsi_stmt (*bsi);
3010 if (bsi_end_p (*bsi))
3012 *bsi = bsi_last (dest);
3013 return true;
3015 else
3016 return false;
3019 /* If the source has one successor, the edge is not abnormal and
3020 the last statement does not end a basic block, insert there.
3021 Except for the entry block. */
3022 src = e->src;
3023 if ((e->flags & EDGE_ABNORMAL) == 0
3024 && single_succ_p (src)
3025 && src != ENTRY_BLOCK_PTR)
3027 *bsi = bsi_last (src);
3028 if (bsi_end_p (*bsi))
3029 return true;
3031 tmp = bsi_stmt (*bsi);
3032 if (!stmt_ends_bb_p (tmp))
3033 return true;
3035 /* Insert code just before returning the value. We may need to decompose
3036 the return in case it contains a non-trivial operand. */
3037 if (TREE_CODE (tmp) == RETURN_EXPR)
3039 tree op = TREE_OPERAND (tmp, 0);
3040 if (op && !is_gimple_val (op))
3042 gcc_assert (TREE_CODE (op) == MODIFY_EXPR);
3043 bsi_insert_before (bsi, op, BSI_NEW_STMT);
3044 TREE_OPERAND (tmp, 0) = TREE_OPERAND (op, 0);
3046 bsi_prev (bsi);
3047 return true;
3051 /* Otherwise, create a new basic block, and split this edge. */
3052 dest = split_edge (e);
3053 if (new_bb)
3054 *new_bb = dest;
3055 e = single_pred_edge (dest);
3056 goto restart;
3060 /* This routine will commit all pending edge insertions, creating any new
3061 basic blocks which are necessary. */
3063 void
3064 bsi_commit_edge_inserts (void)
3066 basic_block bb;
3067 edge e;
3068 edge_iterator ei;
3070 bsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR), NULL);
3072 FOR_EACH_BB (bb)
3073 FOR_EACH_EDGE (e, ei, bb->succs)
3074 bsi_commit_one_edge_insert (e, NULL);
3078 /* Commit insertions pending at edge E. If a new block is created, set NEW_BB
3079 to this block, otherwise set it to NULL. */
3081 void
3082 bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
3084 if (new_bb)
3085 *new_bb = NULL;
3086 if (PENDING_STMT (e))
3088 block_stmt_iterator bsi;
3089 tree stmt = PENDING_STMT (e);
3091 PENDING_STMT (e) = NULL_TREE;
3093 if (tree_find_edge_insert_loc (e, &bsi, new_bb))
3094 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3095 else
3096 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3101 /* Add STMT to the pending list of edge E. No actual insertion is
3102 made until bsi_commit_edge_inserts () is called. */
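/* A typical client queues statements on one or more edges and flushes
   them in a single pass, e.g. (sketch):

     bsi_insert_on_edge (e, stmt);
     ...
     bsi_commit_edge_inserts ();

   Any edge splitting happens only at commit time; use
   bsi_insert_on_edge_immediate below when the possibly created block is
   needed right away.  */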
3104 void
3105 bsi_insert_on_edge (edge e, tree stmt)
3107 append_to_statement_list (stmt, &PENDING_STMT (e));
3110 /* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts. If a new
3111 block has to be created, it is returned. */
3113 basic_block
3114 bsi_insert_on_edge_immediate (edge e, tree stmt)
3116 block_stmt_iterator bsi;
3117 basic_block new_bb = NULL;
3119 gcc_assert (!PENDING_STMT (e));
3121 if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
3122 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3123 else
3124 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3126 return new_bb;
3129 /*---------------------------------------------------------------------------
3130 Tree specific functions for CFG manipulation
3131 ---------------------------------------------------------------------------*/
3133 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
3135 static void
3136 reinstall_phi_args (edge new_edge, edge old_edge)
3138 tree var, phi;
3140 if (!PENDING_STMT (old_edge))
3141 return;
3143 for (var = PENDING_STMT (old_edge), phi = phi_nodes (new_edge->dest);
3144 var && phi;
3145 var = TREE_CHAIN (var), phi = PHI_CHAIN (phi))
3147 tree result = TREE_PURPOSE (var);
3148 tree arg = TREE_VALUE (var);
3150 gcc_assert (result == PHI_RESULT (phi));
3152 add_phi_arg (phi, arg, new_edge);
3155 PENDING_STMT (old_edge) = NULL;
3158 /* Returns the basic block after which the new basic block created
3159 by splitting edge EDGE_IN should be placed. Tries to keep the new block
3160 near its "logical" location. This is of most help to humans looking
3161 at debugging dumps. */
3163 static basic_block
3164 split_edge_bb_loc (edge edge_in)
3166 basic_block dest = edge_in->dest;
3168 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
3169 return edge_in->src;
3170 else
3171 return dest->prev_bb;
3174 /* Split a (typically critical) edge EDGE_IN. Return the new block.
3175 Abort on abnormal edges. */
3177 static basic_block
3178 tree_split_edge (edge edge_in)
3180 basic_block new_bb, after_bb, dest;
3181 edge new_edge, e;
3183 /* Abnormal edges cannot be split. */
3184 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
3186 dest = edge_in->dest;
3188 after_bb = split_edge_bb_loc (edge_in);
3190 new_bb = create_empty_bb (after_bb);
3191 new_bb->frequency = EDGE_FREQUENCY (edge_in);
3192 new_bb->count = edge_in->count;
3193 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
3194 new_edge->probability = REG_BR_PROB_BASE;
3195 new_edge->count = edge_in->count;
3197 e = redirect_edge_and_branch (edge_in, new_bb);
3198 gcc_assert (e);
3199 reinstall_phi_args (new_edge, e);
3201 return new_bb;
3205 /* Return true when BB has label LABEL in it. */
3207 static bool
3208 has_label_p (basic_block bb, tree label)
3210 block_stmt_iterator bsi;
3212 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3214 tree stmt = bsi_stmt (bsi);
3216 if (TREE_CODE (stmt) != LABEL_EXPR)
3217 return false;
3218 if (LABEL_EXPR_LABEL (stmt) == label)
3219 return true;
3221 return false;
3225 /* Callback for walk_tree, check that all elements with address taken are
3226 properly noticed as such. The DATA is an int* that is 1 if TP was seen
3227 inside a PHI node. */
3229 static tree
3230 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3232 tree t = *tp, x;
3233 bool in_phi = (data != NULL);
3235 if (TYPE_P (t))
3236 *walk_subtrees = 0;
3238 /* Check operand N for being valid GIMPLE and give error MSG if not. */
3239 #define CHECK_OP(N, MSG) \
3240 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
3241 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
3243 switch (TREE_CODE (t))
3245 case SSA_NAME:
3246 if (SSA_NAME_IN_FREE_LIST (t))
3248 error ("SSA name in freelist but still referenced");
3249 return *tp;
3251 break;
3253 case ASSERT_EXPR:
3254 x = fold (ASSERT_EXPR_COND (t));
3255 if (x == boolean_false_node)
3257 error ("ASSERT_EXPR with an always-false condition");
3258 return *tp;
3260 break;
3262 case MODIFY_EXPR:
3263 x = TREE_OPERAND (t, 0);
3264 if (TREE_CODE (x) == BIT_FIELD_REF
3265 && is_gimple_reg (TREE_OPERAND (x, 0)))
3267 error ("GIMPLE register modified with BIT_FIELD_REF");
3268 return t;
3270 break;
3272 case ADDR_EXPR:
3274 bool old_invariant;
3275 bool old_constant;
3276 bool old_side_effects;
3277 bool new_invariant;
3278 bool new_constant;
3279 bool new_side_effects;
3281 /* ??? tree-ssa-alias.c may have overlooked dead PHI nodes, missing
3282 dead PHIs that take the address of something. But if the PHI
3283 result is dead, the fact that it takes the address of anything
3284 is irrelevant. Because we cannot tell from here if a PHI result
3285 is dead, we just skip this check for PHIs altogether. This means
3286 we may be missing "valid" checks, but what can you do?
3287 This was PR19217. */
3288 if (in_phi)
3289 break;
3291 old_invariant = TREE_INVARIANT (t);
3292 old_constant = TREE_CONSTANT (t);
3293 old_side_effects = TREE_SIDE_EFFECTS (t);
3295 recompute_tree_invariant_for_addr_expr (t);
3296 new_invariant = TREE_INVARIANT (t);
3297 new_side_effects = TREE_SIDE_EFFECTS (t);
3298 new_constant = TREE_CONSTANT (t);
3300 if (old_invariant != new_invariant)
3302 error ("invariant not recomputed when ADDR_EXPR changed");
3303 return t;
3306 if (old_constant != new_constant)
3308 error ("constant not recomputed when ADDR_EXPR changed");
3309 return t;
3311 if (old_side_effects != new_side_effects)
3313 error ("side effects not recomputed when ADDR_EXPR changed");
3314 return t;
3317 /* Skip any references (they will be checked when we recurse down the
3318 tree) and ensure that any variable used as a prefix is marked
3319 addressable. */
3320 for (x = TREE_OPERAND (t, 0);
3321 handled_component_p (x);
3322 x = TREE_OPERAND (x, 0))
3325 if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
3326 return NULL;
3327 if (!TREE_ADDRESSABLE (x))
3329 error ("address taken, but ADDRESSABLE bit not set");
3330 return x;
3332 break;
3335 case COND_EXPR:
3336 x = COND_EXPR_COND (t);
3337 if (TREE_CODE (TREE_TYPE (x)) != BOOLEAN_TYPE)
3339 error ("non-boolean used in condition");
3340 return x;
3342 if (!is_gimple_condexpr (x))
3344 error ("invalid conditional operand");
3345 return x;
3347 break;
3349 case NOP_EXPR:
3350 case CONVERT_EXPR:
3351 case FIX_TRUNC_EXPR:
3352 case FIX_CEIL_EXPR:
3353 case FIX_FLOOR_EXPR:
3354 case FIX_ROUND_EXPR:
3355 case FLOAT_EXPR:
3356 case NEGATE_EXPR:
3357 case ABS_EXPR:
3358 case BIT_NOT_EXPR:
3359 case NON_LVALUE_EXPR:
3360 case TRUTH_NOT_EXPR:
3361 CHECK_OP (0, "invalid operand to unary operator");
3362 break;
3364 case REALPART_EXPR:
3365 case IMAGPART_EXPR:
3366 case COMPONENT_REF:
3367 case ARRAY_REF:
3368 case ARRAY_RANGE_REF:
3369 case BIT_FIELD_REF:
3370 case VIEW_CONVERT_EXPR:
3371 /* We have a nest of references. Verify that each of the operands
3372 that determine where to reference is either a constant or a variable,
3373 verify that the base is valid, and then show we've already checked
3374 the subtrees. */
3375 while (handled_component_p (t))
3377 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3378 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3379 else if (TREE_CODE (t) == ARRAY_REF
3380 || TREE_CODE (t) == ARRAY_RANGE_REF)
3382 CHECK_OP (1, "invalid array index");
3383 if (TREE_OPERAND (t, 2))
3384 CHECK_OP (2, "invalid array lower bound");
3385 if (TREE_OPERAND (t, 3))
3386 CHECK_OP (3, "invalid array stride");
3388 else if (TREE_CODE (t) == BIT_FIELD_REF)
3390 CHECK_OP (1, "invalid operand to BIT_FIELD_REF");
3391 CHECK_OP (2, "invalid operand to BIT_FIELD_REF");
3394 t = TREE_OPERAND (t, 0);
3397 if (!CONSTANT_CLASS_P (t) && !is_gimple_lvalue (t))
3399 error ("invalid reference prefix");
3400 return t;
3402 *walk_subtrees = 0;
3403 break;
3405 case LT_EXPR:
3406 case LE_EXPR:
3407 case GT_EXPR:
3408 case GE_EXPR:
3409 case EQ_EXPR:
3410 case NE_EXPR:
3411 case UNORDERED_EXPR:
3412 case ORDERED_EXPR:
3413 case UNLT_EXPR:
3414 case UNLE_EXPR:
3415 case UNGT_EXPR:
3416 case UNGE_EXPR:
3417 case UNEQ_EXPR:
3418 case LTGT_EXPR:
3419 case PLUS_EXPR:
3420 case MINUS_EXPR:
3421 case MULT_EXPR:
3422 case TRUNC_DIV_EXPR:
3423 case CEIL_DIV_EXPR:
3424 case FLOOR_DIV_EXPR:
3425 case ROUND_DIV_EXPR:
3426 case TRUNC_MOD_EXPR:
3427 case CEIL_MOD_EXPR:
3428 case FLOOR_MOD_EXPR:
3429 case ROUND_MOD_EXPR:
3430 case RDIV_EXPR:
3431 case EXACT_DIV_EXPR:
3432 case MIN_EXPR:
3433 case MAX_EXPR:
3434 case LSHIFT_EXPR:
3435 case RSHIFT_EXPR:
3436 case LROTATE_EXPR:
3437 case RROTATE_EXPR:
3438 case BIT_IOR_EXPR:
3439 case BIT_XOR_EXPR:
3440 case BIT_AND_EXPR:
3441 CHECK_OP (0, "invalid operand to binary operator");
3442 CHECK_OP (1, "invalid operand to binary operator");
3443 break;
3445 case CONSTRUCTOR:
3446 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3447 *walk_subtrees = 0;
3448 break;
3450 default:
3451 break;
3453 return NULL;
3455 #undef CHECK_OP
3459 /* Verify STMT, return true if STMT is not in GIMPLE form.
3460 TODO: Implement type checking. */
3462 static bool
3463 verify_stmt (tree stmt, bool last_in_block)
3465 tree addr;
3467 if (OMP_DIRECTIVE_P (stmt))
3469 /* OpenMP directives are validated by the FE and never operated
3470 on by the optimizers. Furthermore, OMP_FOR may contain
3471 non-gimple expressions when the main index variable has had
3472 its address taken. This does not affect the loop itself
3473 because the header of an OMP_FOR is merely used to determine
3474 how to set up the parallel iteration. */
3475 return false;
3478 if (!is_gimple_stmt (stmt))
3480 error ("is not a valid GIMPLE statement");
3481 goto fail;
3484 addr = walk_tree (&stmt, verify_expr, NULL, NULL);
3485 if (addr)
3487 debug_generic_stmt (addr);
3488 return true;
3491 /* If the statement is marked as part of an EH region, then it is
3492 expected that the statement could throw. Verify that when we
3493 have optimizations that simplify statements such that we prove
3494 that they cannot throw, that we update other data structures
3495 to match. */
3496 if (lookup_stmt_eh_region (stmt) >= 0)
3498 if (!tree_could_throw_p (stmt))
3500 error ("statement marked for throw, but doesn%'t");
3501 goto fail;
3503 if (!last_in_block && tree_can_throw_internal (stmt))
3505 error ("statement marked for throw in middle of block");
3506 goto fail;
3510 return false;
3512 fail:
3513 debug_generic_stmt (stmt);
3514 return true;
3518 /* Return true when T can be shared. */
3520 static bool
3521 tree_node_can_be_shared (tree t)
3523 if (IS_TYPE_OR_DECL_P (t)
3524 || is_gimple_min_invariant (t)
3525 || TREE_CODE (t) == SSA_NAME
3526 || t == error_mark_node
3527 || TREE_CODE (t) == IDENTIFIER_NODE)
3528 return true;
3530 if (TREE_CODE (t) == CASE_LABEL_EXPR)
3531 return true;
3533 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3534 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
3535 || TREE_CODE (t) == COMPONENT_REF
3536 || TREE_CODE (t) == REALPART_EXPR
3537 || TREE_CODE (t) == IMAGPART_EXPR)
3538 t = TREE_OPERAND (t, 0);
3540 if (DECL_P (t))
3541 return true;
3543 return false;
3547 /* Called via walk_tree. Verify tree sharing. */
3549 static tree
3550 verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
3552 htab_t htab = (htab_t) data;
3553 void **slot;
3555 if (tree_node_can_be_shared (*tp))
3557 *walk_subtrees = false;
3558 return NULL;
3561 slot = htab_find_slot (htab, *tp, INSERT);
3562 if (*slot)
3563 return (tree) *slot;
3564 *slot = *tp;
3566 return NULL;
3570 /* Verify the GIMPLE statement chain. */
3572 void
3573 verify_stmts (void)
3575 basic_block bb;
3576 block_stmt_iterator bsi;
3577 bool err = false;
3578 htab_t htab;
3579 tree addr;
3581 timevar_push (TV_TREE_STMT_VERIFY);
3582 htab = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
3584 FOR_EACH_BB (bb)
3586 tree phi;
3587 int i;
3589 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
3591 int phi_num_args = PHI_NUM_ARGS (phi);
3593 if (bb_for_stmt (phi) != bb)
3595 error ("bb_for_stmt (phi) is set to a wrong basic block");
3596 err |= true;
3599 for (i = 0; i < phi_num_args; i++)
3601 tree t = PHI_ARG_DEF (phi, i);
3602 tree addr;
3604 /* Addressable variables do have SSA_NAMEs but they
3605 are not considered gimple values. */
3606 if (TREE_CODE (t) != SSA_NAME
3607 && TREE_CODE (t) != FUNCTION_DECL
3608 && !is_gimple_val (t))
3610 error ("PHI def is not a GIMPLE value");
3611 debug_generic_stmt (phi);
3612 debug_generic_stmt (t);
3613 err |= true;
3616 addr = walk_tree (&t, verify_expr, (void *) 1, NULL);
3617 if (addr)
3619 debug_generic_stmt (addr);
3620 err |= true;
3623 addr = walk_tree (&t, verify_node_sharing, htab, NULL);
3624 if (addr)
3626 error ("incorrect sharing of tree nodes");
3627 debug_generic_stmt (phi);
3628 debug_generic_stmt (addr);
3629 err |= true;
3634 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
3636 tree stmt = bsi_stmt (bsi);
3638 if (bb_for_stmt (stmt) != bb)
3640 error ("bb_for_stmt (stmt) is set to a wrong basic block");
3641 err |= true;
3644 bsi_next (&bsi);
3645 err |= verify_stmt (stmt, bsi_end_p (bsi));
3646 addr = walk_tree (&stmt, verify_node_sharing, htab, NULL);
3647 if (addr)
3649 error ("incorrect sharing of tree nodes");
3650 debug_generic_stmt (stmt);
3651 debug_generic_stmt (addr);
3652 err |= true;
3657 if (err)
3658 internal_error ("verify_stmts failed");
3660 htab_delete (htab);
3661 timevar_pop (TV_TREE_STMT_VERIFY);
3665 /* Verifies that the flow information is OK. */
3667 static int
3668 tree_verify_flow_info (void)
3670 int err = 0;
3671 basic_block bb;
3672 block_stmt_iterator bsi;
3673 tree stmt;
3674 edge e;
3675 edge_iterator ei;
3677 if (ENTRY_BLOCK_PTR->stmt_list)
3679 error ("ENTRY_BLOCK has a statement list associated with it");
3680 err = 1;
3683 if (EXIT_BLOCK_PTR->stmt_list)
3685 error ("EXIT_BLOCK has a statement list associated with it");
3686 err = 1;
3689 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
3690 if (e->flags & EDGE_FALLTHRU)
3692 error ("fallthru to exit from bb %d", e->src->index);
3693 err = 1;
3696 FOR_EACH_BB (bb)
3698 bool found_ctrl_stmt = false;
3700 stmt = NULL_TREE;
3702 /* Skip labels on the start of basic block. */
3703 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3705 tree prev_stmt = stmt;
3707 stmt = bsi_stmt (bsi);
3709 if (TREE_CODE (stmt) != LABEL_EXPR)
3710 break;
3712 if (prev_stmt && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
3714 error ("nonlocal label ");
3715 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3716 fprintf (stderr, " is not first in a sequence of labels in bb %d",
3717 bb->index);
3718 err = 1;
3721 if (label_to_block (LABEL_EXPR_LABEL (stmt)) != bb)
3723 error ("label ");
3724 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3725 fprintf (stderr, " to block does not match in bb %d",
3726 bb->index);
3727 err = 1;
3730 if (decl_function_context (LABEL_EXPR_LABEL (stmt))
3731 != current_function_decl)
3733 error ("label ");
3734 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3735 fprintf (stderr, " has incorrect context in bb %d",
3736 bb->index);
3737 err = 1;
3741 /* Verify that body of basic block BB is free of control flow. */
3742 for (; !bsi_end_p (bsi); bsi_next (&bsi))
3744 tree stmt = bsi_stmt (bsi);
3746 if (found_ctrl_stmt)
3748 error ("control flow in the middle of basic block %d",
3749 bb->index);
3750 err = 1;
3753 if (stmt_ends_bb_p (stmt))
3754 found_ctrl_stmt = true;
3756 if (TREE_CODE (stmt) == LABEL_EXPR)
3758 error ("label ");
3759 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3760 fprintf (stderr, " in the middle of basic block %d", bb->index);
3761 err = 1;
3765 bsi = bsi_last (bb);
3766 if (bsi_end_p (bsi))
3767 continue;
3769 stmt = bsi_stmt (bsi);
3771 err |= verify_eh_edges (stmt);
3773 if (is_ctrl_stmt (stmt))
3775 FOR_EACH_EDGE (e, ei, bb->succs)
3776 if (e->flags & EDGE_FALLTHRU)
3778 error ("fallthru edge after a control statement in bb %d",
3779 bb->index);
3780 err = 1;
3784 if (TREE_CODE (stmt) != COND_EXPR)
3786 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
3787 after anything other than an if statement. */
3788 FOR_EACH_EDGE (e, ei, bb->succs)
3789 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
3791 error ("true/false edge after a non-COND_EXPR in bb %d",
3792 bb->index);
3793 err = 1;
3797 switch (TREE_CODE (stmt))
3799 case COND_EXPR:
3801 edge true_edge;
3802 edge false_edge;
3803 if (TREE_CODE (COND_EXPR_THEN (stmt)) != GOTO_EXPR
3804 || TREE_CODE (COND_EXPR_ELSE (stmt)) != GOTO_EXPR)
3806 error ("structured COND_EXPR at the end of bb %d", bb->index);
3807 err = 1;
3810 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
3812 if (!true_edge || !false_edge
3813 || !(true_edge->flags & EDGE_TRUE_VALUE)
3814 || !(false_edge->flags & EDGE_FALSE_VALUE)
3815 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3816 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3817 || EDGE_COUNT (bb->succs) >= 3)
3819 error ("wrong outgoing edge flags at end of bb %d",
3820 bb->index);
3821 err = 1;
3824 if (!has_label_p (true_edge->dest,
3825 GOTO_DESTINATION (COND_EXPR_THEN (stmt))))
3827 error ("%<then%> label does not match edge at end of bb %d",
3828 bb->index);
3829 err = 1;
3832 if (!has_label_p (false_edge->dest,
3833 GOTO_DESTINATION (COND_EXPR_ELSE (stmt))))
3835 error ("%<else%> label does not match edge at end of bb %d",
3836 bb->index);
3837 err = 1;
3840 break;
3842 case GOTO_EXPR:
3843 if (simple_goto_p (stmt))
3845 error ("explicit goto at end of bb %d", bb->index);
3846 err = 1;
3848 else
3850 /* FIXME. We should double check that the labels in the
3851 destination blocks have their address taken. */
3852 FOR_EACH_EDGE (e, ei, bb->succs)
3853 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
3854 | EDGE_FALSE_VALUE))
3855 || !(e->flags & EDGE_ABNORMAL))
3857 error ("wrong outgoing edge flags at end of bb %d",
3858 bb->index);
3859 err = 1;
3862 break;
3864 case RETURN_EXPR:
3865 if (!single_succ_p (bb)
3866 || (single_succ_edge (bb)->flags
3867 & (EDGE_FALLTHRU | EDGE_ABNORMAL
3868 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3870 error ("wrong outgoing edge flags at end of bb %d", bb->index);
3871 err = 1;
3873 if (single_succ (bb) != EXIT_BLOCK_PTR)
3875 error ("return edge does not point to exit in bb %d",
3876 bb->index);
3877 err = 1;
3879 break;
3881 case SWITCH_EXPR:
3883 tree prev;
3884 edge e;
3885 size_t i, n;
3886 tree vec;
3888 vec = SWITCH_LABELS (stmt);
3889 n = TREE_VEC_LENGTH (vec);
3891 /* Mark all the destination basic blocks. */
3892 for (i = 0; i < n; ++i)
3894 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3895 basic_block label_bb = label_to_block (lab);
3897 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
3898 label_bb->aux = (void *)1;
3901 /* Verify that the case labels are sorted. */
3902 prev = TREE_VEC_ELT (vec, 0);
3903 for (i = 1; i < n - 1; ++i)
3905 tree c = TREE_VEC_ELT (vec, i);
3906 if (! CASE_LOW (c))
3908 error ("found default case not at end of case vector");
3909 err = 1;
3910 continue;
3912 if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
3914 error ("case labels not sorted: ");
3915 print_generic_expr (stderr, prev, 0);
3916 fprintf (stderr," is greater than ");
3917 print_generic_expr (stderr, c, 0);
3918 fprintf (stderr," but comes before it.\n");
3919 err = 1;
3921 prev = c;
3923 if (CASE_LOW (TREE_VEC_ELT (vec, n - 1)))
3925 error ("no default case found at end of case vector");
3926 err = 1;
3929 FOR_EACH_EDGE (e, ei, bb->succs)
3931 if (!e->dest->aux)
3933 error ("extra outgoing edge %d->%d",
3934 bb->index, e->dest->index);
3935 err = 1;
3937 e->dest->aux = (void *)2;
3938 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
3939 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3941 error ("wrong outgoing edge flags at end of bb %d",
3942 bb->index);
3943 err = 1;
3947 /* Check that we have all of them. */
3948 for (i = 0; i < n; ++i)
3950 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3951 basic_block label_bb = label_to_block (lab);
3953 if (label_bb->aux != (void *)2)
3955 error ("missing edge %i->%i",
3956 bb->index, label_bb->index);
3957 err = 1;
3961 FOR_EACH_EDGE (e, ei, bb->succs)
3962 e->dest->aux = (void *)0;
3965 default: ;
3969 if (dom_computed[CDI_DOMINATORS] >= DOM_NO_FAST_QUERY)
3970 verify_dominators (CDI_DOMINATORS);
3972 return err;
3976 /* Updates phi nodes after creating a forwarder block joined
3977 by edge FALLTHRU. */
3979 static void
3980 tree_make_forwarder_block (edge fallthru)
3982 edge e;
3983 edge_iterator ei;
3984 basic_block dummy, bb;
3985 tree phi, new_phi, var;
3987 dummy = fallthru->src;
3988 bb = fallthru->dest;
3990 if (single_pred_p (bb))
3991 return;
3993 /* If we redirected a branch we must create new phi nodes at the
3994 start of BB. */
3995 for (phi = phi_nodes (dummy); phi; phi = PHI_CHAIN (phi))
3997 var = PHI_RESULT (phi);
3998 new_phi = create_phi_node (var, bb);
3999 SSA_NAME_DEF_STMT (var) = new_phi;
4000 SET_PHI_RESULT (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
4001 add_phi_arg (new_phi, PHI_RESULT (phi), fallthru);
4004 /* Ensure that the PHI node chain is in the same order. */
4005 set_phi_nodes (bb, phi_reverse (phi_nodes (bb)));
4007 /* Add the arguments we have stored on edges. */
4008 FOR_EACH_EDGE (e, ei, bb->preds)
4010 if (e == fallthru)
4011 continue;
4013 flush_pending_stmts (e);
4018 /* Return a non-special label in the head of basic block BB.
4019 Create one if it doesn't exist. */
4021 tree
4022 tree_block_label (basic_block bb)
4024 block_stmt_iterator i, s = bsi_start (bb);
4025 bool first = true;
4026 tree label, stmt;
4028 for (i = s; !bsi_end_p (i); first = false, bsi_next (&i))
4030 stmt = bsi_stmt (i);
4031 if (TREE_CODE (stmt) != LABEL_EXPR)
4032 break;
4033 label = LABEL_EXPR_LABEL (stmt);
4034 if (!DECL_NONLOCAL (label))
4036 if (!first)
4037 bsi_move_before (&i, &s);
4038 return label;
4042 label = create_artificial_label ();
4043 stmt = build1 (LABEL_EXPR, void_type_node, label);
4044 bsi_insert_before (&s, stmt, BSI_NEW_STMT);
4045 return label;
4049 /* Attempt to perform edge redirection by replacing a possibly complex
4050 jump instruction by a goto or by removing the jump completely.
4051 This can apply only if all edges now point to the same block. The
4052 parameters and return values are equivalent to
4053 redirect_edge_and_branch. */
4055 static edge
4056 tree_try_redirect_by_replacing_jump (edge e, basic_block target)
4058 basic_block src = e->src;
4059 block_stmt_iterator b;
4060 tree stmt;
4062 /* We can replace or remove a complex jump only when we have exactly
4063 two edges. */
4064 if (EDGE_COUNT (src->succs) != 2
4065 /* Verify that all targets will be TARGET. Specifically, the
4066 edge that is not E must also go to TARGET. */
4067 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4068 return NULL;
4070 b = bsi_last (src);
4071 if (bsi_end_p (b))
4072 return NULL;
4073 stmt = bsi_stmt (b);
4075 if (TREE_CODE (stmt) == COND_EXPR
4076 || TREE_CODE (stmt) == SWITCH_EXPR)
4078 bsi_remove (&b, true);
4079 e = ssa_redirect_edge (e, target);
4080 e->flags = EDGE_FALLTHRU;
4081 return e;
4084 return NULL;
4088 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4089 edge representing the redirected branch. */
4091 static edge
4092 tree_redirect_edge_and_branch (edge e, basic_block dest)
4094 basic_block bb = e->src;
4095 block_stmt_iterator bsi;
4096 edge ret;
4097 tree label, stmt;
4099 if (e->flags & EDGE_ABNORMAL)
4100 return NULL;
4102 if (e->src != ENTRY_BLOCK_PTR
4103 && (ret = tree_try_redirect_by_replacing_jump (e, dest)))
4104 return ret;
4106 if (e->dest == dest)
4107 return NULL;
4109 label = tree_block_label (dest);
4111 bsi = bsi_last (bb);
4112 stmt = bsi_end_p (bsi) ? NULL : bsi_stmt (bsi);
4114 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
4116 case COND_EXPR:
4117 stmt = (e->flags & EDGE_TRUE_VALUE
4118 ? COND_EXPR_THEN (stmt)
4119 : COND_EXPR_ELSE (stmt));
4120 GOTO_DESTINATION (stmt) = label;
4121 break;
4123 case GOTO_EXPR:
4124 /* No non-abnormal edges should lead from a non-simple goto, and
4125 simple ones should be represented implicitly. */
4126 gcc_unreachable ();
4128 case SWITCH_EXPR:
4130 tree cases = get_cases_for_edge (e, stmt);
4132 /* If we have a list of cases associated with E, then use it
4133 as it's a lot faster than walking the entire case vector. */
4134 if (cases)
4136 edge e2 = find_edge (e->src, dest);
4137 tree last, first;
4139 first = cases;
4140 while (cases)
4142 last = cases;
4143 CASE_LABEL (cases) = label;
4144 cases = TREE_CHAIN (cases);
4147 /* If there was already an edge in the CFG, then we need
4148 to move all the cases associated with E to E2. */
4149 if (e2)
4151 tree cases2 = get_cases_for_edge (e2, stmt);
4153 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4154 TREE_CHAIN (cases2) = first;
4157 else
4159 tree vec = SWITCH_LABELS (stmt);
4160 size_t i, n = TREE_VEC_LENGTH (vec);
4162 for (i = 0; i < n; i++)
4164 tree elt = TREE_VEC_ELT (vec, i);
4166 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4167 CASE_LABEL (elt) = label;
4171 break;
4174 case RETURN_EXPR:
4175 bsi_remove (&bsi, true);
4176 e->flags |= EDGE_FALLTHRU;
4177 break;
4179 default:
4180 /* Otherwise it must be a fallthru edge, and we don't need to
4181 do anything besides redirecting it. */
4182 gcc_assert (e->flags & EDGE_FALLTHRU);
4183 break;
4186 /* Update/insert PHI nodes as necessary. */
4188 /* Now update the edges in the CFG. */
4189 e = ssa_redirect_edge (e, dest);
4191 return e;
4195 /* Simple wrapper, as we can always redirect fallthru edges. */
4197 static basic_block
4198 tree_redirect_edge_and_branch_force (edge e, basic_block dest)
4200 e = tree_redirect_edge_and_branch (e, dest);
4201 gcc_assert (e);
4203 return NULL;
4207 /* Splits basic block BB after statement STMT (but at least after the
4208 labels). If STMT is NULL, BB is split just after the labels. */
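/* Usually reached through the cfghooks wrapper, e.g. (sketch):

     basic_block new_bb = split_block (bb, stmt)->dest;

   Everything after STMT (and after any leading labels) ends up in the
   returned block, with the outgoing edges moved over to it.  */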
4210 static basic_block
4211 tree_split_block (basic_block bb, void *stmt)
4213 block_stmt_iterator bsi;
4214 tree_stmt_iterator tsi_tgt;
4215 tree act;
4216 basic_block new_bb;
4217 edge e;
4218 edge_iterator ei;
4220 new_bb = create_empty_bb (bb);
4222 /* Redirect the outgoing edges. */
4223 new_bb->succs = bb->succs;
4224 bb->succs = NULL;
4225 FOR_EACH_EDGE (e, ei, new_bb->succs)
4226 e->src = new_bb;
4228 if (stmt && TREE_CODE ((tree) stmt) == LABEL_EXPR)
4229 stmt = NULL;
4231 /* Move everything from BSI to the new basic block. */
4232 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4234 act = bsi_stmt (bsi);
4235 if (TREE_CODE (act) == LABEL_EXPR)
4236 continue;
4238 if (!stmt)
4239 break;
4241 if (stmt == act)
4243 bsi_next (&bsi);
4244 break;
4248 if (bsi_end_p (bsi))
4249 return new_bb;
4251 /* Split the statement list - avoid re-creating new containers as this
4252 brings ugly quadratic memory consumption in the inliner.
4253 (We are still quadratic since we need to update stmt BB pointers,
4254 sadly.) */
4255 new_bb->stmt_list = tsi_split_statement_list_before (&bsi.tsi);
4256 for (tsi_tgt = tsi_start (new_bb->stmt_list);
4257 !tsi_end_p (tsi_tgt); tsi_next (&tsi_tgt))
4258 change_bb_for_stmt (tsi_stmt (tsi_tgt), new_bb);
4260 return new_bb;
4264 /* Moves basic block BB after block AFTER. */
4266 static bool
4267 tree_move_block_after (basic_block bb, basic_block after)
4269 if (bb->prev_bb == after)
4270 return true;
4272 unlink_block (bb);
4273 link_block (bb, after);
4275 return true;
4279 /* Return true if basic_block can be duplicated. */
4281 static bool
4282 tree_can_duplicate_bb_p (basic_block bb ATTRIBUTE_UNUSED)
4284 return true;
4288 /* Create a duplicate of the basic block BB. NOTE: This does not
4289 preserve SSA form. */
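/* Callers are expected to wire up the incoming edges themselves and
   then restore SSA form from the replacement mappings recorded by
   create_new_def_for, e.g. (sketch):

     add_phi_args_after_copy (region_copy, n_region);
     update_ssa (TODO_update_ssa);

   much as tree_duplicate_sese_region below does for a whole region.  */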
4291 static basic_block
4292 tree_duplicate_bb (basic_block bb)
4294 basic_block new_bb;
4295 block_stmt_iterator bsi, bsi_tgt;
4296 tree phi;
4298 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
4300 /* Copy the PHI nodes. We ignore PHI node arguments here because
4301 the incoming edges have not been set up yet. */
4302 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4304 tree copy = create_phi_node (PHI_RESULT (phi), new_bb);
4305 create_new_def_for (PHI_RESULT (copy), copy, PHI_RESULT_PTR (copy));
4308 /* Keep the chain of PHI nodes in the same order so that they can be
4309 updated by ssa_redirect_edge. */
4310 set_phi_nodes (new_bb, phi_reverse (phi_nodes (new_bb)));
4312 bsi_tgt = bsi_start (new_bb);
4313 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4315 def_operand_p def_p;
4316 ssa_op_iter op_iter;
4317 tree stmt, copy;
4318 int region;
4320 stmt = bsi_stmt (bsi);
4321 if (TREE_CODE (stmt) == LABEL_EXPR)
4322 continue;
4324 /* Create a new copy of STMT and duplicate STMT's virtual
4325 operands. */
4326 copy = unshare_expr (stmt);
4327 bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
4328 copy_virtual_operands (copy, stmt);
4329 region = lookup_stmt_eh_region (stmt);
4330 if (region >= 0)
4331 add_stmt_to_eh_region (copy, region);
4333 /* Create new names for all the definitions created by COPY and
4334 add replacement mappings for each new name. */
4335 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
4336 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
4339 return new_bb;
4343 /* Basic block BB_COPY was created by code duplication. Add phi node
4344 arguments for edges going out of BB_COPY. The blocks that were
4345 duplicated have BB_DUPLICATED set. */
4347 void
4348 add_phi_args_after_copy_bb (basic_block bb_copy)
4350 basic_block bb, dest;
4351 edge e, e_copy;
4352 edge_iterator ei;
4353 tree phi, phi_copy, phi_next, def;
4355 bb = get_bb_original (bb_copy);
4357 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
4359 if (!phi_nodes (e_copy->dest))
4360 continue;
4362 if (e_copy->dest->flags & BB_DUPLICATED)
4363 dest = get_bb_original (e_copy->dest);
4364 else
4365 dest = e_copy->dest;
4367 e = find_edge (bb, dest);
4368 if (!e)
4370 /* During loop unrolling the target of the latch edge is copied.
4371 In this case we are not looking for the edge to dest, but for the
4372 edge to the duplicated block whose original was dest. */
4373 FOR_EACH_EDGE (e, ei, bb->succs)
4374 if ((e->dest->flags & BB_DUPLICATED)
4375 && get_bb_original (e->dest) == dest)
4376 break;
4378 gcc_assert (e != NULL);
4381 for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
4382 phi;
4383 phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
4385 phi_next = PHI_CHAIN (phi);
4386 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4387 add_phi_arg (phi_copy, def, e_copy);
4392 /* Blocks in REGION_COPY array of length N_REGION were created by
4393 duplication of basic blocks. Add phi node arguments for edges
4394 going from these blocks. */
4396 void
4397 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region)
4399 unsigned i;
4401 for (i = 0; i < n_region; i++)
4402 region_copy[i]->flags |= BB_DUPLICATED;
4404 for (i = 0; i < n_region; i++)
4405 add_phi_args_after_copy_bb (region_copy[i]);
4407 for (i = 0; i < n_region; i++)
4408 region_copy[i]->flags &= ~BB_DUPLICATED;
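/* Note that get_bb_original and get_bb_copy, used above, only work while
   the original/copy tables are active, so initialize_original_copy_tables
   must have been called before the blocks were duplicated;
   tree_duplicate_sese_region below does this around its call to copy_bbs. */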
4411 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
4412 important exit edge EXIT. By important we mean that no SSA name defined
4413 inside the region is live over the other exit edges of the region. All entry
4414 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
4415 to the duplicate of the region. SSA form, dominance and loop information
4416 are updated. The new basic blocks are stored in REGION_COPY in the same
4417 order as they appear in REGION, provided that REGION_COPY is not NULL.
4418 The function returns false if it is unable to copy the region,
4419 true otherwise. */
4421 bool
4422 tree_duplicate_sese_region (edge entry, edge exit,
4423 basic_block *region, unsigned n_region,
4424 basic_block *region_copy)
4426 unsigned i, n_doms;
4427 bool free_region_copy = false, copying_header = false;
4428 struct loop *loop = entry->dest->loop_father;
4429 edge exit_copy;
4430 basic_block *doms;
4431 edge redirected;
4432 int total_freq = 0, entry_freq = 0;
4433 gcov_type total_count = 0, entry_count = 0;
4435 if (!can_copy_bbs_p (region, n_region))
4436 return false;
4438 /* Some sanity checking. Note that we do not check for all possible
4439 misuses of the function. I.e., if you ask to copy something weird,
4440 it will work, but the state of the data structures probably will not
4441 be correct. */
4442 for (i = 0; i < n_region; i++)
4444 /* We do not handle subloops, i.e. all the blocks must belong to the
4445 same loop. */
4446 if (region[i]->loop_father != loop)
4447 return false;
4449 if (region[i] != entry->dest
4450 && region[i] == loop->header)
4451 return false;
4454 loop->copy = loop;
4456 /* In case the function is used for loop header copying (which is the primary
4457 use), ensure that EXIT and its copy will be the new latch and entry edges. */
4458 if (loop->header == entry->dest)
4460 copying_header = true;
4461 loop->copy = loop->outer;
4463 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
4464 return false;
4466 for (i = 0; i < n_region; i++)
4467 if (region[i] != exit->src
4468 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
4469 return false;
4472 if (!region_copy)
4474 region_copy = XNEWVEC (basic_block, n_region);
4475 free_region_copy = true;
4478 gcc_assert (!need_ssa_update_p ());
4480 /* Record blocks outside the region that are dominated by something
4481 inside. */
4482 doms = XNEWVEC (basic_block, n_basic_blocks);
4483 initialize_original_copy_tables ();
4485 n_doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region, doms);
4487 if (entry->dest->count)
4489 total_count = entry->dest->count;
4490 entry_count = entry->count;
4491 /* Fix up corner cases, to avoid division by zero or creation of negative
4492 frequencies. */
4493 if (entry_count > total_count)
4494 entry_count = total_count;
4496 else
4498 total_freq = entry->dest->frequency;
4499 entry_freq = EDGE_FREQUENCY (entry);
4500 /* Fix up corner cases, to avoid division by zero or creation of negative
4501 frequencies. */
4502 if (total_freq == 0)
4503 total_freq = 1;
4504 else if (entry_freq > total_freq)
4505 entry_freq = total_freq;
4508 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
4509 split_edge_bb_loc (entry));
4510 if (total_count)
4512 scale_bbs_frequencies_gcov_type (region, n_region,
4513 total_count - entry_count,
4514 total_count);
4515 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
4516 total_count);
4518 else
4520 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
4521 total_freq);
4522 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
4525 if (copying_header)
4527 loop->header = exit->dest;
4528 loop->latch = exit->src;
4531 /* Redirect the entry and add the phi node arguments. */
4532 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
4533 gcc_assert (redirected != NULL);
4534 flush_pending_stmts (entry);
4536 /* Concerning updating of dominators: We must recount dominators
4537 for the entry block and its copy. Anything outside of the
4538 region that was dominated by something inside needs recounting as
4539 well. */
4540 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
4541 doms[n_doms++] = get_bb_original (entry->dest);
4542 iterate_fix_dominators (CDI_DOMINATORS, doms, n_doms);
4543 free (doms);
4545 /* Add the other PHI node arguments. */
4546 add_phi_args_after_copy (region_copy, n_region);
4548 /* Update the SSA web. */
4549 update_ssa (TODO_update_ssa);
4551 if (free_region_copy)
4552 free (region_copy);
4554 free_original_copy_tables ();
4555 return true;
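/* A sketch of how the primary user, loop header copying, calls this
   function (the variable names below are illustrative only): with BBS
   holding the N_BBS blocks to duplicate and COPIED_BBS receiving the
   copies, the call is roughly

     if (!tree_duplicate_sese_region (loop_preheader_edge (loop), exit,
                                      bbs, n_bbs, copied_bbs))
       ...give up on this loop...

   and on success the entry edge has been redirected to the copies. */

/* The two macros below instantiate VEC(basic_block,heap), the growable
   vector of blocks used by gather_blocks_in_sese_region. */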
4559 DEF_VEC_P(basic_block);
4560 DEF_VEC_ALLOC_P(basic_block,heap);
4563 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
4564 adding blocks when the dominator traversal reaches EXIT. This
4565 function silently assumes that ENTRY strictly dominates EXIT. */
4567 static void
4568 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
4569 VEC(basic_block,heap) **bbs_p)
4571 basic_block son;
4573 for (son = first_dom_son (CDI_DOMINATORS, entry);
4574 son;
4575 son = next_dom_son (CDI_DOMINATORS, son))
4577 VEC_safe_push (basic_block, heap, *bbs_p, son);
4578 if (son != exit)
4579 gather_blocks_in_sese_region (son, exit, bbs_p);
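/* Data passed to move_stmt_r (via walk_tree) when moving the statements
   of a block to another function. BLOCK is the new TREE_BLOCK for the
   statements, FROM_CONTEXT and TO_CONTEXT are the source and destination
   FUNCTION_DECLs, VARS_TO_REMOVE collects the DECL_UIDs of variables that
   must be removed from the source function, NEW_LABEL_MAP maps old labels
   to their replacements, and REMAP_DECLS_P says whether local declarations
   should be remapped at all. */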
4584 struct move_stmt_d
4586 tree block;
4587 tree from_context;
4588 tree to_context;
4589 bitmap vars_to_remove;
4590 htab_t new_label_map;
4591 bool remap_decls_p;
4594 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
4595 contained in *TP and change the DECL_CONTEXT of every local
4596 variable referenced in *TP. */
4598 static tree
4599 move_stmt_r (tree *tp, int *walk_subtrees, void *data)
4601 struct move_stmt_d *p = (struct move_stmt_d *) data;
4602 tree t = *tp;
4604 if (p->block && IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (TREE_CODE (t))))
4605 TREE_BLOCK (t) = p->block;
4607 if (OMP_DIRECTIVE_P (t)
4608 && TREE_CODE (t) != OMP_RETURN
4609 && TREE_CODE (t) != OMP_CONTINUE)
4611 /* Do not remap variables inside OMP directives. Variables
4612 referenced in clauses and directive header belong to the
4613 parent function and should not be moved into the child
4614 function. */
4615 bool save_remap_decls_p = p->remap_decls_p;
4616 p->remap_decls_p = false;
4617 *walk_subtrees = 0;
4619 walk_tree (&OMP_BODY (t), move_stmt_r, p, NULL);
4621 p->remap_decls_p = save_remap_decls_p;
4623 else if (DECL_P (t) && DECL_CONTEXT (t) == p->from_context)
4625 if (TREE_CODE (t) == LABEL_DECL)
4627 if (p->new_label_map)
4629 struct tree_map in, *out;
4630 in.from = t;
4631 out = htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
4632 if (out)
4633 *tp = t = out->to;
4636 DECL_CONTEXT (t) = p->to_context;
4638 else if (p->remap_decls_p)
4640 DECL_CONTEXT (t) = p->to_context;
4642 if (TREE_CODE (t) == VAR_DECL)
4644 struct function *f = DECL_STRUCT_FUNCTION (p->to_context);
4645 f->unexpanded_var_list
4646 = tree_cons (0, t, f->unexpanded_var_list);
4648 /* Mark T to be removed from the original function,
4649 otherwise it will be given a DECL_RTL when the
4650 original function is expanded. */
4651 bitmap_set_bit (p->vars_to_remove, DECL_UID (t));
4655 else if (TYPE_P (t))
4656 *walk_subtrees = 0;
4658 return NULL_TREE;
4662 /* Move basic block BB from function CFUN to function DEST_FN. The
4663 block is moved out of the original linked list and placed after
4664 block AFTER in the new list. Also, the block is removed from the
4665 original array of blocks and placed in DEST_FN's array of blocks.
4666 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
4667 updated to reflect the moved edges.
4669 On exit, local variables that need to be removed from
4670 CFUN->UNEXPANDED_VAR_LIST will have been added to VARS_TO_REMOVE. */
4672 static void
4673 move_block_to_fn (struct function *dest_cfun, basic_block bb,
4674 basic_block after, bool update_edge_count_p,
4675 bitmap vars_to_remove, htab_t new_label_map, int eh_offset)
4677 struct control_flow_graph *cfg;
4678 edge_iterator ei;
4679 edge e;
4680 block_stmt_iterator si;
4681 struct move_stmt_d d;
4682 unsigned old_len, new_len;
4683 basic_block *addr;
4685 /* Link BB to the new linked list. */
4686 move_block_after (bb, after);
4688 /* Update the edge count in the corresponding flowgraphs. */
4689 if (update_edge_count_p)
4690 FOR_EACH_EDGE (e, ei, bb->succs)
4692 cfun->cfg->x_n_edges--;
4693 dest_cfun->cfg->x_n_edges++;
4696 /* Remove BB from the original basic block array. */
4697 VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
4698 cfun->cfg->x_n_basic_blocks--;
4700 /* Grow DEST_CFUN's basic block array if needed. */
4701 cfg = dest_cfun->cfg;
4702 cfg->x_n_basic_blocks++;
4703 if (bb->index > cfg->x_last_basic_block)
4704 cfg->x_last_basic_block = bb->index;
4706 old_len = VEC_length (basic_block, cfg->x_basic_block_info);
4707 if ((unsigned) cfg->x_last_basic_block >= old_len)
4709 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
4710 VEC_safe_grow (basic_block, gc, cfg->x_basic_block_info, new_len);
4711 addr = VEC_address (basic_block, cfg->x_basic_block_info);
4712 memset (&addr[old_len], 0, sizeof (basic_block) * (new_len - old_len));
4715 VEC_replace (basic_block, cfg->x_basic_block_info,
4716 cfg->x_last_basic_block, bb);
4718 /* The statements in BB need to be associated with a new TREE_BLOCK.
4719 Labels need to be associated with a new label-to-block map. */
4720 memset (&d, 0, sizeof (d));
4721 d.vars_to_remove = vars_to_remove;
4723 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
4725 tree stmt = bsi_stmt (si);
4726 int region;
4728 d.from_context = cfun->decl;
4729 d.to_context = dest_cfun->decl;
4730 d.remap_decls_p = true;
4731 d.new_label_map = new_label_map;
4732 if (TREE_BLOCK (stmt))
4733 d.block = DECL_INITIAL (dest_cfun->decl);
4735 walk_tree (&stmt, move_stmt_r, &d, NULL);
4737 if (TREE_CODE (stmt) == LABEL_EXPR)
4739 tree label = LABEL_EXPR_LABEL (stmt);
4740 int uid = LABEL_DECL_UID (label);
4742 gcc_assert (uid > -1);
4744 old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
4745 if (old_len <= (unsigned) uid)
4747 new_len = 3 * uid / 2;
4748 VEC_safe_grow (basic_block, gc, cfg->x_label_to_block_map,
4749 new_len);
4750 addr = VEC_address (basic_block, cfg->x_label_to_block_map);
4751 memset (&addr[old_len], 0,
4752 sizeof (basic_block) * (new_len - old_len));
4755 VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
4756 VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
4758 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
4760 if (uid >= dest_cfun->last_label_uid)
4761 dest_cfun->last_label_uid = uid + 1;
4763 else if (TREE_CODE (stmt) == RESX_EXPR && eh_offset != 0)
4764 TREE_OPERAND (stmt, 0) =
4765 build_int_cst (NULL_TREE,
4766 TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0))
4767 + eh_offset);
4769 region = lookup_stmt_eh_region (stmt);
4770 if (region >= 0)
4772 add_stmt_to_eh_region_fn (dest_cfun, stmt, region + eh_offset);
4773 remove_stmt_from_eh_region (stmt);
4778 /* Examine the statements in BB (which is in SRC_CFUN); find and return
4779 the outermost EH region. Use REGION as the incoming base EH region. */
4781 static int
4782 find_outermost_region_in_block (struct function *src_cfun,
4783 basic_block bb, int region)
4785 block_stmt_iterator si;
4787 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
4789 tree stmt = bsi_stmt (si);
4790 int stmt_region;
4792 if (TREE_CODE (stmt) == RESX_EXPR)
4793 stmt_region = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
4794 else
4795 stmt_region = lookup_stmt_eh_region_fn (src_cfun, stmt);
4796 if (stmt_region > 0)
4798 if (region < 0)
4799 region = stmt_region;
4800 else if (stmt_region != region)
4802 region = eh_region_outermost (src_cfun, stmt_region, region);
4803 gcc_assert (region != -1);
4808 return region;
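/* Callback passed to duplicate_eh_regions by move_sese_region_to_fn below.
   DATA is the hash table of label mappings being built; create an
   artificial label to replace DECL, record the mapping in the table and
   return the new label. */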
4811 static tree
4812 new_label_mapper (tree decl, void *data)
4814 htab_t hash = (htab_t) data;
4815 struct tree_map *m;
4816 void **slot;
4818 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
4820 m = xmalloc (sizeof (struct tree_map));
4821 m->hash = DECL_UID (decl);
4822 m->from = decl;
4823 m->to = create_artificial_label ();
4824 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
4826 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
4827 gcc_assert (*slot == NULL);
4829 *slot = m;
4831 return m->to;
4834 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
4835 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
4836 single basic block in the original CFG and the new basic block is
4837 returned. DEST_CFUN must not have a CFG yet.
4839 Note that the region need not be a pure SESE region. Blocks inside
4840 the region may contain calls to abort/exit. The only restriction
4841 is that ENTRY_BB should be the only entry point and it must
4842 dominate EXIT_BB.
4844 All local variables referenced in the region are assumed to be in
4845 the corresponding BLOCK_VARS and unexpanded variable lists
4846 associated with DEST_CFUN. */
4848 basic_block
4849 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
4850 basic_block exit_bb)
4852 VEC(basic_block,heap) *bbs;
4853 basic_block after, bb, *entry_pred, *exit_succ;
4854 struct function *saved_cfun;
4855 int *entry_flag, *exit_flag, eh_offset;
4856 unsigned i, num_entry_edges, num_exit_edges;
4857 edge e;
4858 edge_iterator ei;
4859 bitmap vars_to_remove;
4860 htab_t new_label_map;
4862 saved_cfun = cfun;
4864 /* Collect all the blocks in the region. Manually add ENTRY_BB
4865 because it won't be added by dfs_enumerate_from. */
4866 calculate_dominance_info (CDI_DOMINATORS);
4868 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
4869 region. */
4870 gcc_assert (entry_bb != exit_bb
4871 && (!exit_bb
4872 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
4874 bbs = NULL;
4875 VEC_safe_push (basic_block, heap, bbs, entry_bb);
4876 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
4878 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
4879 the predecessor edges to ENTRY_BB and the successor edges to
4880 EXIT_BB so that we can re-attach them to the new basic block that
4881 will replace the region. */
4882 num_entry_edges = EDGE_COUNT (entry_bb->preds);
4883 entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
4884 entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
4885 i = 0;
4886 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
4888 entry_flag[i] = e->flags;
4889 entry_pred[i++] = e->src;
4890 remove_edge (e);
4893 if (exit_bb)
4895 num_exit_edges = EDGE_COUNT (exit_bb->succs);
4896 exit_succ = (basic_block *) xcalloc (num_exit_edges,
4897 sizeof (basic_block));
4898 exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
4899 i = 0;
4900 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
4902 exit_flag[i] = e->flags;
4903 exit_succ[i++] = e->dest;
4904 remove_edge (e);
4907 else
4909 num_exit_edges = 0;
4910 exit_succ = NULL;
4911 exit_flag = NULL;
4914 /* Switch context to the child function to initialize DEST_FN's CFG. */
4915 gcc_assert (dest_cfun->cfg == NULL);
4916 cfun = dest_cfun;
4918 init_empty_tree_cfg ();
4920 /* Initialize EH information for the new function. */
4921 eh_offset = 0;
4922 new_label_map = NULL;
4923 if (saved_cfun->eh)
4925 int region = -1;
4927 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
4928 region = find_outermost_region_in_block (saved_cfun, bb, region);
4930 init_eh_for_function ();
4931 if (region != -1)
4933 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
4934 eh_offset = duplicate_eh_regions (saved_cfun, new_label_mapper,
4935 new_label_map, region, 0);
4939 cfun = saved_cfun;
4941 /* Move blocks from BBS into DEST_CFUN. */
4942 gcc_assert (VEC_length (basic_block, bbs) >= 2);
4943 after = dest_cfun->cfg->x_entry_block_ptr;
4944 vars_to_remove = BITMAP_ALLOC (NULL);
4945 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
4947 /* No need to update edge counts on the last block. They have
4948 already been updated earlier when we detached the region from
4949 the original CFG. */
4950 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, vars_to_remove,
4951 new_label_map, eh_offset);
4952 after = bb;
4955 if (new_label_map)
4956 htab_delete (new_label_map);
4958 /* Remove the variables marked in VARS_TO_REMOVE from
4959 CFUN->UNEXPANDED_VAR_LIST. Otherwise, they will be given a
4960 DECL_RTL in the context of CFUN. */
4961 if (!bitmap_empty_p (vars_to_remove))
4963 tree *p;
4965 for (p = &cfun->unexpanded_var_list; *p; )
4967 tree var = TREE_VALUE (*p);
4968 if (bitmap_bit_p (vars_to_remove, DECL_UID (var)))
4970 *p = TREE_CHAIN (*p);
4971 continue;
4974 p = &TREE_CHAIN (*p);
4978 BITMAP_FREE (vars_to_remove);
4980 /* Rewire the entry and exit blocks. The successor to the entry
4981 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
4982 the child function. Similarly, the predecessor of DEST_FN's
4983 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
4984 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
4985 various CFG manipulation functions get to the right CFG.
4987 FIXME, this is silly. The CFG ought to become a parameter to
4988 these helpers. */
4989 cfun = dest_cfun;
4990 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
4991 if (exit_bb)
4992 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
4993 cfun = saved_cfun;
4995 /* Back in the original function, the SESE region has disappeared;
4996 create a new basic block in its place. */
4997 bb = create_empty_bb (entry_pred[0]);
4998 for (i = 0; i < num_entry_edges; i++)
4999 make_edge (entry_pred[i], bb, entry_flag[i]);
5001 for (i = 0; i < num_exit_edges; i++)
5002 make_edge (bb, exit_succ[i], exit_flag[i]);
5004 if (exit_bb)
5006 free (exit_flag);
5007 free (exit_succ);
5009 free (entry_flag);
5010 free (entry_pred);
5011 free_dominance_info (CDI_DOMINATORS);
5012 free_dominance_info (CDI_POST_DOMINATORS);
5013 VEC_free (basic_block, heap, bbs);
5015 return bb;
5019 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h) */
5021 void
5022 dump_function_to_file (tree fn, FILE *file, int flags)
5024 tree arg, vars, var;
5025 bool ignore_topmost_bind = false, any_var = false;
5026 basic_block bb;
5027 tree chain;
5028 struct function *saved_cfun;
5030 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
5032 arg = DECL_ARGUMENTS (fn);
5033 while (arg)
5035 print_generic_expr (file, arg, dump_flags);
5036 if (TREE_CHAIN (arg))
5037 fprintf (file, ", ");
5038 arg = TREE_CHAIN (arg);
5040 fprintf (file, ")\n");
5042 if (flags & TDF_DETAILS)
5043 dump_eh_tree (file, DECL_STRUCT_FUNCTION (fn));
5044 if (flags & TDF_RAW)
5046 dump_node (fn, TDF_SLIM | flags, file);
5047 return;
5050 /* Switch CFUN to point to FN. */
5051 saved_cfun = cfun;
5052 cfun = DECL_STRUCT_FUNCTION (fn);
5054 /* When GIMPLE is lowered, the variables are no longer available in
5055 BIND_EXPRs, so display them separately. */
5056 if (cfun && cfun->decl == fn && cfun->unexpanded_var_list)
5058 ignore_topmost_bind = true;
5060 fprintf (file, "{\n");
5061 for (vars = cfun->unexpanded_var_list; vars; vars = TREE_CHAIN (vars))
5063 var = TREE_VALUE (vars);
5065 print_generic_decl (file, var, flags);
5066 fprintf (file, "\n");
5068 any_var = true;
5072 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
5074 /* Make a CFG based dump. */
5075 check_bb_profile (ENTRY_BLOCK_PTR, file);
5076 if (!ignore_topmost_bind)
5077 fprintf (file, "{\n");
5079 if (any_var && n_basic_blocks)
5080 fprintf (file, "\n");
5082 FOR_EACH_BB (bb)
5083 dump_generic_bb (file, bb, 2, flags);
5085 fprintf (file, "}\n");
5086 check_bb_profile (EXIT_BLOCK_PTR, file);
5088 else
5090 int indent;
5092 /* Make a tree based dump. */
5093 chain = DECL_SAVED_TREE (fn);
5095 if (chain && TREE_CODE (chain) == BIND_EXPR)
5097 if (ignore_topmost_bind)
5099 chain = BIND_EXPR_BODY (chain);
5100 indent = 2;
5102 else
5103 indent = 0;
5105 else
5107 if (!ignore_topmost_bind)
5108 fprintf (file, "{\n");
5109 indent = 2;
5112 if (any_var)
5113 fprintf (file, "\n");
5115 print_generic_stmt_indented (file, chain, flags, indent);
5116 if (ignore_topmost_bind)
5117 fprintf (file, "}\n");
5120 fprintf (file, "\n\n");
5122 /* Restore CFUN. */
5123 cfun = saved_cfun;
5127 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
5129 void
5130 debug_function (tree fn, int flags)
5132 dump_function_to_file (fn, stderr, flags);
5136 /* Pretty printing of the intermediate representation of loops. */
5137 static void print_loop (FILE *, struct loop *, int);
5138 static void print_pred_bbs (FILE *, basic_block bb);
5139 static void print_succ_bbs (FILE *, basic_block bb);
5142 /* Print on FILE the indexes for the predecessors of basic_block BB. */
5144 static void
5145 print_pred_bbs (FILE *file, basic_block bb)
5147 edge e;
5148 edge_iterator ei;
5150 FOR_EACH_EDGE (e, ei, bb->preds)
5151 fprintf (file, "bb_%d ", e->src->index);
5155 /* Print on FILE the indexes for the successors of basic_block BB. */
5157 static void
5158 print_succ_bbs (FILE *file, basic_block bb)
5160 edge e;
5161 edge_iterator ei;
5163 FOR_EACH_EDGE (e, ei, bb->succs)
5164 fprintf (file, "bb_%d ", e->dest->index);
5168 /* Pretty print LOOP on FILE, indented INDENT spaces. */
5170 static void
5171 print_loop (FILE *file, struct loop *loop, int indent)
5173 char *s_indent;
5174 basic_block bb;
5176 if (loop == NULL)
5177 return;
5179 s_indent = (char *) alloca ((size_t) indent + 1);
5180 memset ((void *) s_indent, ' ', (size_t) indent);
5181 s_indent[indent] = '\0';
5183 /* Print the loop's header. */
5184 fprintf (file, "%sloop_%d\n", s_indent, loop->num);
5186 /* Print the loop's body. */
5187 fprintf (file, "%s{\n", s_indent);
5188 FOR_EACH_BB (bb)
5189 if (bb->loop_father == loop)
5191 /* Print the basic_block's header. */
5192 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
5193 print_pred_bbs (file, bb);
5194 fprintf (file, "}, succs = {");
5195 print_succ_bbs (file, bb);
5196 fprintf (file, "})\n");
5198 /* Print the basic_block's body. */
5199 fprintf (file, "%s {\n", s_indent);
5200 tree_dump_bb (bb, file, indent + 4);
5201 fprintf (file, "%s }\n", s_indent);
5204 print_loop (file, loop->inner, indent + 2);
5205 fprintf (file, "%s}\n", s_indent);
5206 print_loop (file, loop->next, indent);
5210 /* Starting from the first basic block of the current function, pretty
5211 print its enclosing loop structure on FILE. */
5213 void
5214 print_loop_ir (FILE *file)
5216 basic_block bb;
5218 bb = BASIC_BLOCK (NUM_FIXED_BLOCKS);
5219 if (bb && bb->loop_father)
5220 print_loop (file, bb->loop_father, 0);
5224 /* Debug the loop structure at the tree level by printing it to stderr. */
5226 void
5227 debug_loop_ir (void)
5229 print_loop_ir (stderr);
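/* Both print_loop_ir and debug_loop_ir are meant as debugging aids;
   debug_loop_ir in particular can be invoked directly from the debugger,
   e.g. "(gdb) call debug_loop_ir ()". */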
5233 /* Return true if BB ends with a call, possibly followed by some
5234 instructions that must stay with the call. Return false
5235 otherwise. */
5237 static bool
5238 tree_block_ends_with_call_p (basic_block bb)
5240 block_stmt_iterator bsi = bsi_last (bb);
5241 return get_call_expr_in (bsi_stmt (bsi)) != NULL;
5245 /* Return true if BB ends with a conditional branch. Return false
5246 otherwise. */
5248 static bool
5249 tree_block_ends_with_condjump_p (basic_block bb)
5251 tree stmt = last_stmt (bb);
5252 return (stmt && TREE_CODE (stmt) == COND_EXPR);
5256 /* Return true if we need to add fake edge to exit at statement T.
5257 Helper function for tree_flow_call_edges_add. */
5259 static bool
5260 need_fake_edge_p (tree t)
5262 tree call;
5264 /* NORETURN and LONGJMP calls already have an edge to exit.
5265 CONST and PURE calls do not need one.
5266 We don't currently check for CONST and PURE here, although
5267 it would be a good idea, because those attributes are
5268 figured out from the RTL in mark_constant_function, and
5269 the counter incrementation code from -fprofile-arcs
5270 leads to different results from -fbranch-probabilities. */
5271 call = get_call_expr_in (t);
5272 if (call
5273 && !(call_expr_flags (call) & ECF_NORETURN))
5274 return true;
5276 if (TREE_CODE (t) == ASM_EXPR
5277 && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
5278 return true;
5280 return false;
5284 /* Add fake edges to the function exit for any non-constant and
5285 non-noreturn calls and for volatile inline assembly, either in the
5286 blocks specified by the bitmap BLOCKS or in the whole CFG if BLOCKS
5287 is zero. Return the number of blocks that were split.
5289 The goal is to expose cases in which entering a basic block does
5290 not imply that all subsequent instructions must be executed. */
5292 static int
5293 tree_flow_call_edges_add (sbitmap blocks)
5295 int i;
5296 int blocks_split = 0;
5297 int last_bb = last_basic_block;
5298 bool check_last_block = false;
5300 if (n_basic_blocks == NUM_FIXED_BLOCKS)
5301 return 0;
5303 if (! blocks)
5304 check_last_block = true;
5305 else
5306 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
5308 /* In the last basic block, before epilogue generation, there will be
5309 a fallthru edge to EXIT. Special care is required if the last insn
5310 of the last basic block is a call because make_edge folds duplicate
5311 edges, which would result in the fallthru edge also being marked
5312 fake, which would result in the fallthru edge being removed by
5313 remove_fake_edges, which would result in an invalid CFG.
5315 Moreover, we can't elide the outgoing fake edge, since the block
5316 profiler needs to take this into account in order to solve the minimal
5317 spanning tree in the case that the call doesn't return.
5319 Handle this by adding a dummy instruction in a new last basic block. */
5320 if (check_last_block)
5322 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
5323 block_stmt_iterator bsi = bsi_last (bb);
5324 tree t = NULL_TREE;
5325 if (!bsi_end_p (bsi))
5326 t = bsi_stmt (bsi);
5328 if (t && need_fake_edge_p (t))
5330 edge e;
5332 e = find_edge (bb, EXIT_BLOCK_PTR);
5333 if (e)
5335 bsi_insert_on_edge (e, build_empty_stmt ());
5336 bsi_commit_edge_inserts ();
5341 /* Now add fake edges to the function exit for any non-constant
5342 calls since there is no way that we can determine if they will
5343 return or not... */
5344 for (i = 0; i < last_bb; i++)
5346 basic_block bb = BASIC_BLOCK (i);
5347 block_stmt_iterator bsi;
5348 tree stmt, last_stmt;
5350 if (!bb)
5351 continue;
5353 if (blocks && !TEST_BIT (blocks, i))
5354 continue;
5356 bsi = bsi_last (bb);
5357 if (!bsi_end_p (bsi))
5359 last_stmt = bsi_stmt (bsi);
5362 stmt = bsi_stmt (bsi);
5363 if (need_fake_edge_p (stmt))
5365 edge e;
5366 /* The handling above of the final block before the
5367 epilogue should be enough to verify that there is
5368 no edge to the exit block in the CFG already.
5369 Calling make_edge in such a case would cause us to
5370 mark that edge as fake and remove it later. */
5371 #ifdef ENABLE_CHECKING
5372 if (stmt == last_stmt)
5374 e = find_edge (bb, EXIT_BLOCK_PTR);
5375 gcc_assert (e == NULL);
5377 #endif
5379 /* Note that the following may create a new basic block
5380 and renumber the existing basic blocks. */
5381 if (stmt != last_stmt)
5383 e = split_block (bb, stmt);
5384 if (e)
5385 blocks_split++;
5387 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
5389 bsi_prev (&bsi);
5391 while (!bsi_end_p (bsi));
5395 if (blocks_split)
5396 verify_flow_info ();
5398 return blocks_split;
5401 /* Purge dead abnormal call edges from basic block BB. */
5403 bool
5404 tree_purge_dead_abnormal_call_edges (basic_block bb)
5406 bool changed = tree_purge_dead_eh_edges (bb);
5408 if (current_function_has_nonlocal_label)
5410 tree stmt = last_stmt (bb);
5411 edge_iterator ei;
5412 edge e;
5414 if (!(stmt && tree_can_make_abnormal_goto (stmt)))
5415 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5417 if (e->flags & EDGE_ABNORMAL)
5419 remove_edge (e);
5420 changed = true;
5422 else
5423 ei_next (&ei);
5426 /* See tree_purge_dead_eh_edges below. */
5427 if (changed)
5428 free_dominance_info (CDI_DOMINATORS);
5431 return changed;
5434 /* Purge dead EH edges from basic block BB. */
5436 bool
5437 tree_purge_dead_eh_edges (basic_block bb)
5439 bool changed = false;
5440 edge e;
5441 edge_iterator ei;
5442 tree stmt = last_stmt (bb);
5444 if (stmt && tree_can_throw_internal (stmt))
5445 return false;
5447 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5449 if (e->flags & EDGE_EH)
5451 remove_edge (e);
5452 changed = true;
5454 else
5455 ei_next (&ei);
5458 /* Removal of dead EH edges might change dominators of not
5459 just immediate successors. E.g. when bb1 is changed so that
5460 it no longer can throw and bb1->bb3 and bb1->bb4 are dead
5461 eh edges purged by this function in:
5465 (CFG sketch: bb1 branches to bb2, bb3 and bb4; bb3-->bb4; the paths
5466 through bb2 and through bb3/bb4 rejoin at bb5)
5473 idom(bb5) must be recomputed. For now just free the dominance
5474 info. */
5475 if (changed)
5476 free_dominance_info (CDI_DOMINATORS);
5478 return changed;
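/* Purge dead EH edges from every basic block whose index is set in the
   bitmap BLOCKS. Return true if any edge was removed. */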
5481 bool
5482 tree_purge_all_dead_eh_edges (bitmap blocks)
5484 bool changed = false;
5485 unsigned i;
5486 bitmap_iterator bi;
5488 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
5490 changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
5493 return changed;
5496 /* This function is called whenever a new edge is created or
5497 redirected. */
5499 static void
5500 tree_execute_on_growing_pred (edge e)
5502 basic_block bb = e->dest;
5504 if (phi_nodes (bb))
5505 reserve_phi_args_for_new_edge (bb);
5508 /* This function is called immediately before edge E is removed from
5509 the edge vector E->dest->preds. */
5511 static void
5512 tree_execute_on_shrinking_pred (edge e)
5514 if (phi_nodes (e->dest))
5515 remove_phi_args (e);
5518 /*---------------------------------------------------------------------------
5519 Helper functions for Loop versioning
5520 ---------------------------------------------------------------------------*/
5522 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
5523 of 'first'. Both of them are dominated by 'new_head' basic block. When
5524 'new_head' was created by splitting 'second's incoming edge, it received
5525 phi arguments on that edge from split_edge(). Later, an additional edge 'e'
5526 was created to connect 'new_head' and 'first'. This routine now adds, on
5527 the additional edge 'e', the phi arguments that the edge from 'new_head'
5528 to 'second' received as part of the edge splitting. */
5531 static void
5532 tree_lv_adjust_loop_header_phi (basic_block first, basic_block second,
5533 basic_block new_head, edge e)
5535 tree phi1, phi2;
5536 edge e2 = find_edge (new_head, second);
5538 /* Because NEW_HEAD has been created by splitting SECOND's incoming
5539 edge, we should always have an edge from NEW_HEAD to SECOND. */
5540 gcc_assert (e2 != NULL);
5542 /* Browse all 'second' basic block phi nodes and add phi args to
5543 edge 'e' for 'first' head. PHI args are always in correct order. */
5545 for (phi2 = phi_nodes (second), phi1 = phi_nodes (first);
5546 phi2 && phi1;
5547 phi2 = PHI_CHAIN (phi2), phi1 = PHI_CHAIN (phi1))
5549 tree def = PHI_ARG_DEF (phi2, e2->dest_idx);
5550 add_phi_arg (phi1, def, e);
5554 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
5555 FIRST_HEAD is the destination of the THEN part and SECOND_HEAD is
5556 the destination of the ELSE part (see the GOTO_EXPRs built below). */
5557 static void
5558 tree_lv_add_condition_to_bb (basic_block first_head, basic_block second_head,
5559 basic_block cond_bb, void *cond_e)
5561 block_stmt_iterator bsi;
5562 tree goto1 = NULL_TREE;
5563 tree goto2 = NULL_TREE;
5564 tree new_cond_expr = NULL_TREE;
5565 tree cond_expr = (tree) cond_e;
5566 edge e0;
5568 /* Build new conditional expr */
5569 goto1 = build1 (GOTO_EXPR, void_type_node, tree_block_label (first_head));
5570 goto2 = build1 (GOTO_EXPR, void_type_node, tree_block_label (second_head));
5571 new_cond_expr = build3 (COND_EXPR, void_type_node, cond_expr, goto1, goto2);
5573 /* Add new cond in cond_bb. */
5574 bsi = bsi_start (cond_bb);
5575 bsi_insert_after (&bsi, new_cond_expr, BSI_NEW_STMT);
5576 /* Adjust edges appropriately to connect new head with first head
5577 as well as second head. */
5578 e0 = single_succ_edge (cond_bb);
5579 e0->flags &= ~EDGE_FALLTHRU;
5580 e0->flags |= EDGE_FALSE_VALUE;
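/* The CFG hooks for GIMPLE trees. This table plugs the functions defined
   in this file into the IL-independent CFG manipulation machinery declared
   in cfghooks.h. */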
5583 struct cfg_hooks tree_cfg_hooks = {
5584 "tree",
5585 tree_verify_flow_info,
5586 tree_dump_bb, /* dump_bb */
5587 create_bb, /* create_basic_block */
5588 tree_redirect_edge_and_branch,/* redirect_edge_and_branch */
5589 tree_redirect_edge_and_branch_force,/* redirect_edge_and_branch_force */
5590 remove_bb, /* delete_basic_block */
5591 tree_split_block, /* split_block */
5592 tree_move_block_after, /* move_block_after */
5593 tree_can_merge_blocks_p, /* can_merge_blocks_p */
5594 tree_merge_blocks, /* merge_blocks */
5595 tree_predict_edge, /* predict_edge */
5596 tree_predicted_by_p, /* predicted_by_p */
5597 tree_can_duplicate_bb_p, /* can_duplicate_block_p */
5598 tree_duplicate_bb, /* duplicate_block */
5599 tree_split_edge, /* split_edge */
5600 tree_make_forwarder_block, /* make_forwarder_block */
5601 NULL, /* tidy_fallthru_edge */
5602 tree_block_ends_with_call_p, /* block_ends_with_call_p */
5603 tree_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
5604 tree_flow_call_edges_add, /* flow_call_edges_add */
5605 tree_execute_on_growing_pred, /* execute_on_growing_pred */
5606 tree_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
5607 tree_duplicate_loop_to_header_edge, /* duplicate loop for trees */
5608 tree_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
5609 tree_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
5610 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
5611 flush_pending_stmts /* flush_pending_stmts */
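/* An edge is critical when its source has more than one successor and its
   destination has more than one predecessor; statements cannot be inserted
   on such an edge without splitting it first. */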
5615 /* Split all critical edges. */
5617 static unsigned int
5618 split_critical_edges (void)
5620 basic_block bb;
5621 edge e;
5622 edge_iterator ei;
5624 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
5625 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
5626 mappings around the calls to split_edge. */
5627 start_recording_case_labels ();
5628 FOR_ALL_BB (bb)
5630 FOR_EACH_EDGE (e, ei, bb->succs)
5631 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
5633 split_edge (e);
5636 end_recording_case_labels ();
5637 return 0;
5640 struct tree_opt_pass pass_split_crit_edges =
5642 "crited", /* name */
5643 NULL, /* gate */
5644 split_critical_edges, /* execute */
5645 NULL, /* sub */
5646 NULL, /* next */
5647 0, /* static_pass_number */
5648 TV_TREE_SPLIT_EDGES, /* tv_id */
5649 PROP_cfg, /* properties_required */
5650 PROP_no_crit_edges, /* properties_provided */
5651 0, /* properties_destroyed */
5652 0, /* todo_flags_start */
5653 TODO_dump_func, /* todo_flags_finish */
5654 0 /* letter */
5658 /* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
5659 a temporary, register the temporary to be renamed if necessary,
5660 and finally return the temporary. Put the statements to compute
5661 EXP before the current statement in BSI. */
5663 tree
5664 gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
5666 tree t, new_stmt, orig_stmt;
5668 if (is_gimple_val (exp))
5669 return exp;
5671 t = make_rename_temp (type, NULL);
5672 new_stmt = build2 (MODIFY_EXPR, type, t, exp);
5674 orig_stmt = bsi_stmt (*bsi);
5675 SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
5676 TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
5678 bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
5679 if (in_ssa_p)
5680 mark_new_vars_to_rename (new_stmt);
5682 return t;
5685 /* Build a ternary operation and gimplify it. Emit code before BSI.
5686 Return the gimple_val holding the result. */
5688 tree
5689 gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
5690 tree type, tree a, tree b, tree c)
5692 tree ret;
5694 ret = fold_build3 (code, type, a, b, c);
5695 STRIP_NOPS (ret);
5697 return gimplify_val (bsi, type, ret);
5700 /* Build a binary operation and gimplify it. Emit code before BSI.
5701 Return the gimple_val holding the result. */
5703 tree
5704 gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
5705 tree type, tree a, tree b)
5707 tree ret;
5709 ret = fold_build2 (code, type, a, b);
5710 STRIP_NOPS (ret);
5712 return gimplify_val (bsi, type, ret);
5715 /* Build a unary operation and gimplify it. Emit code before BSI.
5716 Return the gimple_val holding the result. */
5718 tree
5719 gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
5720 tree a)
5722 tree ret;
5724 ret = fold_build1 (code, type, a);
5725 STRIP_NOPS (ret);
5727 return gimplify_val (bsi, type, ret);
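/* For example, a pass that needs the GIMPLE value of A + B computed just
   before statement STMT can write (a sketch; STMT, TYPE, A and B are
   illustrative names only):

     block_stmt_iterator bsi = bsi_for_stmt (stmt);
     tree sum = gimplify_build2 (&bsi, PLUS_EXPR, type, a, b);

   SUM is then a valid GIMPLE operand, and the statements that compute it
   have been inserted before STMT. */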
5732 /* Emit return warnings. */
5734 static unsigned int
5735 execute_warn_function_return (void)
5737 #ifdef USE_MAPPED_LOCATION
5738 source_location location;
5739 #else
5740 location_t *locus;
5741 #endif
5742 tree last;
5743 edge e;
5744 edge_iterator ei;
5746 /* If we have a path to EXIT, then we do return. */
5747 if (TREE_THIS_VOLATILE (cfun->decl)
5748 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
5750 #ifdef USE_MAPPED_LOCATION
5751 location = UNKNOWN_LOCATION;
5752 #else
5753 locus = NULL;
5754 #endif
5755 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5757 last = last_stmt (e->src);
5758 if (TREE_CODE (last) == RETURN_EXPR
5759 #ifdef USE_MAPPED_LOCATION
5760 && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
5761 #else
5762 && (locus = EXPR_LOCUS (last)) != NULL)
5763 #endif
5764 break;
5766 #ifdef USE_MAPPED_LOCATION
5767 if (location == UNKNOWN_LOCATION)
5768 location = cfun->function_end_locus;
5769 warning (0, "%H%<noreturn%> function does return", &location);
5770 #else
5771 if (!locus)
5772 locus = &cfun->function_end_locus;
5773 warning (0, "%H%<noreturn%> function does return", locus);
5774 #endif
5777 /* If we see "return;" in some basic block, then we do reach the end
5778 without returning a value. */
5779 else if (warn_return_type
5780 && !TREE_NO_WARNING (cfun->decl)
5781 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
5782 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
5784 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5786 tree last = last_stmt (e->src);
5787 if (TREE_CODE (last) == RETURN_EXPR
5788 && TREE_OPERAND (last, 0) == NULL
5789 && !TREE_NO_WARNING (last))
5791 #ifdef USE_MAPPED_LOCATION
5792 location = EXPR_LOCATION (last);
5793 if (location == UNKNOWN_LOCATION)
5794 location = cfun->function_end_locus;
5795 warning (0, "%Hcontrol reaches end of non-void function", &location);
5796 #else
5797 locus = EXPR_LOCUS (last);
5798 if (!locus)
5799 locus = &cfun->function_end_locus;
5800 warning (0, "%Hcontrol reaches end of non-void function", locus);
5801 #endif
5802 TREE_NO_WARNING (cfun->decl) = 1;
5803 break;
5807 return 0;
5811 /* Given a basic block B which ends with a conditional and has
5812 precisely two successors, determine which of the edges is taken if
5813 the conditional is true and which is taken if the conditional is
5814 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
5816 void
5817 extract_true_false_edges_from_block (basic_block b,
5818 edge *true_edge,
5819 edge *false_edge)
5821 edge e = EDGE_SUCC (b, 0);
5823 if (e->flags & EDGE_TRUE_VALUE)
5825 *true_edge = e;
5826 *false_edge = EDGE_SUCC (b, 1);
5828 else
5830 *false_edge = e;
5831 *true_edge = EDGE_SUCC (b, 1);
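/* A typical use (sketch; BB is assumed to end in a COND_EXPR):

     edge true_e, false_e;
     extract_true_false_edges_from_block (bb, &true_e, &false_e);

   after which true_e->dest and false_e->dest are the blocks reached when
   the condition is true and false, respectively. */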
5835 struct tree_opt_pass pass_warn_function_return =
5837 NULL, /* name */
5838 NULL, /* gate */
5839 execute_warn_function_return, /* execute */
5840 NULL, /* sub */
5841 NULL, /* next */
5842 0, /* static_pass_number */
5843 0, /* tv_id */
5844 PROP_cfg, /* properties_required */
5845 0, /* properties_provided */
5846 0, /* properties_destroyed */
5847 0, /* todo_flags_start */
5848 0, /* todo_flags_finish */
5849 0 /* letter */
5852 /* Emit noreturn warnings. */
5854 static unsigned int
5855 execute_warn_function_noreturn (void)
5857 if (warn_missing_noreturn
5858 && !TREE_THIS_VOLATILE (cfun->decl)
5859 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
5860 && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl))
5861 warning (OPT_Wmissing_noreturn, "%Jfunction might be possible candidate "
5862 "for attribute %<noreturn%>",
5863 cfun->decl);
5864 return 0;
5867 struct tree_opt_pass pass_warn_function_noreturn =
5869 NULL, /* name */
5870 NULL, /* gate */
5871 execute_warn_function_noreturn, /* execute */
5872 NULL, /* sub */
5873 NULL, /* next */
5874 0, /* static_pass_number */
5875 0, /* tv_id */
5876 PROP_cfg, /* properties_required */
5877 0, /* properties_provided */
5878 0, /* properties_destroyed */
5879 0, /* todo_flags_start */
5880 0, /* todo_flags_finish */
5881 0 /* letter */