1 /* Control flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006
3 Free Software Foundation, Inc.
4 Contributed by Diego Novillo <dnovillo@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to
20 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
21 Boston, MA 02110-1301, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "rtl.h"
29 #include "tm_p.h"
30 #include "hard-reg-set.h"
31 #include "basic-block.h"
32 #include "output.h"
33 #include "flags.h"
34 #include "function.h"
35 #include "expr.h"
36 #include "ggc.h"
37 #include "langhooks.h"
38 #include "diagnostic.h"
39 #include "tree-flow.h"
40 #include "timevar.h"
41 #include "tree-dump.h"
42 #include "tree-pass.h"
43 #include "toplev.h"
44 #include "except.h"
45 #include "cfgloop.h"
46 #include "cfglayout.h"
47 #include "hashtab.h"
48 #include "tree-ssa-propagate.h"
50 /* This file contains functions for building the Control Flow Graph (CFG)
51 for a function tree. */
53 /* Local declarations. */
55 /* Initial capacity for the basic block array. */
56 static const int initial_cfg_capacity = 20;
58 /* This hash table allows us to efficiently look up all CASE_LABEL_EXPRs
59 which use a particular edge. The CASE_LABEL_EXPRs are chained together
60 via their TREE_CHAIN field, which we clear after we're done with the
61 hash table to prevent problems with duplication of SWITCH_EXPRs.
63 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
64 update the case vector in response to edge redirections.
66 Right now this table is set up and torn down at key points in the
67 compilation process. It would be nice if we could make the table
68 more persistent. The key is getting notification of changes to
69 the CFG (particularly edge removal, creation and redirection). */
71 struct edge_to_cases_elt
73 /* The edge itself. Necessary for hashing and equality tests. */
74 edge e;
76 /* The case labels associated with this edge. We link these up via
77 their TREE_CHAIN field, then we wipe out the TREE_CHAIN fields
78 when we destroy the hash table. This prevents problems when copying
79 SWITCH_EXPRs. */
80 tree case_labels;
83 static htab_t edge_to_cases;
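/* Illustrative sketch, not part of the original source: for a lowered
   switch such as

       switch (x)
         {
         case 1:
         case 2:
           goto L1;
         default:
           goto L2;
         }

   the edge from the switch block to the block holding L1 maps, via this
   table, to a TREE_CHAIN of the CASE_LABEL_EXPRs for "case 1:" and
   "case 2:".  Redirecting that edge then only requires walking the
   chain and updating each CASE_LABEL, instead of rescanning the whole
   case vector.  */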
85 /* CFG statistics. */
86 struct cfg_stats_d
88 long num_merged_labels;
91 static struct cfg_stats_d cfg_stats;
93 /* Nonzero if we found a computed goto while building basic blocks. */
94 static bool found_computed_goto;
96 /* Basic blocks and flowgraphs. */
97 static basic_block create_bb (void *, void *, basic_block);
98 static void make_blocks (tree);
99 static void factor_computed_gotos (void);
101 /* Edges. */
102 static void make_edges (void);
103 static void make_cond_expr_edges (basic_block);
104 static void make_switch_expr_edges (basic_block);
105 static void make_goto_expr_edges (basic_block);
106 static edge tree_redirect_edge_and_branch (edge, basic_block);
107 static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
108 static unsigned int split_critical_edges (void);
110 /* Various helpers. */
111 static inline bool stmt_starts_bb_p (tree, tree);
112 static int tree_verify_flow_info (void);
113 static void tree_make_forwarder_block (edge);
114 static void tree_cfg2vcg (FILE *);
116 /* Flowgraph optimization and cleanup. */
117 static void tree_merge_blocks (basic_block, basic_block);
118 static bool tree_can_merge_blocks_p (basic_block, basic_block);
119 static void remove_bb (basic_block);
120 static edge find_taken_edge_computed_goto (basic_block, tree);
121 static edge find_taken_edge_cond_expr (basic_block, tree);
122 static edge find_taken_edge_switch_expr (basic_block, tree);
123 static tree find_case_label_for_value (tree, tree);
125 void
126 init_empty_tree_cfg (void)
128 /* Initialize the basic block array. */
129 init_flow ();
130 profile_status = PROFILE_ABSENT;
131 n_basic_blocks = NUM_FIXED_BLOCKS;
132 last_basic_block = NUM_FIXED_BLOCKS;
133 basic_block_info = VEC_alloc (basic_block, gc, initial_cfg_capacity);
134 VEC_safe_grow (basic_block, gc, basic_block_info, initial_cfg_capacity);
135 memset (VEC_address (basic_block, basic_block_info), 0,
136 sizeof (basic_block) * initial_cfg_capacity);
138 /* Build a mapping of labels to their associated blocks. */
139 label_to_block_map = VEC_alloc (basic_block, gc, initial_cfg_capacity);
140 VEC_safe_grow (basic_block, gc, label_to_block_map, initial_cfg_capacity);
141 memset (VEC_address (basic_block, label_to_block_map),
142 0, sizeof (basic_block) * initial_cfg_capacity);
144 SET_BASIC_BLOCK (ENTRY_BLOCK, ENTRY_BLOCK_PTR);
145 SET_BASIC_BLOCK (EXIT_BLOCK, EXIT_BLOCK_PTR);
146 ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
147 EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
150 /*---------------------------------------------------------------------------
151 Create basic blocks
152 ---------------------------------------------------------------------------*/
154 /* Entry point to the CFG builder for trees. TP points to the list of
155 statements to be added to the flowgraph. */
157 static void
158 build_tree_cfg (tree *tp)
160 /* Register specific tree functions. */
161 tree_register_cfg_hooks ();
163 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
165 init_empty_tree_cfg ();
167 found_computed_goto = 0;
168 make_blocks (*tp);
170 /* Computed gotos are hell to deal with, especially if there are
171 lots of them with a large number of destinations. So we factor
172 them to a common computed goto location before we build the
173 edge list. After we convert back to normal form, we will un-factor
174 the computed gotos since factoring introduces an unwanted jump. */
175 if (found_computed_goto)
176 factor_computed_gotos ();
178 /* Make sure there is always at least one block, even if it's empty. */
179 if (n_basic_blocks == NUM_FIXED_BLOCKS)
180 create_empty_bb (ENTRY_BLOCK_PTR);
182 /* Adjust the size of the array. */
183 if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
185 size_t old_size = VEC_length (basic_block, basic_block_info);
186 basic_block *p;
187 VEC_safe_grow (basic_block, gc, basic_block_info, n_basic_blocks);
188 p = VEC_address (basic_block, basic_block_info);
189 memset (&p[old_size], 0,
190 sizeof (basic_block) * (n_basic_blocks - old_size));
193 /* To speed up statement iterator walks, we first purge dead labels. */
194 cleanup_dead_labels ();
196 /* Group case nodes to reduce the number of edges.
197 We do this after cleaning up dead labels because otherwise we miss
198 a lot of obvious case merging opportunities. */
199 group_case_labels ();
201 /* Create the edges of the flowgraph. */
202 make_edges ();
204 /* Debugging dumps. */
206 /* Write the flowgraph to a VCG file. */
208 int local_dump_flags;
209 FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
210 if (vcg_file)
212 tree_cfg2vcg (vcg_file);
213 dump_end (TDI_vcg, vcg_file);
217 #ifdef ENABLE_CHECKING
218 verify_stmts ();
219 #endif
221 /* Dump a textual representation of the flowgraph. */
222 if (dump_file)
223 dump_tree_cfg (dump_file, dump_flags);
226 static unsigned int
227 execute_build_cfg (void)
229 build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
230 return 0;
233 struct tree_opt_pass pass_build_cfg =
235 "cfg", /* name */
236 NULL, /* gate */
237 execute_build_cfg, /* execute */
238 NULL, /* sub */
239 NULL, /* next */
240 0, /* static_pass_number */
241 TV_TREE_CFG, /* tv_id */
242 PROP_gimple_leh, /* properties_required */
243 PROP_cfg, /* properties_provided */
244 0, /* properties_destroyed */
245 0, /* todo_flags_start */
246 TODO_verify_stmts, /* todo_flags_finish */
247 0 /* letter */
250 /* Search the CFG for any computed gotos. If found, factor them to a
251 common computed goto site. Also record the location of that site so
252 that we can un-factor the gotos after we have converted back to
253 normal form. */
255 static void
256 factor_computed_gotos (void)
258 basic_block bb;
259 tree factored_label_decl = NULL;
260 tree var = NULL;
261 tree factored_computed_goto_label = NULL;
262 tree factored_computed_goto = NULL;
264 /* We know there are one or more computed gotos in this function.
265 Examine the last statement in each basic block to see if the block
266 ends with a computed goto. */
268 FOR_EACH_BB (bb)
270 block_stmt_iterator bsi = bsi_last (bb);
271 tree last;
273 if (bsi_end_p (bsi))
274 continue;
275 last = bsi_stmt (bsi);
277 /* Ignore the computed goto we create when we factor the original
278 computed gotos. */
279 if (last == factored_computed_goto)
280 continue;
282 /* If the last statement is a computed goto, factor it. */
283 if (computed_goto_p (last))
285 tree assignment;
287 /* The first time we find a computed goto we need to create
288 the factored goto block and the variable each original
289 computed goto will use for its goto destination. */
290 if (! factored_computed_goto)
292 basic_block new_bb = create_empty_bb (bb);
293 block_stmt_iterator new_bsi = bsi_start (new_bb);
295 /* Create the destination of the factored goto. Each original
296 computed goto will put its desired destination into this
297 variable and jump to the label we create immediately
298 below. */
299 var = create_tmp_var (ptr_type_node, "gotovar");
301 /* Build a label for the new block which will contain the
302 factored computed goto. */
303 factored_label_decl = create_artificial_label ();
304 factored_computed_goto_label
305 = build1 (LABEL_EXPR, void_type_node, factored_label_decl);
306 bsi_insert_after (&new_bsi, factored_computed_goto_label,
307 BSI_NEW_STMT);
309 /* Build our new computed goto. */
310 factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
311 bsi_insert_after (&new_bsi, factored_computed_goto,
312 BSI_NEW_STMT);
315 /* Copy the original computed goto's destination into VAR. */
316 assignment = build2 (MODIFY_EXPR, ptr_type_node,
317 var, GOTO_DESTINATION (last));
318 bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);
320 /* And re-vector the computed goto to the new destination. */
321 GOTO_DESTINATION (last) = factored_label_decl;
327 /* Build a flowgraph for the statement_list STMT_LIST. */
329 static void
330 make_blocks (tree stmt_list)
332 tree_stmt_iterator i = tsi_start (stmt_list);
333 tree stmt = NULL;
334 bool start_new_block = true;
335 bool first_stmt_of_list = true;
336 basic_block bb = ENTRY_BLOCK_PTR;
338 while (!tsi_end_p (i))
340 tree prev_stmt;
342 prev_stmt = stmt;
343 stmt = tsi_stmt (i);
345 /* If the statement starts a new basic block or if we have determined
346 in a previous pass that we need to create a new block for STMT, do
347 so now. */
348 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
350 if (!first_stmt_of_list)
351 stmt_list = tsi_split_statement_list_before (&i);
352 bb = create_basic_block (stmt_list, NULL, bb);
353 start_new_block = false;
356 /* Now add STMT to BB and create the subgraphs for special statement
357 codes. */
358 set_bb_for_stmt (stmt, bb);
360 if (computed_goto_p (stmt))
361 found_computed_goto = true;
363 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
364 next iteration. */
365 if (stmt_ends_bb_p (stmt))
366 start_new_block = true;
368 tsi_next (&i);
369 first_stmt_of_list = false;
374 /* Create and return a new empty basic block after bb AFTER. */
376 static basic_block
377 create_bb (void *h, void *e, basic_block after)
379 basic_block bb;
381 gcc_assert (!e);
383 /* Create and initialize a new basic block. Since alloc_block uses
384 ggc_alloc_cleared to allocate a basic block, we do not have to
385 clear the newly allocated basic block here. */
386 bb = alloc_block ();
388 bb->index = last_basic_block;
389 bb->flags = BB_NEW;
390 bb->stmt_list = h ? (tree) h : alloc_stmt_list ();
392 /* Add the new block to the linked list of blocks. */
393 link_block (bb, after);
395 /* Grow the basic block array if needed. */
396 if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
398 size_t old_size = VEC_length (basic_block, basic_block_info);
399 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
400 basic_block *p;
401 VEC_safe_grow (basic_block, gc, basic_block_info, new_size);
402 p = VEC_address (basic_block, basic_block_info);
403 memset (&p[old_size], 0, sizeof (basic_block) * (new_size - old_size));
406 /* Add the newly created block to the array. */
407 SET_BASIC_BLOCK (last_basic_block, bb);
409 n_basic_blocks++;
410 last_basic_block++;
412 return bb;
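/* For illustration: the growth policy above enlarges the array by
   roughly a quarter each time, e.g. with last_basic_block == 20 the
   new size is 20 + (20 + 3) / 4 == 25.  */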
416 /*---------------------------------------------------------------------------
417 Edge creation
418 ---------------------------------------------------------------------------*/
420 /* Fold COND_EXPR_COND of each COND_EXPR. */
422 void
423 fold_cond_expr_cond (void)
425 basic_block bb;
427 FOR_EACH_BB (bb)
429 tree stmt = last_stmt (bb);
431 if (stmt
432 && TREE_CODE (stmt) == COND_EXPR)
434 tree cond = fold (COND_EXPR_COND (stmt));
435 if (integer_zerop (cond))
436 COND_EXPR_COND (stmt) = boolean_false_node;
437 else if (integer_onep (cond))
438 COND_EXPR_COND (stmt) = boolean_true_node;
443 /* Join all the blocks in the flowgraph. */
445 static void
446 make_edges (void)
448 basic_block bb;
449 struct omp_region *cur_region = NULL;
451 /* Create an edge from entry to the first block with executable
452 statements in it. */
453 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);
455 /* Traverse the basic block array placing edges. */
456 FOR_EACH_BB (bb)
458 tree last = last_stmt (bb);
459 bool fallthru;
461 if (last)
463 enum tree_code code = TREE_CODE (last);
464 switch (code)
466 case GOTO_EXPR:
467 make_goto_expr_edges (bb);
468 fallthru = false;
469 break;
470 case RETURN_EXPR:
471 make_edge (bb, EXIT_BLOCK_PTR, 0);
472 fallthru = false;
473 break;
474 case COND_EXPR:
475 make_cond_expr_edges (bb);
476 fallthru = false;
477 break;
478 case SWITCH_EXPR:
479 make_switch_expr_edges (bb);
480 fallthru = false;
481 break;
482 case RESX_EXPR:
483 make_eh_edges (last);
484 fallthru = false;
485 break;
487 case CALL_EXPR:
488 /* If this function receives a nonlocal goto, then we need to
489 make edges from this call site to all the nonlocal goto
490 handlers. */
491 if (TREE_SIDE_EFFECTS (last)
492 && current_function_has_nonlocal_label)
493 make_goto_expr_edges (bb);
495 /* If this statement has reachable exception handlers, then
496 create abnormal edges to them. */
497 make_eh_edges (last);
499 /* Some calls are known not to return. */
500 fallthru = !(call_expr_flags (last) & ECF_NORETURN);
501 break;
503 case MODIFY_EXPR:
504 if (is_ctrl_altering_stmt (last))
506 /* A MODIFY_EXPR may have a CALL_EXPR on its RHS and the
507 CALL_EXPR may have an abnormal edge. Search the RHS for
508 this case and create any required edges. */
509 tree op = get_call_expr_in (last);
510 if (op && TREE_SIDE_EFFECTS (op)
511 && current_function_has_nonlocal_label)
512 make_goto_expr_edges (bb);
514 make_eh_edges (last);
516 fallthru = true;
517 break;
519 case OMP_PARALLEL:
520 case OMP_FOR:
521 case OMP_SINGLE:
522 case OMP_MASTER:
523 case OMP_ORDERED:
524 case OMP_CRITICAL:
525 case OMP_SECTION:
526 cur_region = new_omp_region (bb, code, cur_region);
527 fallthru = true;
528 break;
530 case OMP_SECTIONS:
531 cur_region = new_omp_region (bb, code, cur_region);
532 fallthru = false;
533 break;
535 case OMP_RETURN:
536 /* In the case of an OMP_SECTION, the edge will go somewhere
537 other than the next block. This will be created later. */
538 cur_region->exit = bb;
539 fallthru = cur_region->type != OMP_SECTION;
540 cur_region = cur_region->outer;
541 break;
543 case OMP_CONTINUE:
544 cur_region->cont = bb;
545 switch (cur_region->type)
547 case OMP_FOR:
548 /* ??? Technically there should be some sort of loopback
549 edge here, but it goes to a block that doesn't exist yet,
550 and without it, updating the ssa form would be a real
551 bear. Fortunately, we don't yet do ssa before expanding
552 these nodes. */
553 break;
555 case OMP_SECTIONS:
556 /* Wire up the edges into and out of the nested sections. */
557 /* ??? Similarly wrt loopback. */
559 struct omp_region *i;
560 for (i = cur_region->inner; i ; i = i->next)
562 gcc_assert (i->type == OMP_SECTION);
563 make_edge (cur_region->entry, i->entry, 0);
564 make_edge (i->exit, bb, EDGE_FALLTHRU);
567 break;
569 default:
570 gcc_unreachable ();
572 fallthru = true;
573 break;
575 default:
576 gcc_assert (!stmt_ends_bb_p (last));
577 fallthru = true;
580 else
581 fallthru = true;
583 if (fallthru)
584 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
587 if (root_omp_region)
588 free_omp_regions ();
590 /* Fold COND_EXPR_COND of each COND_EXPR. */
591 fold_cond_expr_cond ();
593 /* Clean up the graph and warn for unreachable code. */
594 cleanup_tree_cfg ();
598 /* Create the edges for a COND_EXPR starting at block BB.
599 At this point, both clauses must contain only simple gotos. */
601 static void
602 make_cond_expr_edges (basic_block bb)
604 tree entry = last_stmt (bb);
605 basic_block then_bb, else_bb;
606 tree then_label, else_label;
607 edge e;
609 gcc_assert (entry);
610 gcc_assert (TREE_CODE (entry) == COND_EXPR);
612 /* Entry basic blocks for each component. */
613 then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
614 else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
615 then_bb = label_to_block (then_label);
616 else_bb = label_to_block (else_label);
618 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
619 #ifdef USE_MAPPED_LOCATION
620 e->goto_locus = EXPR_LOCATION (COND_EXPR_THEN (entry));
621 #else
622 e->goto_locus = EXPR_LOCUS (COND_EXPR_THEN (entry));
623 #endif
624 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
625 if (e)
627 #ifdef USE_MAPPED_LOCATION
628 e->goto_locus = EXPR_LOCATION (COND_EXPR_ELSE (entry));
629 #else
630 e->goto_locus = EXPR_LOCUS (COND_EXPR_ELSE (entry));
631 #endif
635 /* Hashing routine for EDGE_TO_CASES. */
637 static hashval_t
638 edge_to_cases_hash (const void *p)
640 edge e = ((struct edge_to_cases_elt *)p)->e;
642 /* Hash on the edge itself (which is a pointer). */
643 return htab_hash_pointer (e);
646 /* Equality routine for EDGE_TO_CASES.  Edges are unique, so testing
647 for equality is just a pointer comparison. */
649 static int
650 edge_to_cases_eq (const void *p1, const void *p2)
652 edge e1 = ((struct edge_to_cases_elt *)p1)->e;
653 edge e2 = ((struct edge_to_cases_elt *)p2)->e;
655 return e1 == e2;
658 /* Called for each element in the hash table (P) as we delete the
659 edge to cases hash table.
661 Clear all the TREE_CHAINs to prevent problems with copying of
662 SWITCH_EXPRs and structure sharing rules, then free the hash table
663 element. */
665 static void
666 edge_to_cases_cleanup (void *p)
668 struct edge_to_cases_elt *elt = (struct edge_to_cases_elt *) p;
669 tree t, next;
671 for (t = elt->case_labels; t; t = next)
673 next = TREE_CHAIN (t);
674 TREE_CHAIN (t) = NULL;
676 free (p);
679 /* Start recording information mapping edges to case labels. */
681 void
682 start_recording_case_labels (void)
684 gcc_assert (edge_to_cases == NULL);
686 edge_to_cases = htab_create (37,
687 edge_to_cases_hash,
688 edge_to_cases_eq,
689 edge_to_cases_cleanup);
692 /* Return nonzero if we are recording information for case labels. */
694 static bool
695 recording_case_labels_p (void)
697 return (edge_to_cases != NULL);
700 /* Stop recording information mapping edges to case labels and
701 remove any information we have recorded. */
702 void
703 end_recording_case_labels (void)
705 htab_delete (edge_to_cases);
706 edge_to_cases = NULL;
709 /* Record that CASE_LABEL (a CASE_LABEL_EXPR) references edge E. */
711 static void
712 record_switch_edge (edge e, tree case_label)
714 struct edge_to_cases_elt *elt;
715 void **slot;
717 /* Build a hash table element so we can see if E is already
718 in the table. */
719 elt = XNEW (struct edge_to_cases_elt);
720 elt->e = e;
721 elt->case_labels = case_label;
723 slot = htab_find_slot (edge_to_cases, elt, INSERT);
725 if (*slot == NULL)
727 /* E was not in the hash table. Install E into the hash table. */
728 *slot = (void *)elt;
730 else
732 /* E was already in the hash table. Free ELT as we do not need it
733 anymore. */
734 free (elt);
736 /* Get the entry stored in the hash table. */
737 elt = (struct edge_to_cases_elt *) *slot;
739 /* Add it to the chain of CASE_LABEL_EXPRs referencing E. */
740 TREE_CHAIN (case_label) = elt->case_labels;
741 elt->case_labels = case_label;
745 /* If we are inside a {start,end}_recording_cases block, then return
746 a chain of CASE_LABEL_EXPRs from T which reference E.
748 Otherwise return NULL. */
750 static tree
751 get_cases_for_edge (edge e, tree t)
753 struct edge_to_cases_elt elt, *elt_p;
754 void **slot;
755 size_t i, n;
756 tree vec;
758 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
759 chains available. Return NULL so the caller can detect this case. */
760 if (!recording_case_labels_p ())
761 return NULL;
763 restart:
764 elt.e = e;
765 elt.case_labels = NULL;
766 slot = htab_find_slot (edge_to_cases, &elt, NO_INSERT);
768 if (slot)
770 elt_p = (struct edge_to_cases_elt *)*slot;
771 return elt_p->case_labels;
774 /* If we did not find E in the hash table, then this must be the first
775 time we have been queried for information about E & T. Add all the
776 elements from T to the hash table then perform the query again. */
778 vec = SWITCH_LABELS (t);
779 n = TREE_VEC_LENGTH (vec);
780 for (i = 0; i < n; i++)
782 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
783 basic_block label_bb = label_to_block (lab);
784 record_switch_edge (find_edge (e->src, label_bb), TREE_VEC_ELT (vec, i));
786 goto restart;
789 /* Create the edges for a SWITCH_EXPR starting at block BB.
790 At this point, the switch body has been lowered and the
791 SWITCH_LABELS filled in, so this is in effect a multi-way branch. */
793 static void
794 make_switch_expr_edges (basic_block bb)
796 tree entry = last_stmt (bb);
797 size_t i, n;
798 tree vec;
800 vec = SWITCH_LABELS (entry);
801 n = TREE_VEC_LENGTH (vec);
803 for (i = 0; i < n; ++i)
805 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
806 basic_block label_bb = label_to_block (lab);
807 make_edge (bb, label_bb, 0);
812 /* Return the basic block holding label DEST. */
814 basic_block
815 label_to_block_fn (struct function *ifun, tree dest)
817 int uid = LABEL_DECL_UID (dest);
819 /* We would die hard when faced by an undefined label. Emit a label to
820 the very first basic block. This will hopefully make even the dataflow
821 and undefined variable warnings quite right. */
822 if ((errorcount || sorrycount) && uid < 0)
824 block_stmt_iterator bsi =
825 bsi_start (BASIC_BLOCK (NUM_FIXED_BLOCKS));
826 tree stmt;
828 stmt = build1 (LABEL_EXPR, void_type_node, dest);
829 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
830 uid = LABEL_DECL_UID (dest);
832 if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
833 <= (unsigned int) uid)
834 return NULL;
835 return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
838 /* Create edges for a goto statement at block BB. */
840 static void
841 make_goto_expr_edges (basic_block bb)
843 tree goto_t;
844 basic_block target_bb;
845 bool for_call;
846 block_stmt_iterator last = bsi_last (bb);
848 goto_t = bsi_stmt (last);
850 /* If the last statement is not a GOTO (i.e., it is a RETURN_EXPR,
851 CALL_EXPR or MODIFY_EXPR), then the edge is an abnormal edge resulting
852 from a nonlocal goto. */
853 if (TREE_CODE (goto_t) != GOTO_EXPR)
854 for_call = true;
855 else
857 tree dest = GOTO_DESTINATION (goto_t);
858 for_call = false;
860 /* A GOTO to a local label creates normal edges. */
861 if (simple_goto_p (goto_t))
863 edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
864 #ifdef USE_MAPPED_LOCATION
865 e->goto_locus = EXPR_LOCATION (goto_t);
866 #else
867 e->goto_locus = EXPR_LOCUS (goto_t);
868 #endif
869 bsi_remove (&last, true);
870 return;
873 /* Nothing more to do for nonlocal gotos. */
874 if (TREE_CODE (dest) == LABEL_DECL)
875 return;
877 /* Computed gotos remain. */
880 /* Look for the block starting with the destination label. In the
881 case of a computed goto, make an edge to any label block we find
882 in the CFG. */
883 FOR_EACH_BB (target_bb)
885 block_stmt_iterator bsi;
887 for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
889 tree target = bsi_stmt (bsi);
891 if (TREE_CODE (target) != LABEL_EXPR)
892 break;
894 if (
895 /* Computed GOTOs. Make an edge to every label block that has
896 been marked as a potential target for a computed goto. */
897 (FORCED_LABEL (LABEL_EXPR_LABEL (target)) && !for_call)
898 /* Nonlocal GOTO target. Make an edge to every label block
899 that has been marked as a potential target for a nonlocal
900 goto. */
901 || (DECL_NONLOCAL (LABEL_EXPR_LABEL (target)) && for_call))
903 make_edge (bb, target_bb, EDGE_ABNORMAL);
904 break;
911 /*---------------------------------------------------------------------------
912 Flowgraph analysis
913 ---------------------------------------------------------------------------*/
915 /* Cleanup useless labels in basic blocks. This is something we wish
916 to do early because it allows us to group case labels before creating
917 the edges for the CFG, and it speeds up block statement iterators in
918 all passes later on.
919 We only run this pass once; running it more than once is probably not
920 profitable. */
922 /* A map from basic block index to the leading label of that block. */
923 static tree *label_for_bb;
925 /* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
926 static void
927 update_eh_label (struct eh_region *region)
929 tree old_label = get_eh_region_tree_label (region);
930 if (old_label)
932 tree new_label;
933 basic_block bb = label_to_block (old_label);
935 /* ??? After optimizing, there may be EH regions with labels
936 that have already been removed from the function body, so
937 there is no basic block for them. */
938 if (! bb)
939 return;
941 new_label = label_for_bb[bb->index];
942 set_eh_region_tree_label (region, new_label);
946 /* Given LABEL return the first label in the same basic block. */
947 static tree
948 main_block_label (tree label)
950 basic_block bb = label_to_block (label);
952 /* label_to_block may have inserted an undefined label into the chain. */
953 if (!label_for_bb[bb->index])
954 label_for_bb[bb->index] = label;
955 return label_for_bb[bb->index];
958 /* Cleanup redundant labels. This is a three-step process:
959 1) Find the leading label for each block.
960 2) Redirect all references to labels to the leading labels.
961 3) Cleanup all useless labels. */
963 void
964 cleanup_dead_labels (void)
966 basic_block bb;
967 label_for_bb = XCNEWVEC (tree, last_basic_block);
969 /* Find a suitable label for each block. We use the first user-defined
970 label if there is one, or otherwise just the first label we see. */
971 FOR_EACH_BB (bb)
973 block_stmt_iterator i;
975 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
977 tree label, stmt = bsi_stmt (i);
979 if (TREE_CODE (stmt) != LABEL_EXPR)
980 break;
982 label = LABEL_EXPR_LABEL (stmt);
984 /* If we have not yet seen a label for the current block,
985 remember this one and see if there are more labels. */
986 if (! label_for_bb[bb->index])
988 label_for_bb[bb->index] = label;
989 continue;
992 /* If we did see a label for the current block already, but it
993 is an artificially created label, replace it if the current
994 label is a user defined label. */
995 if (! DECL_ARTIFICIAL (label)
996 && DECL_ARTIFICIAL (label_for_bb[bb->index]))
998 label_for_bb[bb->index] = label;
999 break;
1004 /* Now redirect all jumps/branches to the selected label.
1005 First do so for each block ending in a control statement. */
1006 FOR_EACH_BB (bb)
1008 tree stmt = last_stmt (bb);
1009 if (!stmt)
1010 continue;
1012 switch (TREE_CODE (stmt))
1014 case COND_EXPR:
1016 tree true_branch, false_branch;
1018 true_branch = COND_EXPR_THEN (stmt);
1019 false_branch = COND_EXPR_ELSE (stmt);
1021 GOTO_DESTINATION (true_branch)
1022 = main_block_label (GOTO_DESTINATION (true_branch));
1023 GOTO_DESTINATION (false_branch)
1024 = main_block_label (GOTO_DESTINATION (false_branch));
1026 break;
1029 case SWITCH_EXPR:
1031 size_t i;
1032 tree vec = SWITCH_LABELS (stmt);
1033 size_t n = TREE_VEC_LENGTH (vec);
1035 /* Replace all destination labels. */
1036 for (i = 0; i < n; ++i)
1038 tree elt = TREE_VEC_ELT (vec, i);
1039 tree label = main_block_label (CASE_LABEL (elt));
1040 CASE_LABEL (elt) = label;
1042 break;
1045 /* We have to handle GOTO_EXPRs until they're removed, and we don't
1046 remove them until after we've created the CFG edges. */
1047 case GOTO_EXPR:
1048 if (! computed_goto_p (stmt))
1050 GOTO_DESTINATION (stmt)
1051 = main_block_label (GOTO_DESTINATION (stmt));
1052 break;
1055 default:
1056 break;
1060 for_each_eh_region (update_eh_label);
1062 /* Finally, purge dead labels. All user-defined labels and labels that
1063 can be the target of non-local gotos and labels which have their
1064 address taken are preserved. */
1065 FOR_EACH_BB (bb)
1067 block_stmt_iterator i;
1068 tree label_for_this_bb = label_for_bb[bb->index];
1070 if (! label_for_this_bb)
1071 continue;
1073 for (i = bsi_start (bb); !bsi_end_p (i); )
1075 tree label, stmt = bsi_stmt (i);
1077 if (TREE_CODE (stmt) != LABEL_EXPR)
1078 break;
1080 label = LABEL_EXPR_LABEL (stmt);
1082 if (label == label_for_this_bb
1083 || ! DECL_ARTIFICIAL (label)
1084 || DECL_NONLOCAL (label)
1085 || FORCED_LABEL (label))
1086 bsi_next (&i);
1087 else
1088 bsi_remove (&i, true);
1092 free (label_for_bb);
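/* For illustration (hypothetical labels, not from the source): given a
   block that begins with

       <D.1234>:;          artificial label
       user_lab:;          user-defined label
       x = 1;

   the pass above selects "user_lab" as the leading label, rewrites
   every COND_EXPR, SWITCH_EXPR, GOTO_EXPR and EH-region reference to
   <D.1234> so that it targets "user_lab", and finally removes
   <D.1234>, since it is artificial, not forced, not non-local and not
   the leading label.  */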
1095 /* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1096 and scan the sorted vector of cases. Combine the ones jumping to the
1097 same label.
1098 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1100 void
1101 group_case_labels (void)
1103 basic_block bb;
1105 FOR_EACH_BB (bb)
1107 tree stmt = last_stmt (bb);
1108 if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
1110 tree labels = SWITCH_LABELS (stmt);
1111 int old_size = TREE_VEC_LENGTH (labels);
1112 int i, j, new_size = old_size;
1113 tree default_case = TREE_VEC_ELT (labels, old_size - 1);
1114 tree default_label;
1116 /* The default label is always the last case in a switch
1117 statement after gimplification. */
1118 default_label = CASE_LABEL (default_case);
1120 /* Look for possible opportunities to merge cases.
1121 Ignore the last element of the label vector because it
1122 must be the default case. */
1123 i = 0;
1124 while (i < old_size - 1)
1126 tree base_case, base_label, base_high;
1127 base_case = TREE_VEC_ELT (labels, i);
1129 gcc_assert (base_case);
1130 base_label = CASE_LABEL (base_case);
1132 /* Discard cases that have the same destination as the
1133 default case. */
1134 if (base_label == default_label)
1136 TREE_VEC_ELT (labels, i) = NULL_TREE;
1137 i++;
1138 new_size--;
1139 continue;
1142 base_high = CASE_HIGH (base_case) ?
1143 CASE_HIGH (base_case) : CASE_LOW (base_case);
1144 i++;
1145 /* Try to merge case labels. Break out when we reach the end
1146 of the label vector or when we cannot merge the next case
1147 label with the current one. */
1148 while (i < old_size - 1)
1150 tree merge_case = TREE_VEC_ELT (labels, i);
1151 tree merge_label = CASE_LABEL (merge_case);
1152 tree t = int_const_binop (PLUS_EXPR, base_high,
1153 integer_one_node, 1);
1155 /* Merge the cases if they jump to the same place,
1156 and their ranges are consecutive. */
1157 if (merge_label == base_label
1158 && tree_int_cst_equal (CASE_LOW (merge_case), t))
1160 base_high = CASE_HIGH (merge_case) ?
1161 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1162 CASE_HIGH (base_case) = base_high;
1163 TREE_VEC_ELT (labels, i) = NULL_TREE;
1164 new_size--;
1165 i++;
1167 else
1168 break;
1172 /* Compress the case labels in the label vector, and adjust the
1173 length of the vector. */
1174 for (i = 0, j = 0; i < new_size; i++)
1176 while (! TREE_VEC_ELT (labels, j))
1177 j++;
1178 TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
1180 TREE_VEC_LENGTH (labels) = new_size;
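/* For illustration (hypothetical case vector, not from the source): a
   switch whose sorted label vector is

       case 1: goto L;  case 2: goto L;  case 3: goto L;  default: goto D;

   is rewritten by the loop above into

       case 1 ... 3: goto L;  default: goto D;

   The slots of the merged entries are first set to NULL_TREE, then the
   vector is compacted and its TREE_VEC_LENGTH reduced to the new
   size.  */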
1185 /* Checks whether we can merge block B into block A. */
1187 static bool
1188 tree_can_merge_blocks_p (basic_block a, basic_block b)
1190 tree stmt;
1191 block_stmt_iterator bsi;
1192 tree phi;
1194 if (!single_succ_p (a))
1195 return false;
1197 if (single_succ_edge (a)->flags & EDGE_ABNORMAL)
1198 return false;
1200 if (single_succ (a) != b)
1201 return false;
1203 if (!single_pred_p (b))
1204 return false;
1206 if (b == EXIT_BLOCK_PTR)
1207 return false;
1209 /* If A ends with a statement causing exceptions or something similar, we
1210 cannot merge the blocks. */
1211 stmt = last_stmt (a);
1212 if (stmt && stmt_ends_bb_p (stmt))
1213 return false;
1215 /* Do not allow a block with only a non-local label to be merged. */
1216 if (stmt && TREE_CODE (stmt) == LABEL_EXPR
1217 && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
1218 return false;
1220 /* It must be possible to eliminate all phi nodes in B. If ssa form
1221 is not up-to-date, we cannot eliminate any phis. */
1222 phi = phi_nodes (b);
1223 if (phi)
1225 if (need_ssa_update_p ())
1226 return false;
1228 for (; phi; phi = PHI_CHAIN (phi))
1229 if (!is_gimple_reg (PHI_RESULT (phi))
1230 && !may_propagate_copy (PHI_RESULT (phi), PHI_ARG_DEF (phi, 0)))
1231 return false;
1234 /* Do not remove user labels. */
1235 for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
1237 stmt = bsi_stmt (bsi);
1238 if (TREE_CODE (stmt) != LABEL_EXPR)
1239 break;
1240 if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
1241 return false;
1244 /* Protect the loop latches. */
1245 if (current_loops
1246 && b->loop_father->latch == b)
1247 return false;
1249 return true;
1252 /* Replaces all uses of NAME by VAL. */
1254 void
1255 replace_uses_by (tree name, tree val)
1257 imm_use_iterator imm_iter;
1258 use_operand_p use;
1259 tree stmt;
1260 edge e;
1261 unsigned i;
1264 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1266 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1268 replace_exp (use, val);
1270 if (TREE_CODE (stmt) == PHI_NODE)
1272 e = PHI_ARG_EDGE (stmt, PHI_ARG_INDEX_FROM_USE (use));
1273 if (e->flags & EDGE_ABNORMAL)
1275 /* This can only occur for virtual operands, since
1276 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1277 would prevent replacement. */
1278 gcc_assert (!is_gimple_reg (name));
1279 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1283 if (TREE_CODE (stmt) != PHI_NODE)
1285 tree rhs;
1287 fold_stmt_inplace (stmt);
1288 rhs = get_rhs (stmt);
1289 if (TREE_CODE (rhs) == ADDR_EXPR)
1290 recompute_tree_invariant_for_addr_expr (rhs);
1292 maybe_clean_or_replace_eh_stmt (stmt, stmt);
1293 mark_new_vars_to_rename (stmt);
1297 gcc_assert (num_imm_uses (name) == 0);
1299 /* Also update the trees stored in loop structures. */
1300 if (current_loops)
1302 struct loop *loop;
1304 for (i = 0; i < current_loops->num; i++)
1306 loop = current_loops->parray[i];
1307 if (loop)
1308 substitute_in_loop_info (loop, name, val);
1313 /* Merge block B into block A. */
1315 static void
1316 tree_merge_blocks (basic_block a, basic_block b)
1318 block_stmt_iterator bsi;
1319 tree_stmt_iterator last;
1320 tree phi;
1322 if (dump_file)
1323 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1325 /* Remove all single-valued PHI nodes from block B of the form
1326 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1327 bsi = bsi_last (a);
1328 for (phi = phi_nodes (b); phi; phi = phi_nodes (b))
1330 tree def = PHI_RESULT (phi), use = PHI_ARG_DEF (phi, 0);
1331 tree copy;
1332 bool may_replace_uses = may_propagate_copy (def, use);
1334 /* In case we have loops to care about, do not propagate arguments of
1335 loop closed ssa phi nodes. */
1336 if (current_loops
1337 && is_gimple_reg (def)
1338 && TREE_CODE (use) == SSA_NAME
1339 && a->loop_father != b->loop_father)
1340 may_replace_uses = false;
1342 if (!may_replace_uses)
1344 gcc_assert (is_gimple_reg (def));
1346 /* Note that just emitting the copies is fine -- there is no problem
1347 with ordering of phi nodes. This is because A is the single
1348 predecessor of B, therefore results of the phi nodes cannot
1349 appear as arguments of the phi nodes. */
1350 copy = build2 (MODIFY_EXPR, void_type_node, def, use);
1351 bsi_insert_after (&bsi, copy, BSI_NEW_STMT);
1352 SET_PHI_RESULT (phi, NULL_TREE);
1353 SSA_NAME_DEF_STMT (def) = copy;
1355 else
1356 replace_uses_by (def, use);
1358 remove_phi_node (phi, NULL);
1361 /* Ensure that B follows A. */
1362 move_block_after (b, a);
1364 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1365 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1367 /* Remove labels from B and set bb_for_stmt to A for other statements. */
1368 for (bsi = bsi_start (b); !bsi_end_p (bsi);)
1370 if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
1372 tree label = bsi_stmt (bsi);
1374 bsi_remove (&bsi, false);
1375 /* Now that we can thread computed gotos, we might have
1376 a situation where we have a forced label in block B.
1377 However, the label at the start of block B might still be
1378 used in other ways (think about the runtime checking for
1379 Fortran assigned gotos).  So we cannot just delete the
1380 label. Instead we move the label to the start of block A. */
1381 if (FORCED_LABEL (LABEL_EXPR_LABEL (label)))
1383 block_stmt_iterator dest_bsi = bsi_start (a);
1384 bsi_insert_before (&dest_bsi, label, BSI_NEW_STMT);
1387 else
1389 set_bb_for_stmt (bsi_stmt (bsi), a);
1390 bsi_next (&bsi);
1394 /* Merge the chains. */
1395 last = tsi_last (a->stmt_list);
1396 tsi_link_after (&last, b->stmt_list, TSI_NEW_STMT);
1397 b->stmt_list = NULL;
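/* For illustration (hypothetical SSA names): if B begins with the
   single-argument PHI node

       v_5 = PHI <v_3>

   then, when copy propagation of v_3 into v_5's uses is allowed, all
   uses of v_5 are replaced by v_3 and the PHI is removed; otherwise an
   explicit copy "v_5 = v_3" is appended at the end of A and becomes
   the new defining statement of v_5.  */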
1401 /* Return the one of two successors of BB that is not reached by a
1402 complex edge, if there is one.  Else, return BB.  We use
1403 this in optimizations that use post-dominators for their heuristics,
1404 to catch the cases in C++ where function calls are involved. */
1406 basic_block
1407 single_noncomplex_succ (basic_block bb)
1409 edge e0, e1;
1410 if (EDGE_COUNT (bb->succs) != 2)
1411 return bb;
1413 e0 = EDGE_SUCC (bb, 0);
1414 e1 = EDGE_SUCC (bb, 1);
1415 if (e0->flags & EDGE_COMPLEX)
1416 return e1->dest;
1417 if (e1->flags & EDGE_COMPLEX)
1418 return e0->dest;
1420 return bb;
1424 /* Walk the function tree removing unnecessary statements.
1426 * Empty statement nodes are removed
1428 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1430 * Unnecessary COND_EXPRs are removed
1432 * Some unnecessary BIND_EXPRs are removed
1434 Clearly more work could be done. The trick is doing the analysis
1435 and removal fast enough to be a net improvement in compile times.
1437 Note that when we remove a control structure such as a COND_EXPR,
1438 BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1439 to ensure we eliminate all the useless code. */
1441 struct rus_data
1443 tree *last_goto;
1444 bool repeat;
1445 bool may_throw;
1446 bool may_branch;
1447 bool has_label;
1450 static void remove_useless_stmts_1 (tree *, struct rus_data *);
1452 static bool
1453 remove_useless_stmts_warn_notreached (tree stmt)
1455 if (EXPR_HAS_LOCATION (stmt))
1457 location_t loc = EXPR_LOCATION (stmt);
1458 if (LOCATION_LINE (loc) > 0)
1460 warning (0, "%Hwill never be executed", &loc);
1461 return true;
1465 switch (TREE_CODE (stmt))
1467 case STATEMENT_LIST:
1469 tree_stmt_iterator i;
1470 for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
1471 if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
1472 return true;
1474 break;
1476 case COND_EXPR:
1477 if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
1478 return true;
1479 if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
1480 return true;
1481 if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
1482 return true;
1483 break;
1485 case TRY_FINALLY_EXPR:
1486 case TRY_CATCH_EXPR:
1487 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
1488 return true;
1489 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
1490 return true;
1491 break;
1493 case CATCH_EXPR:
1494 return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
1495 case EH_FILTER_EXPR:
1496 return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
1497 case BIND_EXPR:
1498 return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));
1500 default:
1501 /* Not a live container. */
1502 break;
1505 return false;
1508 static void
1509 remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
1511 tree then_clause, else_clause, cond;
1512 bool save_has_label, then_has_label, else_has_label;
1514 save_has_label = data->has_label;
1515 data->has_label = false;
1516 data->last_goto = NULL;
1518 remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);
1520 then_has_label = data->has_label;
1521 data->has_label = false;
1522 data->last_goto = NULL;
1524 remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);
1526 else_has_label = data->has_label;
1527 data->has_label = save_has_label | then_has_label | else_has_label;
1529 then_clause = COND_EXPR_THEN (*stmt_p);
1530 else_clause = COND_EXPR_ELSE (*stmt_p);
1531 cond = fold (COND_EXPR_COND (*stmt_p));
1533 /* If neither arm does anything at all, we can remove the whole IF. */
1534 if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
1536 *stmt_p = build_empty_stmt ();
1537 data->repeat = true;
1540 /* If there are no reachable statements in an arm, then we can
1541 zap the entire conditional. */
1542 else if (integer_nonzerop (cond) && !else_has_label)
1544 if (warn_notreached)
1545 remove_useless_stmts_warn_notreached (else_clause);
1546 *stmt_p = then_clause;
1547 data->repeat = true;
1549 else if (integer_zerop (cond) && !then_has_label)
1551 if (warn_notreached)
1552 remove_useless_stmts_warn_notreached (then_clause);
1553 *stmt_p = else_clause;
1554 data->repeat = true;
1557 /* Check a couple of simple things on then/else with single stmts. */
1558 else
1560 tree then_stmt = expr_only (then_clause);
1561 tree else_stmt = expr_only (else_clause);
1563 /* Notice branches to a common destination. */
1564 if (then_stmt && else_stmt
1565 && TREE_CODE (then_stmt) == GOTO_EXPR
1566 && TREE_CODE (else_stmt) == GOTO_EXPR
1567 && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
1569 *stmt_p = then_stmt;
1570 data->repeat = true;
1573 /* If the THEN/ELSE clause merely assigns a value to a variable or
1574 parameter which is already known to contain that value, then
1575 remove the useless THEN/ELSE clause. */
1576 else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1578 if (else_stmt
1579 && TREE_CODE (else_stmt) == MODIFY_EXPR
1580 && TREE_OPERAND (else_stmt, 0) == cond
1581 && integer_zerop (TREE_OPERAND (else_stmt, 1)))
1582 COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
1584 else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
1585 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1586 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1587 && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
1589 tree stmt = (TREE_CODE (cond) == EQ_EXPR
1590 ? then_stmt : else_stmt);
1591 tree *location = (TREE_CODE (cond) == EQ_EXPR
1592 ? &COND_EXPR_THEN (*stmt_p)
1593 : &COND_EXPR_ELSE (*stmt_p));
1595 if (stmt
1596 && TREE_CODE (stmt) == MODIFY_EXPR
1597 && TREE_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
1598 && TREE_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
1599 *location = alloc_stmt_list ();
1603 /* Protect GOTOs in the arm of COND_EXPRs from being removed. They
1604 would be re-introduced during lowering. */
1605 data->last_goto = NULL;
1609 static void
1610 remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
1612 bool save_may_branch, save_may_throw;
1613 bool this_may_branch, this_may_throw;
1615 /* Collect may_branch and may_throw information for the body only. */
1616 save_may_branch = data->may_branch;
1617 save_may_throw = data->may_throw;
1618 data->may_branch = false;
1619 data->may_throw = false;
1620 data->last_goto = NULL;
1622 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1624 this_may_branch = data->may_branch;
1625 this_may_throw = data->may_throw;
1626 data->may_branch |= save_may_branch;
1627 data->may_throw |= save_may_throw;
1628 data->last_goto = NULL;
1630 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1632 /* If the body is empty, then we can emit the FINALLY block without
1633 the enclosing TRY_FINALLY_EXPR. */
1634 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
1636 *stmt_p = TREE_OPERAND (*stmt_p, 1);
1637 data->repeat = true;
1640 /* If the handler is empty, then we can emit the TRY block without
1641 the enclosing TRY_FINALLY_EXPR. */
1642 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1644 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1645 data->repeat = true;
1648 /* If the body neither throws, nor branches, then we can safely
1649 string the TRY and FINALLY blocks together. */
1650 else if (!this_may_branch && !this_may_throw)
1652 tree stmt = *stmt_p;
1653 *stmt_p = TREE_OPERAND (stmt, 0);
1654 append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
1655 data->repeat = true;
1660 static void
1661 remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
1663 bool save_may_throw, this_may_throw;
1664 tree_stmt_iterator i;
1665 tree stmt;
1667 /* Collect may_throw information for the body only. */
1668 save_may_throw = data->may_throw;
1669 data->may_throw = false;
1670 data->last_goto = NULL;
1672 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1674 this_may_throw = data->may_throw;
1675 data->may_throw = save_may_throw;
1677 /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
1678 if (!this_may_throw)
1680 if (warn_notreached)
1681 remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
1682 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1683 data->repeat = true;
1684 return;
1687 /* Process the catch clause specially. We may be able to tell that
1688 no exceptions propagate past this point. */
1690 this_may_throw = true;
1691 i = tsi_start (TREE_OPERAND (*stmt_p, 1));
1692 stmt = tsi_stmt (i);
1693 data->last_goto = NULL;
1695 switch (TREE_CODE (stmt))
1697 case CATCH_EXPR:
1698 for (; !tsi_end_p (i); tsi_next (&i))
1700 stmt = tsi_stmt (i);
1701 /* If we catch all exceptions, then the body does not
1702 propagate exceptions past this point. */
1703 if (CATCH_TYPES (stmt) == NULL)
1704 this_may_throw = false;
1705 data->last_goto = NULL;
1706 remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
1708 break;
1710 case EH_FILTER_EXPR:
1711 if (EH_FILTER_MUST_NOT_THROW (stmt))
1712 this_may_throw = false;
1713 else if (EH_FILTER_TYPES (stmt) == NULL)
1714 this_may_throw = false;
1715 remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
1716 break;
1718 default:
1719 /* Otherwise this is a cleanup. */
1720 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1722 /* If the cleanup is empty, then we can emit the TRY block without
1723 the enclosing TRY_CATCH_EXPR. */
1724 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1726 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1727 data->repeat = true;
1729 break;
1731 data->may_throw |= this_may_throw;
1735 static void
1736 remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
1738 tree block;
1740 /* First remove anything underneath the BIND_EXPR. */
1741 remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);
1743 /* If the BIND_EXPR has no variables, then we can pull everything
1744 up one level and remove the BIND_EXPR, unless this is the toplevel
1745 BIND_EXPR for the current function or an inlined function.
1747 When this situation occurs we will want to apply this
1748 optimization again. */
1749 block = BIND_EXPR_BLOCK (*stmt_p);
1750 if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
1751 && *stmt_p != DECL_SAVED_TREE (current_function_decl)
1752 && (! block
1753 || ! BLOCK_ABSTRACT_ORIGIN (block)
1754 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1755 != FUNCTION_DECL)))
1757 *stmt_p = BIND_EXPR_BODY (*stmt_p);
1758 data->repeat = true;
1763 static void
1764 remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
1766 tree dest = GOTO_DESTINATION (*stmt_p);
1768 data->may_branch = true;
1769 data->last_goto = NULL;
1771 /* Record the last goto expr, so that we can delete it if unnecessary. */
1772 if (TREE_CODE (dest) == LABEL_DECL)
1773 data->last_goto = stmt_p;
1777 static void
1778 remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
1780 tree label = LABEL_EXPR_LABEL (*stmt_p);
1782 data->has_label = true;
1784 /* We do want to jump across non-local label receiver code. */
1785 if (DECL_NONLOCAL (label))
1786 data->last_goto = NULL;
1788 else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
1790 *data->last_goto = build_empty_stmt ();
1791 data->repeat = true;
1794 /* ??? Add something here to delete unused labels. */
1798 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
1799 decl. This allows us to eliminate redundant or useless
1800 calls to "const" functions.
1802 The gimplifier already does the same operation, but we may notice functions
1803 being const and pure once their calls have been gimplified, so we need
1804 to update the flag. */
1806 static void
1807 update_call_expr_flags (tree call)
1809 tree decl = get_callee_fndecl (call);
1810 if (!decl)
1811 return;
1812 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1813 TREE_SIDE_EFFECTS (call) = 0;
1814 if (TREE_NOTHROW (decl))
1815 TREE_NOTHROW (call) = 1;
1819 /* T is a CALL_EXPR.  Set current_function_calls_* flags. */
1821 void
1822 notice_special_calls (tree t)
1824 int flags = call_expr_flags (t);
1826 if (flags & ECF_MAY_BE_ALLOCA)
1827 current_function_calls_alloca = true;
1828 if (flags & ECF_RETURNS_TWICE)
1829 current_function_calls_setjmp = true;
1833 /* Clear flags set by notice_special_calls. Used by dead code removal
1834 to update the flags. */
1836 void
1837 clear_special_calls (void)
1839 current_function_calls_alloca = false;
1840 current_function_calls_setjmp = false;
1844 static void
1845 remove_useless_stmts_1 (tree *tp, struct rus_data *data)
1847 tree t = *tp, op;
1849 switch (TREE_CODE (t))
1851 case COND_EXPR:
1852 remove_useless_stmts_cond (tp, data);
1853 break;
1855 case TRY_FINALLY_EXPR:
1856 remove_useless_stmts_tf (tp, data);
1857 break;
1859 case TRY_CATCH_EXPR:
1860 remove_useless_stmts_tc (tp, data);
1861 break;
1863 case BIND_EXPR:
1864 remove_useless_stmts_bind (tp, data);
1865 break;
1867 case GOTO_EXPR:
1868 remove_useless_stmts_goto (tp, data);
1869 break;
1871 case LABEL_EXPR:
1872 remove_useless_stmts_label (tp, data);
1873 break;
1875 case RETURN_EXPR:
1876 fold_stmt (tp);
1877 data->last_goto = NULL;
1878 data->may_branch = true;
1879 break;
1881 case CALL_EXPR:
1882 fold_stmt (tp);
1883 data->last_goto = NULL;
1884 notice_special_calls (t);
1885 update_call_expr_flags (t);
1886 if (tree_could_throw_p (t))
1887 data->may_throw = true;
1888 break;
1890 case MODIFY_EXPR:
1891 data->last_goto = NULL;
1892 fold_stmt (tp);
1893 op = get_call_expr_in (t);
1894 if (op)
1896 update_call_expr_flags (op);
1897 notice_special_calls (op);
1899 if (tree_could_throw_p (t))
1900 data->may_throw = true;
1901 break;
1903 case STATEMENT_LIST:
1905 tree_stmt_iterator i = tsi_start (t);
1906 while (!tsi_end_p (i))
1908 t = tsi_stmt (i);
1909 if (IS_EMPTY_STMT (t))
1911 tsi_delink (&i);
1912 continue;
1915 remove_useless_stmts_1 (tsi_stmt_ptr (i), data);
1917 t = tsi_stmt (i);
1918 if (TREE_CODE (t) == STATEMENT_LIST)
1920 tsi_link_before (&i, t, TSI_SAME_STMT);
1921 tsi_delink (&i);
1923 else
1924 tsi_next (&i);
1927 break;
1928 case ASM_EXPR:
1929 fold_stmt (tp);
1930 data->last_goto = NULL;
1931 break;
1933 default:
1934 data->last_goto = NULL;
1935 break;
1939 static unsigned int
1940 remove_useless_stmts (void)
1942 struct rus_data data;
1944 clear_special_calls ();
1948 memset (&data, 0, sizeof (data));
1949 remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
1951 while (data.repeat);
1952 return 0;
1956 struct tree_opt_pass pass_remove_useless_stmts =
1958 "useless", /* name */
1959 NULL, /* gate */
1960 remove_useless_stmts, /* execute */
1961 NULL, /* sub */
1962 NULL, /* next */
1963 0, /* static_pass_number */
1964 0, /* tv_id */
1965 PROP_gimple_any, /* properties_required */
1966 0, /* properties_provided */
1967 0, /* properties_destroyed */
1968 0, /* todo_flags_start */
1969 TODO_dump_func, /* todo_flags_finish */
1970 0 /* letter */
1973 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1975 static void
1976 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1978 tree phi;
1980 /* Since this block is no longer reachable, we can just delete all
1981 of its PHI nodes. */
1982 phi = phi_nodes (bb);
1983 while (phi)
1985 tree next = PHI_CHAIN (phi);
1986 remove_phi_node (phi, NULL_TREE);
1987 phi = next;
1990 /* Remove edges to BB's successors. */
1991 while (EDGE_COUNT (bb->succs) > 0)
1992 remove_edge (EDGE_SUCC (bb, 0));
1996 /* Remove statements of basic block BB. */
1998 static void
1999 remove_bb (basic_block bb)
2001 block_stmt_iterator i;
2002 #ifdef USE_MAPPED_LOCATION
2003 source_location loc = UNKNOWN_LOCATION;
2004 #else
2005 source_locus loc = 0;
2006 #endif
2008 if (dump_file)
2010 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2011 if (dump_flags & TDF_DETAILS)
2013 dump_bb (bb, dump_file, 0);
2014 fprintf (dump_file, "\n");
2018 /* If we remove the header or the latch of a loop, mark the loop for
2019 removal by setting its header and latch to NULL. */
2020 if (current_loops)
2022 struct loop *loop = bb->loop_father;
2024 if (loop->latch == bb
2025 || loop->header == bb)
2027 loop->latch = NULL;
2028 loop->header = NULL;
2030 /* Also clean up the information associated with the loop. Updating
2031 it would waste time. More importantly, it may refer to ssa
2032 names that were defined in other removed basic blocks -- these
2033 ssa names are now removed and invalid. */
2034 free_numbers_of_iterations_estimates_loop (loop);
2038 /* Remove all the instructions in the block. */
2039 for (i = bsi_start (bb); !bsi_end_p (i);)
2041 tree stmt = bsi_stmt (i);
2042 if (TREE_CODE (stmt) == LABEL_EXPR
2043 && (FORCED_LABEL (LABEL_EXPR_LABEL (stmt))
2044 || DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt))))
2046 basic_block new_bb;
2047 block_stmt_iterator new_bsi;
2049 /* A non-reachable non-local label may still be referenced.
2050 But it no longer needs to carry the extra semantics of
2051 non-locality. */
2052 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
2054 DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)) = 0;
2055 FORCED_LABEL (LABEL_EXPR_LABEL (stmt)) = 1;
2058 new_bb = bb->prev_bb;
2059 new_bsi = bsi_start (new_bb);
2060 bsi_remove (&i, false);
2061 bsi_insert_before (&new_bsi, stmt, BSI_NEW_STMT);
2063 else
2065 /* Release SSA definitions if we are in SSA. Note that we
2066 may be called when not in SSA. For example,
2067 final_cleanup calls this function via
2068 cleanup_tree_cfg. */
2069 if (in_ssa_p)
2070 release_defs (stmt);
2072 bsi_remove (&i, true);
2075 /* Don't warn for removed gotos. Gotos are often removed due to
2076 jump threading, thus resulting in bogus warnings. Not great,
2077 since this way we lose warnings for gotos in the original
2078 program that are indeed unreachable. */
2079 if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
2081 #ifdef USE_MAPPED_LOCATION
2082 if (EXPR_HAS_LOCATION (stmt))
2083 loc = EXPR_LOCATION (stmt);
2084 #else
2085 source_locus t;
2086 t = EXPR_LOCUS (stmt);
2087 if (t && LOCATION_LINE (*t) > 0)
2088 loc = t;
2089 #endif
2093 /* If requested, give a warning that the first statement in the
2094 block is unreachable.  The loop above walks statements forward and
2095 only records the first location it sees (note the !loc check), so
2096 LOC refers to the first statement in the block. */
2097 #ifdef USE_MAPPED_LOCATION
2098 if (loc > BUILTINS_LOCATION)
2099 warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
2100 #else
2101 if (loc)
2102 warning (OPT_Wunreachable_code, "%Hwill never be executed", loc);
2103 #endif
2105 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2109 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2110 predicate VAL, return the edge that will be taken out of the block.
2111 If VAL does not match a unique edge, NULL is returned. */
2113 edge
2114 find_taken_edge (basic_block bb, tree val)
2116 tree stmt;
2118 stmt = last_stmt (bb);
2120 gcc_assert (stmt);
2121 gcc_assert (is_ctrl_stmt (stmt));
2122 gcc_assert (val);
2124 if (! is_gimple_min_invariant (val))
2125 return NULL;
2127 if (TREE_CODE (stmt) == COND_EXPR)
2128 return find_taken_edge_cond_expr (bb, val);
2130 if (TREE_CODE (stmt) == SWITCH_EXPR)
2131 return find_taken_edge_switch_expr (bb, val);
2133 if (computed_goto_p (stmt))
2134 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2136 gcc_unreachable ();
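/* Illustrative fragment, not part of this file: once a pass has folded the
   predicate of the control statement ending BB to a constant VAL, it might
   use find_taken_edge to prune the dead successors.  BB and VAL are assumed
   to come from the caller; only interfaces visible in this file are used.

       edge taken = find_taken_edge (bb, val);

       if (taken)
	 while (EDGE_COUNT (bb->succs) > 1)
	   remove_edge (EDGE_SUCC (bb, 0) == taken
			? EDGE_SUCC (bb, 1) : EDGE_SUCC (bb, 0));

   A real cleanup would also have to update PHI nodes and dominance
   information; this sketch only shows the intended calling convention.  */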
2139 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2140 statement, determine which of the outgoing edges will be taken out of the
2141 block.  Return NULL if any edge may be taken. */
2143 static edge
2144 find_taken_edge_computed_goto (basic_block bb, tree val)
2146 basic_block dest;
2147 edge e = NULL;
2149 dest = label_to_block (val);
2150 if (dest)
2152 e = find_edge (bb, dest);
2153 gcc_assert (e != NULL);
2156 return e;
2159 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2160 statement, determine which of the two edges will be taken out of the
2161 block. Return NULL if either edge may be taken. */
2163 static edge
2164 find_taken_edge_cond_expr (basic_block bb, tree val)
2166 edge true_edge, false_edge;
2168 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2170 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2171 return (zero_p (val) ? false_edge : true_edge);
2174 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2175 statement, determine which edge will be taken out of the block. Return
2176 NULL if any edge may be taken. */
2178 static edge
2179 find_taken_edge_switch_expr (basic_block bb, tree val)
2181 tree switch_expr, taken_case;
2182 basic_block dest_bb;
2183 edge e;
2185 switch_expr = last_stmt (bb);
2186 taken_case = find_case_label_for_value (switch_expr, val);
2187 dest_bb = label_to_block (CASE_LABEL (taken_case));
2189 e = find_edge (bb, dest_bb);
2190 gcc_assert (e);
2191 return e;
2195 /* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
2196 We can make optimal use here of the fact that the case labels are
2197 sorted: We can do a binary search for a case matching VAL. */
2199 static tree
2200 find_case_label_for_value (tree switch_expr, tree val)
2202 tree vec = SWITCH_LABELS (switch_expr);
2203 size_t low, high, n = TREE_VEC_LENGTH (vec);
2204 tree default_case = TREE_VEC_ELT (vec, n - 1);
2206 for (low = -1, high = n - 1; high - low > 1; )
2208 size_t i = (high + low) / 2;
2209 tree t = TREE_VEC_ELT (vec, i);
2210 int cmp;
2212 /* Cache the result of comparing CASE_LOW and val. */
2213 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2215 if (cmp > 0)
2216 high = i;
2217 else
2218 low = i;
2220 if (CASE_HIGH (t) == NULL)
2222 /* A single-valued case label. */
2223 if (cmp == 0)
2224 return t;
2226 else
2228 /* A case range. We can only handle integer ranges. */
2229 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2230 return t;
2234 return default_case;
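/* A minimal standalone sketch of the same search strategy, written with
   plain integers instead of tree nodes (struct case_entry, find_case and
   default_index are invented for this illustration and do not exist in
   GCC): binary search over entries sorted by their low bound, where an
   entry is either a single value or an inclusive [low, high] range, with
   the default handled separately as above.

       struct case_entry { long low, high; int has_high; };

       static int
       find_case (const struct case_entry *vec, size_t n, long val,
		  int default_index)
       {
	 long lo = -1, hi = (long) n;

	 while (hi - lo > 1)
	   {
	     long mid = (lo + hi) / 2;
	     if (vec[mid].low > val)
	       hi = mid;
	     else
	       lo = mid;
	   }

	 if (lo >= 0
	     && (vec[lo].has_high
		 ? vec[lo].low <= val && val <= vec[lo].high
		 : vec[lo].low == val))
	   return (int) lo;
	 return default_index;
       }
*/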
2240 /*---------------------------------------------------------------------------
2241 Debugging functions
2242 ---------------------------------------------------------------------------*/
2244 /* Dump tree-specific information of block BB to file OUTF. */
2246 void
2247 tree_dump_bb (basic_block bb, FILE *outf, int indent)
2249 dump_generic_bb (outf, bb, indent, TDF_VOPS);
2253 /* Dump a basic block on stderr. */
2255 void
2256 debug_tree_bb (basic_block bb)
2258 dump_bb (bb, stderr, 0);
2262 /* Dump basic block with index N on stderr. */
2264 basic_block
2265 debug_tree_bb_n (int n)
2267 debug_tree_bb (BASIC_BLOCK (n));
2268 return BASIC_BLOCK (n);
2272 /* Dump the CFG on stderr.
2274 FLAGS are the same as those used by the tree dumping functions
2275 (see TDF_* in tree-pass.h). */
2277 void
2278 debug_tree_cfg (int flags)
2280 dump_tree_cfg (stderr, flags);
2284 /* Dump the program showing basic block boundaries on the given FILE.
2286 FLAGS are the same as those used by the tree dumping functions (see TDF_* in
2287 tree.h). */
2289 void
2290 dump_tree_cfg (FILE *file, int flags)
2292 if (flags & TDF_DETAILS)
2294 const char *funcname
2295 = lang_hooks.decl_printable_name (current_function_decl, 2);
2297 fputc ('\n', file);
2298 fprintf (file, ";; Function %s\n\n", funcname);
2299 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2300 n_basic_blocks, n_edges, last_basic_block);
2302 brief_dump_cfg (file);
2303 fprintf (file, "\n");
2306 if (flags & TDF_STATS)
2307 dump_cfg_stats (file);
2309 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2313 /* Dump CFG statistics on FILE. */
2315 void
2316 dump_cfg_stats (FILE *file)
2318 static long max_num_merged_labels = 0;
2319 unsigned long size, total = 0;
2320 long num_edges;
2321 basic_block bb;
2322 const char * const fmt_str = "%-30s%-13s%12s\n";
2323 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2324 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2325 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2326 const char *funcname
2327 = lang_hooks.decl_printable_name (current_function_decl, 2);
2330 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2332 fprintf (file, "---------------------------------------------------------\n");
2333 fprintf (file, fmt_str, "", " Number of ", "Memory");
2334 fprintf (file, fmt_str, "", " instances ", "used ");
2335 fprintf (file, "---------------------------------------------------------\n");
2337 size = n_basic_blocks * sizeof (struct basic_block_def);
2338 total += size;
2339 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2340 SCALE (size), LABEL (size));
2342 num_edges = 0;
2343 FOR_EACH_BB (bb)
2344 num_edges += EDGE_COUNT (bb->succs);
2345 size = num_edges * sizeof (struct edge_def);
2346 total += size;
2347 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2349 fprintf (file, "---------------------------------------------------------\n");
2350 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2351 LABEL (total));
2352 fprintf (file, "---------------------------------------------------------\n");
2353 fprintf (file, "\n");
2355 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2356 max_num_merged_labels = cfg_stats.num_merged_labels;
2358 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2359 cfg_stats.num_merged_labels, max_num_merged_labels);
2361 fprintf (file, "\n");
2365 /* Dump CFG statistics on stderr. Keep extern so that it's always
2366 linked in the final executable. */
2368 void
2369 debug_cfg_stats (void)
2371 dump_cfg_stats (stderr);
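/* These debug_* routines are mainly intended to be called by hand from a
   debugger while stepping through the compiler.  A typical gdb session
   might look like the following (the block number 3 is arbitrary):

       (gdb) call debug_tree_cfg (0)
       (gdb) call debug_tree_bb_n (3)
       (gdb) call debug_cfg_stats ()
*/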
2375 /* Dump the flowgraph to a .vcg FILE. */
2377 static void
2378 tree_cfg2vcg (FILE *file)
2380 edge e;
2381 edge_iterator ei;
2382 basic_block bb;
2383 const char *funcname
2384 = lang_hooks.decl_printable_name (current_function_decl, 2);
2386 /* Write the file header. */
2387 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2388 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2389 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2391 /* Write blocks and edges. */
2392 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2394 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2395 e->dest->index);
2397 if (e->flags & EDGE_FAKE)
2398 fprintf (file, " linestyle: dotted priority: 10");
2399 else
2400 fprintf (file, " linestyle: solid priority: 100");
2402 fprintf (file, " }\n");
2404 fputc ('\n', file);
2406 FOR_EACH_BB (bb)
2408 enum tree_code head_code, end_code;
2409 const char *head_name, *end_name;
2410 int head_line = 0;
2411 int end_line = 0;
2412 tree first = first_stmt (bb);
2413 tree last = last_stmt (bb);
2415 if (first)
2417 head_code = TREE_CODE (first);
2418 head_name = tree_code_name[head_code];
2419 head_line = get_lineno (first);
2421 else
2422 head_name = "no-statement";
2424 if (last)
2426 end_code = TREE_CODE (last);
2427 end_name = tree_code_name[end_code];
2428 end_line = get_lineno (last);
2430 else
2431 end_name = "no-statement";
2433 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2434 bb->index, bb->index, head_name, head_line, end_name,
2435 end_line);
2437 FOR_EACH_EDGE (e, ei, bb->succs)
2439 if (e->dest == EXIT_BLOCK_PTR)
2440 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2441 else
2442 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2444 if (e->flags & EDGE_FAKE)
2445 fprintf (file, " priority: 10 linestyle: dotted");
2446 else
2447 fprintf (file, " priority: 100 linestyle: solid");
2449 fprintf (file, " }\n");
2452 if (bb->next_bb != EXIT_BLOCK_PTR)
2453 fputc ('\n', file);
2456 fputs ("}\n\n", file);
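/* For reference, the VCG writer above produces output of roughly this
   shape ("foo", the block indices and the statement names are placeholders
   assembled from the format strings used above, not real compiler output):

       graph: { title: "foo"
       node: { title: "ENTRY" label: "ENTRY" }
       node: { title: "EXIT" label: "EXIT" }
       edge: { sourcename: "ENTRY" targetname: "2" linestyle: solid priority: 100 }

       node: { title: "2" label: "#2\nmodify_expr (5)\ncond_expr (6)"}
       edge: { sourcename: "2" targetname: "3" priority: 100 linestyle: solid }
       edge: { sourcename: "2" targetname: "EXIT" priority: 100 linestyle: solid }
       ...
       }
*/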
2461 /*---------------------------------------------------------------------------
2462 Miscellaneous helpers
2463 ---------------------------------------------------------------------------*/
2465 /* Return true if T represents a stmt that always transfers control. */
2467 bool
2468 is_ctrl_stmt (tree t)
2470 return (TREE_CODE (t) == COND_EXPR
2471 || TREE_CODE (t) == SWITCH_EXPR
2472 || TREE_CODE (t) == GOTO_EXPR
2473 || TREE_CODE (t) == RETURN_EXPR
2474 || TREE_CODE (t) == RESX_EXPR);
2478 /* Return true if T is a statement that may alter the flow of control
2479 (e.g., a call to a non-returning function). */
2481 bool
2482 is_ctrl_altering_stmt (tree t)
2484 tree call;
2486 gcc_assert (t);
2487 call = get_call_expr_in (t);
2488 if (call)
2490 /* A non-pure/const CALL_EXPR alters control flow if the current
2491 function has nonlocal labels. */
2492 if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
2493 return true;
2495 /* A CALL_EXPR also alters control flow if it does not return. */
2496 if (call_expr_flags (call) & ECF_NORETURN)
2497 return true;
2500 /* OpenMP directives alter control flow. */
2501 if (OMP_DIRECTIVE_P (t))
2502 return true;
2504 /* If a statement can throw, it alters control flow. */
2505 return tree_can_throw_internal (t);
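/* For example, a call to a noreturn function such as abort () carries
   ECF_NORETURN and therefore alters control flow (it ends its basic
   block), as does any statement that can throw internally.  A plain call
   that cannot throw, in a function without nonlocal labels, alters
   nothing.  */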
2509 /* Return true if T is a computed goto. */
2511 bool
2512 computed_goto_p (tree t)
2514 return (TREE_CODE (t) == GOTO_EXPR
2515 && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
2519 /* Checks whether EXPR is a simple local goto. */
2521 bool
2522 simple_goto_p (tree expr)
2524 return (TREE_CODE (expr) == GOTO_EXPR
2525 && TREE_CODE (GOTO_DESTINATION (expr)) == LABEL_DECL);
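/* At the source level the distinction is, for instance:

       goto done;             GOTO_DESTINATION is a LABEL_DECL; simple_goto_p
       goto *targets[i];      destination is a computed expression; computed_goto_p

   Simple gotos are normally represented implicitly by CFG edges, while a
   computed goto keeps an explicit GOTO_EXPR at the end of its block.  */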
2529 /* Return true if T should start a new basic block. PREV_T is the
2530 statement preceding T. It is used when T is a label or a case label.
2531 Labels should only start a new basic block if the previous statement
2532 wasn't a label.  Otherwise, a sequence of labels would generate
2533 unnecessary basic blocks that contain nothing but a single label. */
2535 static inline bool
2536 stmt_starts_bb_p (tree t, tree prev_t)
2538 if (t == NULL_TREE)
2539 return false;
2541 /* LABEL_EXPRs start a new basic block only if the preceding
2542 statement wasn't a label of the same type. This prevents the
2543 creation of consecutive blocks that have nothing but a single
2544 label. */
2545 if (TREE_CODE (t) == LABEL_EXPR)
2547 /* Nonlocal and computed GOTO targets always start a new block. */
2548 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
2549 || FORCED_LABEL (LABEL_EXPR_LABEL (t)))
2550 return true;
2552 if (prev_t && TREE_CODE (prev_t) == LABEL_EXPR)
2554 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
2555 return true;
2557 cfg_stats.num_merged_labels++;
2558 return false;
2560 else
2561 return true;
2564 return false;
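/* As an example, a run of plain labels such as

       L1:
       L2:
       L3:
	 x = 1;

   yields a single basic block starting at L1; L2 and L3 are counted in
   cfg_stats.num_merged_labels instead of opening blocks of their own.  A
   nonlocal or forced label in the middle of such a run would still start
   a new block.  */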
2568 /* Return true if T should end a basic block. */
2570 bool
2571 stmt_ends_bb_p (tree t)
2573 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2577 /* Add gotos that used to be represented implicitly in the CFG. */
2579 void
2580 disband_implicit_edges (void)
2582 basic_block bb;
2583 block_stmt_iterator last;
2584 edge e;
2585 edge_iterator ei;
2586 tree stmt, label;
2588 FOR_EACH_BB (bb)
2590 last = bsi_last (bb);
2591 stmt = last_stmt (bb);
2593 if (stmt && TREE_CODE (stmt) == COND_EXPR)
2595 /* Remove superfluous gotos from COND_EXPR branches.  This was
2596 moved here from cfg_remove_useless_stmts because doing it there
2597 violates the invariants of the tree--CFG correspondence, and it
2598 fits better here, where we rewrite the edges anyway. */
2599 e = find_edge (bb, bb->next_bb);
2600 if (e)
2602 if (e->flags & EDGE_TRUE_VALUE)
2603 COND_EXPR_THEN (stmt) = build_empty_stmt ();
2604 else if (e->flags & EDGE_FALSE_VALUE)
2605 COND_EXPR_ELSE (stmt) = build_empty_stmt ();
2606 else
2607 gcc_unreachable ();
2608 e->flags |= EDGE_FALLTHRU;
2611 continue;
2614 if (stmt && TREE_CODE (stmt) == RETURN_EXPR)
2616 /* Remove the RETURN_EXPR if we may fall through to the exit
2617 instead. */
2618 gcc_assert (single_succ_p (bb));
2619 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
2621 if (bb->next_bb == EXIT_BLOCK_PTR
2622 && !TREE_OPERAND (stmt, 0))
2624 bsi_remove (&last, true);
2625 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
2627 continue;
2630 /* There can be no fallthru edge if the last statement is a control
2631 one. */
2632 if (stmt && is_ctrl_stmt (stmt))
2633 continue;
2635 /* Find a fallthru edge and emit the goto if necessary. */
2636 FOR_EACH_EDGE (e, ei, bb->succs)
2637 if (e->flags & EDGE_FALLTHRU)
2638 break;
2640 if (!e || e->dest == bb->next_bb)
2641 continue;
2643 gcc_assert (e->dest != EXIT_BLOCK_PTR);
2644 label = tree_block_label (e->dest);
2646 stmt = build1 (GOTO_EXPR, void_type_node, label);
2647 #ifdef USE_MAPPED_LOCATION
2648 SET_EXPR_LOCATION (stmt, e->goto_locus);
2649 #else
2650 SET_EXPR_LOCUS (stmt, e->goto_locus);
2651 #endif
2652 bsi_insert_after (&last, stmt, BSI_NEW_STMT);
2653 e->flags &= ~EDGE_FALLTHRU;
2657 /* Remove block annotations and other data structures. */
2659 void
2660 delete_tree_cfg_annotations (void)
2662 label_to_block_map = NULL;
2666 /* Return the first statement in basic block BB. */
2668 tree
2669 first_stmt (basic_block bb)
2671 block_stmt_iterator i = bsi_start (bb);
2672 return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
2676 /* Return the last statement in basic block BB. */
2678 tree
2679 last_stmt (basic_block bb)
2681 block_stmt_iterator b = bsi_last (bb);
2682 return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
2686 /* Return a pointer to the last statement in block BB. */
2688 tree *
2689 last_stmt_ptr (basic_block bb)
2691 block_stmt_iterator last = bsi_last (bb);
2692 return !bsi_end_p (last) ? bsi_stmt_ptr (last) : NULL;
2696 /* Return the last statement of an otherwise empty block. Return NULL
2697 if the block is totally empty, or if it contains more than one
2698 statement. */
2700 tree
2701 last_and_only_stmt (basic_block bb)
2703 block_stmt_iterator i = bsi_last (bb);
2704 tree last, prev;
2706 if (bsi_end_p (i))
2707 return NULL_TREE;
2709 last = bsi_stmt (i);
2710 bsi_prev (&i);
2711 if (bsi_end_p (i))
2712 return last;
2714 /* Empty statements should no longer appear in the instruction stream.
2715 Everything that might have appeared before should be deleted by
2716 remove_useless_stmts, and the optimizers should just bsi_remove
2717 instead of smashing with build_empty_stmt.
2719 Thus the only thing that should appear here in a block containing
2720 one executable statement is a label. */
2721 prev = bsi_stmt (i);
2722 if (TREE_CODE (prev) == LABEL_EXPR)
2723 return last;
2724 else
2725 return NULL_TREE;
2729 /* Mark BB as the basic block holding statement T. */
2731 void
2732 set_bb_for_stmt (tree t, basic_block bb)
2734 if (TREE_CODE (t) == PHI_NODE)
2735 PHI_BB (t) = bb;
2736 else if (TREE_CODE (t) == STATEMENT_LIST)
2738 tree_stmt_iterator i;
2739 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2740 set_bb_for_stmt (tsi_stmt (i), bb);
2742 else
2744 stmt_ann_t ann = get_stmt_ann (t);
2745 ann->bb = bb;
2747 /* If the statement is a label, add the label to block-to-labels map
2748 so that we can speed up edge creation for GOTO_EXPRs. */
2749 if (TREE_CODE (t) == LABEL_EXPR)
2751 int uid;
2753 t = LABEL_EXPR_LABEL (t);
2754 uid = LABEL_DECL_UID (t);
2755 if (uid == -1)
2757 unsigned old_len = VEC_length (basic_block, label_to_block_map);
2758 LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
2759 if (old_len <= (unsigned) uid)
2761 basic_block *addr;
2762 unsigned new_len = 3 * uid / 2;
2764 VEC_safe_grow (basic_block, gc, label_to_block_map,
2765 new_len);
2766 addr = VEC_address (basic_block, label_to_block_map);
2767 memset (&addr[old_len],
2768 0, sizeof (basic_block) * (new_len - old_len));
2771 else
2772 /* We're moving an existing label. Make sure that we've
2773 removed it from the old block. */
2774 gcc_assert (!bb
2775 || !VEC_index (basic_block, label_to_block_map, uid));
2776 VEC_replace (basic_block, label_to_block_map, uid, bb);
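/* A purely illustrative example of the growth policy above: if a label is
   assigned uid 9 while label_to_block_map holds only 8 entries, the vector
   is grown to 3 * 9 / 2 = 13 entries, slots 8 through 12 are cleared, and
   then slot 9 is set to BB.  */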
2781 /* Find the block statement iterator that points to STMT. */
2783 extern block_stmt_iterator
2784 bsi_for_stmt (tree stmt)
2786 block_stmt_iterator bsi;
2788 for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
2789 if (bsi_stmt (bsi) == stmt)
2790 return bsi;
2792 gcc_unreachable ();
2795 /* Update statement T, or each statement of statement list T, if it has been marked modified. */
2796 static inline void
2797 update_modified_stmts (tree t)
2799 if (TREE_CODE (t) == STATEMENT_LIST)
2801 tree_stmt_iterator i;
2802 tree stmt;
2803 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2805 stmt = tsi_stmt (i);
2806 update_stmt_if_modified (stmt);
2809 else
2810 update_stmt_if_modified (t);
2813 /* Insert statement (or statement list) T before the statement
2814 pointed-to by iterator I. M specifies how to update iterator I
2815 after insertion (see enum bsi_iterator_update). */
2817 void
2818 bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2820 set_bb_for_stmt (t, i->bb);
2821 update_modified_stmts (t);
2822 tsi_link_before (&i->tsi, t, m);
2826 /* Insert statement (or statement list) T after the statement
2827 pointed-to by iterator I. M specifies how to update iterator I
2828 after insertion (see enum bsi_iterator_update). */
2830 void
2831 bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2833 set_bb_for_stmt (t, i->bb);
2834 update_modified_stmts (t);
2835 tsi_link_after (&i->tsi, t, m);
2839 /* Remove the statement pointed to by iterator I. The iterator is updated
2840 to the next statement.
2842 When REMOVE_EH_INFO is true we remove the statement pointed to by
2843 iterator I from the EH tables. Otherwise we do not modify the EH
2844 tables.
2846 Generally, REMOVE_EH_INFO should be true when the statement is going to
2847 be removed from the IL and not reinserted elsewhere. */
2849 void
2850 bsi_remove (block_stmt_iterator *i, bool remove_eh_info)
2852 tree t = bsi_stmt (*i);
2853 set_bb_for_stmt (t, NULL);
2854 delink_stmt_imm_use (t);
2855 tsi_delink (&i->tsi);
2856 mark_stmt_modified (t);
2857 if (remove_eh_info)
2858 remove_stmt_from_eh_region (t);
2862 /* Move the statement at FROM so it comes right after the statement at TO. */
2864 void
2865 bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
2867 tree stmt = bsi_stmt (*from);
2868 bsi_remove (from, false);
2869 bsi_insert_after (to, stmt, BSI_SAME_STMT);
2873 /* Move the statement at FROM so it comes right before the statement at TO. */
2875 void
2876 bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
2878 tree stmt = bsi_stmt (*from);
2879 bsi_remove (from, false);
2880 bsi_insert_before (to, stmt, BSI_SAME_STMT);
2884 /* Move the statement at FROM to the end of basic block BB. */
2886 void
2887 bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
2889 block_stmt_iterator last = bsi_last (bb);
2891 /* Have to check bsi_end_p because it could be an empty block. */
2892 if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
2893 bsi_move_before (from, &last);
2894 else
2895 bsi_move_after (from, &last);
2899 /* Replace the contents of the statement pointed to by iterator BSI
2900 with STMT. If UPDATE_EH_INFO is true, the exception handling
2901 information of the original statement is moved to the new statement. */
2903 void
2904 bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool update_eh_info)
2906 int eh_region;
2907 tree orig_stmt = bsi_stmt (*bsi);
2909 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
2910 set_bb_for_stmt (stmt, bsi->bb);
2912 /* Preserve EH region information from the original statement, if
2913 requested by the caller. */
2914 if (update_eh_info)
2916 eh_region = lookup_stmt_eh_region (orig_stmt);
2917 if (eh_region >= 0)
2919 remove_stmt_from_eh_region (orig_stmt);
2920 add_stmt_to_eh_region (stmt, eh_region);
2924 delink_stmt_imm_use (orig_stmt);
2925 *bsi_stmt_ptr (*bsi) = stmt;
2926 mark_stmt_modified (stmt);
2927 update_modified_stmts (stmt);
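/* Illustrative fragment, not part of this file: a pass that has simplified
   ORIG_STMT into an equivalent NEW_STMT (both assumed to be provided by the
   caller) would typically replace it in place and ask for the EH
   information to be carried over:

       block_stmt_iterator bsi = bsi_for_stmt (orig_stmt);
       bsi_replace (&bsi, new_stmt, true);
*/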
2931 /* Find a location on edge E at which a statement may be inserted and
2932 store it in *BSI.  Every attempt is made to find a place in an
2933 existing basic block, but sometimes that isn't possible.  When it
2934 isn't possible, the edge is split and the new block is used.
2936 In all cases, the returned *BSI points to the correct location.  The
2937 return value is true if insertion should be done after the location,
2938 or false if it should be done before the location.  If a new basic
2939 block has to be created, it is stored in *NEW_BB. */
2941 static bool
2942 tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
2943 basic_block *new_bb)
2945 basic_block dest, src;
2946 tree tmp;
2948 dest = e->dest;
2949 restart:
2951 /* If the destination has one predecessor which has no PHI nodes,
2952 insert there. Except for the exit block.
2954 The requirement for no PHI nodes could be relaxed. Basically we
2955 would have to examine the PHIs to prove that none of them used
2956 the value set by the statement we want to insert on E. That
2957 hardly seems worth the effort. */
2958 if (single_pred_p (dest)
2959 && ! phi_nodes (dest)
2960 && dest != EXIT_BLOCK_PTR)
2962 *bsi = bsi_start (dest);
2963 if (bsi_end_p (*bsi))
2964 return true;
2966 /* Make sure we insert after any leading labels. */
2967 tmp = bsi_stmt (*bsi);
2968 while (TREE_CODE (tmp) == LABEL_EXPR)
2970 bsi_next (bsi);
2971 if (bsi_end_p (*bsi))
2972 break;
2973 tmp = bsi_stmt (*bsi);
2976 if (bsi_end_p (*bsi))
2978 *bsi = bsi_last (dest);
2979 return true;
2981 else
2982 return false;
2985 /* If the source has one successor, the edge is not abnormal and
2986 the last statement does not end a basic block, insert there.
2987 Except for the entry block. */
2988 src = e->src;
2989 if ((e->flags & EDGE_ABNORMAL) == 0
2990 && single_succ_p (src)
2991 && src != ENTRY_BLOCK_PTR)
2993 *bsi = bsi_last (src);
2994 if (bsi_end_p (*bsi))
2995 return true;
2997 tmp = bsi_stmt (*bsi);
2998 if (!stmt_ends_bb_p (tmp))
2999 return true;
3001 /* Insert code just before returning the value. We may need to decompose
3002 the return in case it contains a non-trivial operand. */
3003 if (TREE_CODE (tmp) == RETURN_EXPR)
3005 tree op = TREE_OPERAND (tmp, 0);
3006 if (op && !is_gimple_val (op))
3008 gcc_assert (TREE_CODE (op) == MODIFY_EXPR);
3009 bsi_insert_before (bsi, op, BSI_NEW_STMT);
3010 TREE_OPERAND (tmp, 0) = TREE_OPERAND (op, 0);
3012 bsi_prev (bsi);
3013 return true;
3017 /* Otherwise, create a new basic block, and split this edge. */
3018 dest = split_edge (e);
3019 if (new_bb)
3020 *new_bb = dest;
3021 e = single_pred_edge (dest);
3022 goto restart;
3026 /* This routine will commit all pending edge insertions, creating any new
3027 basic blocks which are necessary. */
3029 void
3030 bsi_commit_edge_inserts (void)
3032 basic_block bb;
3033 edge e;
3034 edge_iterator ei;
3036 bsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR), NULL);
3038 FOR_EACH_BB (bb)
3039 FOR_EACH_EDGE (e, ei, bb->succs)
3040 bsi_commit_one_edge_insert (e, NULL);
3044 /* Commit insertions pending at edge E. If a new block is created, set NEW_BB
3045 to this block, otherwise set it to NULL. */
3047 void
3048 bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
3050 if (new_bb)
3051 *new_bb = NULL;
3052 if (PENDING_STMT (e))
3054 block_stmt_iterator bsi;
3055 tree stmt = PENDING_STMT (e);
3057 PENDING_STMT (e) = NULL_TREE;
3059 if (tree_find_edge_insert_loc (e, &bsi, new_bb))
3060 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3061 else
3062 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3067 /* Add STMT to the pending list of edge E. No actual insertion is
3068 made until a call to bsi_commit_edge_inserts () is made. */
3070 void
3071 bsi_insert_on_edge (edge e, tree stmt)
3073 append_to_statement_list (stmt, &PENDING_STMT (e));
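/* Illustrative fragment, not part of this file: the usual pattern is to
   queue statements on any number of edges and flush them all at once, so
   that edges are split only where really needed.  E1, E2, STMT1 and STMT2
   stand for caller-provided values:

       bsi_insert_on_edge (e1, stmt1);
       bsi_insert_on_edge (e2, stmt2);
       bsi_commit_edge_inserts ();

   When a single statement must land on an edge right away, for instance
   because the caller wants the possibly created block, use
   bsi_insert_on_edge_immediate below instead.  */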
3076 /* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts. If a new
3077 block has to be created, it is returned. */
3079 basic_block
3080 bsi_insert_on_edge_immediate (edge e, tree stmt)
3082 block_stmt_iterator bsi;
3083 basic_block new_bb = NULL;
3085 gcc_assert (!PENDING_STMT (e));
3087 if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
3088 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3089 else
3090 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3092 return new_bb;
3095 /*---------------------------------------------------------------------------
3096 Tree specific functions for CFG manipulation
3097 ---------------------------------------------------------------------------*/
3099 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
3101 static void
3102 reinstall_phi_args (edge new_edge, edge old_edge)
3104 tree var, phi;
3106 if (!PENDING_STMT (old_edge))
3107 return;
3109 for (var = PENDING_STMT (old_edge), phi = phi_nodes (new_edge->dest);
3110 var && phi;
3111 var = TREE_CHAIN (var), phi = PHI_CHAIN (phi))
3113 tree result = TREE_PURPOSE (var);
3114 tree arg = TREE_VALUE (var);
3116 gcc_assert (result == PHI_RESULT (phi));
3118 add_phi_arg (phi, arg, new_edge);
3121 PENDING_STMT (old_edge) = NULL;
3124 /* Returns the basic block after which the new basic block created
3125 by splitting edge EDGE_IN should be placed.  Tries to keep the new block
3126 near its "logical" location.  This is mostly of help to humans looking
3127 at debugging dumps. */
3129 static basic_block
3130 split_edge_bb_loc (edge edge_in)
3132 basic_block dest = edge_in->dest;
3134 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
3135 return edge_in->src;
3136 else
3137 return dest->prev_bb;
3140 /* Split a (typically critical) edge EDGE_IN. Return the new block.
3141 Abort on abnormal edges. */
3143 static basic_block
3144 tree_split_edge (edge edge_in)
3146 basic_block new_bb, after_bb, dest;
3147 edge new_edge, e;
3149 /* Abnormal edges cannot be split. */
3150 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
3152 dest = edge_in->dest;
3154 after_bb = split_edge_bb_loc (edge_in);
3156 new_bb = create_empty_bb (after_bb);
3157 new_bb->frequency = EDGE_FREQUENCY (edge_in);
3158 new_bb->count = edge_in->count;
3159 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
3160 new_edge->probability = REG_BR_PROB_BASE;
3161 new_edge->count = edge_in->count;
3163 e = redirect_edge_and_branch (edge_in, new_bb);
3164 gcc_assert (e);
3165 reinstall_phi_args (new_edge, e);
3167 return new_bb;
3171 /* Return true when BB has label LABEL in it. */
3173 static bool
3174 has_label_p (basic_block bb, tree label)
3176 block_stmt_iterator bsi;
3178 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3180 tree stmt = bsi_stmt (bsi);
3182 if (TREE_CODE (stmt) != LABEL_EXPR)
3183 return false;
3184 if (LABEL_EXPR_LABEL (stmt) == label)
3185 return true;
3187 return false;
3191 /* Callback for walk_tree, check that all elements with address taken are
3192 properly noticed as such.  DATA is non-null if and only if TP was
3193 seen inside a PHI node. */
3195 static tree
3196 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3198 tree t = *tp, x;
3199 bool in_phi = (data != NULL);
3201 if (TYPE_P (t))
3202 *walk_subtrees = 0;
3204 /* Check operand N for being valid GIMPLE and give error MSG if not. */
3205 #define CHECK_OP(N, MSG) \
3206 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
3207 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
3209 switch (TREE_CODE (t))
3211 case SSA_NAME:
3212 if (SSA_NAME_IN_FREE_LIST (t))
3214 error ("SSA name in freelist but still referenced");
3215 return *tp;
3217 break;
3219 case ASSERT_EXPR:
3220 x = fold (ASSERT_EXPR_COND (t));
3221 if (x == boolean_false_node)
3223 error ("ASSERT_EXPR with an always-false condition");
3224 return *tp;
3226 break;
3228 case MODIFY_EXPR:
3229 x = TREE_OPERAND (t, 0);
3230 if (TREE_CODE (x) == BIT_FIELD_REF
3231 && is_gimple_reg (TREE_OPERAND (x, 0)))
3233 error ("GIMPLE register modified with BIT_FIELD_REF");
3234 return t;
3236 break;
3238 case ADDR_EXPR:
3240 bool old_invariant;
3241 bool old_constant;
3242 bool old_side_effects;
3243 bool new_invariant;
3244 bool new_constant;
3245 bool new_side_effects;
3247 /* ??? tree-ssa-alias.c may have overlooked dead PHI nodes, missing
3248 dead PHIs that take the address of something. But if the PHI
3249 result is dead, the fact that it takes the address of anything
3250 is irrelevant. Because we can not tell from here if a PHI result
3251 is dead, we just skip this check for PHIs altogether. This means
3252 we may be missing "valid" checks, but what can you do?
3253 This was PR19217. */
3254 if (in_phi)
3255 break;
3257 old_invariant = TREE_INVARIANT (t);
3258 old_constant = TREE_CONSTANT (t);
3259 old_side_effects = TREE_SIDE_EFFECTS (t);
3261 recompute_tree_invariant_for_addr_expr (t);
3262 new_invariant = TREE_INVARIANT (t);
3263 new_side_effects = TREE_SIDE_EFFECTS (t);
3264 new_constant = TREE_CONSTANT (t);
3266 if (old_invariant != new_invariant)
3268 error ("invariant not recomputed when ADDR_EXPR changed");
3269 return t;
3272 if (old_constant != new_constant)
3274 error ("constant not recomputed when ADDR_EXPR changed");
3275 return t;
3277 if (old_side_effects != new_side_effects)
3279 error ("side effects not recomputed when ADDR_EXPR changed");
3280 return t;
3283 /* Skip any references (they will be checked when we recurse down the
3284 tree) and ensure that any variable used as a prefix is marked
3285 addressable. */
3286 for (x = TREE_OPERAND (t, 0);
3287 handled_component_p (x);
3288 x = TREE_OPERAND (x, 0))
3291 if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
3292 return NULL;
3293 if (!TREE_ADDRESSABLE (x))
3295 error ("address taken, but ADDRESSABLE bit not set");
3296 return x;
3298 break;
3301 case COND_EXPR:
3302 x = COND_EXPR_COND (t);
3303 if (TREE_CODE (TREE_TYPE (x)) != BOOLEAN_TYPE)
3305 error ("non-boolean used in condition");
3306 return x;
3308 if (!is_gimple_condexpr (x))
3310 error ("invalid conditional operand");
3311 return x;
3313 break;
3315 case NOP_EXPR:
3316 case CONVERT_EXPR:
3317 case FIX_TRUNC_EXPR:
3318 case FIX_CEIL_EXPR:
3319 case FIX_FLOOR_EXPR:
3320 case FIX_ROUND_EXPR:
3321 case FLOAT_EXPR:
3322 case NEGATE_EXPR:
3323 case ABS_EXPR:
3324 case BIT_NOT_EXPR:
3325 case NON_LVALUE_EXPR:
3326 case TRUTH_NOT_EXPR:
3327 CHECK_OP (0, "invalid operand to unary operator");
3328 break;
3330 case REALPART_EXPR:
3331 case IMAGPART_EXPR:
3332 case COMPONENT_REF:
3333 case ARRAY_REF:
3334 case ARRAY_RANGE_REF:
3335 case BIT_FIELD_REF:
3336 case VIEW_CONVERT_EXPR:
3337 /* We have a nest of references. Verify that each of the operands
3338 that determine where to reference is either a constant or a variable,
3339 verify that the base is valid, and then show we've already checked
3340 the subtrees. */
3341 while (handled_component_p (t))
3343 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3344 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3345 else if (TREE_CODE (t) == ARRAY_REF
3346 || TREE_CODE (t) == ARRAY_RANGE_REF)
3348 CHECK_OP (1, "invalid array index");
3349 if (TREE_OPERAND (t, 2))
3350 CHECK_OP (2, "invalid array lower bound");
3351 if (TREE_OPERAND (t, 3))
3352 CHECK_OP (3, "invalid array stride");
3354 else if (TREE_CODE (t) == BIT_FIELD_REF)
3356 CHECK_OP (1, "invalid operand to BIT_FIELD_REF");
3357 CHECK_OP (2, "invalid operand to BIT_FIELD_REF");
3360 t = TREE_OPERAND (t, 0);
3363 if (!CONSTANT_CLASS_P (t) && !is_gimple_lvalue (t))
3365 error ("invalid reference prefix");
3366 return t;
3368 *walk_subtrees = 0;
3369 break;
3371 case LT_EXPR:
3372 case LE_EXPR:
3373 case GT_EXPR:
3374 case GE_EXPR:
3375 case EQ_EXPR:
3376 case NE_EXPR:
3377 case UNORDERED_EXPR:
3378 case ORDERED_EXPR:
3379 case UNLT_EXPR:
3380 case UNLE_EXPR:
3381 case UNGT_EXPR:
3382 case UNGE_EXPR:
3383 case UNEQ_EXPR:
3384 case LTGT_EXPR:
3385 case PLUS_EXPR:
3386 case MINUS_EXPR:
3387 case MULT_EXPR:
3388 case TRUNC_DIV_EXPR:
3389 case CEIL_DIV_EXPR:
3390 case FLOOR_DIV_EXPR:
3391 case ROUND_DIV_EXPR:
3392 case TRUNC_MOD_EXPR:
3393 case CEIL_MOD_EXPR:
3394 case FLOOR_MOD_EXPR:
3395 case ROUND_MOD_EXPR:
3396 case RDIV_EXPR:
3397 case EXACT_DIV_EXPR:
3398 case MIN_EXPR:
3399 case MAX_EXPR:
3400 case LSHIFT_EXPR:
3401 case RSHIFT_EXPR:
3402 case LROTATE_EXPR:
3403 case RROTATE_EXPR:
3404 case BIT_IOR_EXPR:
3405 case BIT_XOR_EXPR:
3406 case BIT_AND_EXPR:
3407 CHECK_OP (0, "invalid operand to binary operator");
3408 CHECK_OP (1, "invalid operand to binary operator");
3409 break;
3411 default:
3412 break;
3414 return NULL;
3416 #undef CHECK_OP
3420 /* Verify STMT, return true if STMT is not in GIMPLE form.
3421 TODO: Implement type checking. */
3423 static bool
3424 verify_stmt (tree stmt, bool last_in_block)
3426 tree addr;
3428 if (OMP_DIRECTIVE_P (stmt))
3430 /* OpenMP directives are validated by the FE and never operated
3431 on by the optimizers. Furthermore, OMP_FOR may contain
3432 non-gimple expressions when the main index variable has had
3433 its address taken. This does not affect the loop itself
3434 because the header of an OMP_FOR is merely used to determine
3435 how to set up the parallel iteration. */
3436 return false;
3439 if (!is_gimple_stmt (stmt))
3441 error ("is not a valid GIMPLE statement");
3442 goto fail;
3445 addr = walk_tree (&stmt, verify_expr, NULL, NULL);
3446 if (addr)
3448 debug_generic_stmt (addr);
3449 return true;
3452 /* If the statement is marked as part of an EH region, then it is
3453 expected that the statement could throw. Verify that when we
3454 have optimizations that simplify statements such that we prove
3455 that they cannot throw, that we update other data structures
3456 to match. */
3457 if (lookup_stmt_eh_region (stmt) >= 0)
3459 if (!tree_could_throw_p (stmt))
3461 error ("statement marked for throw, but doesn%'t");
3462 goto fail;
3464 if (!last_in_block && tree_can_throw_internal (stmt))
3466 error ("statement marked for throw in middle of block");
3467 goto fail;
3471 return false;
3473 fail:
3474 debug_generic_stmt (stmt);
3475 return true;
3479 /* Return true when T can be shared. */
3481 static bool
3482 tree_node_can_be_shared (tree t)
3484 if (IS_TYPE_OR_DECL_P (t)
3485 || is_gimple_min_invariant (t)
3486 || TREE_CODE (t) == SSA_NAME
3487 || t == error_mark_node
3488 || TREE_CODE (t) == IDENTIFIER_NODE)
3489 return true;
3491 if (TREE_CODE (t) == CASE_LABEL_EXPR)
3492 return true;
3494 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3495 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
3496 || TREE_CODE (t) == COMPONENT_REF
3497 || TREE_CODE (t) == REALPART_EXPR
3498 || TREE_CODE (t) == IMAGPART_EXPR)
3499 t = TREE_OPERAND (t, 0);
3501 if (DECL_P (t))
3502 return true;
3504 return false;
3508 /* Called via walk_tree.  Verify tree sharing. */
3510 static tree
3511 verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
3513 htab_t htab = (htab_t) data;
3514 void **slot;
3516 if (tree_node_can_be_shared (*tp))
3518 *walk_subtrees = false;
3519 return NULL;
3522 slot = htab_find_slot (htab, *tp, INSERT);
3523 if (*slot)
3524 return (tree) *slot;
3525 *slot = *tp;
3527 return NULL;
3531 /* Verify the GIMPLE statement chain. */
3533 void
3534 verify_stmts (void)
3536 basic_block bb;
3537 block_stmt_iterator bsi;
3538 bool err = false;
3539 htab_t htab;
3540 tree addr;
3542 timevar_push (TV_TREE_STMT_VERIFY);
3543 htab = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
3545 FOR_EACH_BB (bb)
3547 tree phi;
3548 int i;
3550 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
3552 int phi_num_args = PHI_NUM_ARGS (phi);
3554 if (bb_for_stmt (phi) != bb)
3556 error ("bb_for_stmt (phi) is set to a wrong basic block");
3557 err |= true;
3560 for (i = 0; i < phi_num_args; i++)
3562 tree t = PHI_ARG_DEF (phi, i);
3563 tree addr;
3565 /* Addressable variables do have SSA_NAMEs but they
3566 are not considered gimple values. */
3567 if (TREE_CODE (t) != SSA_NAME
3568 && TREE_CODE (t) != FUNCTION_DECL
3569 && !is_gimple_val (t))
3571 error ("PHI def is not a GIMPLE value");
3572 debug_generic_stmt (phi);
3573 debug_generic_stmt (t);
3574 err |= true;
3577 addr = walk_tree (&t, verify_expr, (void *) 1, NULL);
3578 if (addr)
3580 debug_generic_stmt (addr);
3581 err |= true;
3584 addr = walk_tree (&t, verify_node_sharing, htab, NULL);
3585 if (addr)
3587 error ("incorrect sharing of tree nodes");
3588 debug_generic_stmt (phi);
3589 debug_generic_stmt (addr);
3590 err |= true;
3595 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
3597 tree stmt = bsi_stmt (bsi);
3599 if (bb_for_stmt (stmt) != bb)
3601 error ("bb_for_stmt (stmt) is set to a wrong basic block");
3602 err |= true;
3605 bsi_next (&bsi);
3606 err |= verify_stmt (stmt, bsi_end_p (bsi));
3607 addr = walk_tree (&stmt, verify_node_sharing, htab, NULL);
3608 if (addr)
3610 error ("incorrect sharing of tree nodes");
3611 debug_generic_stmt (stmt);
3612 debug_generic_stmt (addr);
3613 err |= true;
3618 if (err)
3619 internal_error ("verify_stmts failed");
3621 htab_delete (htab);
3622 timevar_pop (TV_TREE_STMT_VERIFY);
3626 /* Verifies that the flow information is OK. */
3628 static int
3629 tree_verify_flow_info (void)
3631 int err = 0;
3632 basic_block bb;
3633 block_stmt_iterator bsi;
3634 tree stmt;
3635 edge e;
3636 edge_iterator ei;
3638 if (ENTRY_BLOCK_PTR->stmt_list)
3640 error ("ENTRY_BLOCK has a statement list associated with it");
3641 err = 1;
3644 if (EXIT_BLOCK_PTR->stmt_list)
3646 error ("EXIT_BLOCK has a statement list associated with it");
3647 err = 1;
3650 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
3651 if (e->flags & EDGE_FALLTHRU)
3653 error ("fallthru to exit from bb %d", e->src->index);
3654 err = 1;
3657 FOR_EACH_BB (bb)
3659 bool found_ctrl_stmt = false;
3661 stmt = NULL_TREE;
3663 /* Skip labels at the start of the basic block. */
3664 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3666 tree prev_stmt = stmt;
3668 stmt = bsi_stmt (bsi);
3670 if (TREE_CODE (stmt) != LABEL_EXPR)
3671 break;
3673 if (prev_stmt && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
3675 error ("nonlocal label ");
3676 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3677 fprintf (stderr, " is not first in a sequence of labels in bb %d",
3678 bb->index);
3679 err = 1;
3682 if (label_to_block (LABEL_EXPR_LABEL (stmt)) != bb)
3684 error ("label ");
3685 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3686 fprintf (stderr, " to block does not match in bb %d",
3687 bb->index);
3688 err = 1;
3691 if (decl_function_context (LABEL_EXPR_LABEL (stmt))
3692 != current_function_decl)
3694 error ("label ");
3695 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3696 fprintf (stderr, " has incorrect context in bb %d",
3697 bb->index);
3698 err = 1;
3702 /* Verify that body of basic block BB is free of control flow. */
3703 for (; !bsi_end_p (bsi); bsi_next (&bsi))
3705 tree stmt = bsi_stmt (bsi);
3707 if (found_ctrl_stmt)
3709 error ("control flow in the middle of basic block %d",
3710 bb->index);
3711 err = 1;
3714 if (stmt_ends_bb_p (stmt))
3715 found_ctrl_stmt = true;
3717 if (TREE_CODE (stmt) == LABEL_EXPR)
3719 error ("label ");
3720 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3721 fprintf (stderr, " in the middle of basic block %d", bb->index);
3722 err = 1;
3726 bsi = bsi_last (bb);
3727 if (bsi_end_p (bsi))
3728 continue;
3730 stmt = bsi_stmt (bsi);
3732 err |= verify_eh_edges (stmt);
3734 if (is_ctrl_stmt (stmt))
3736 FOR_EACH_EDGE (e, ei, bb->succs)
3737 if (e->flags & EDGE_FALLTHRU)
3739 error ("fallthru edge after a control statement in bb %d",
3740 bb->index);
3741 err = 1;
3745 switch (TREE_CODE (stmt))
3747 case COND_EXPR:
3749 edge true_edge;
3750 edge false_edge;
3751 if (TREE_CODE (COND_EXPR_THEN (stmt)) != GOTO_EXPR
3752 || TREE_CODE (COND_EXPR_ELSE (stmt)) != GOTO_EXPR)
3754 error ("structured COND_EXPR at the end of bb %d", bb->index);
3755 err = 1;
3758 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
3760 if (!true_edge || !false_edge
3761 || !(true_edge->flags & EDGE_TRUE_VALUE)
3762 || !(false_edge->flags & EDGE_FALSE_VALUE)
3763 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3764 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3765 || EDGE_COUNT (bb->succs) >= 3)
3767 error ("wrong outgoing edge flags at end of bb %d",
3768 bb->index);
3769 err = 1;
3772 if (!has_label_p (true_edge->dest,
3773 GOTO_DESTINATION (COND_EXPR_THEN (stmt))))
3775 error ("%<then%> label does not match edge at end of bb %d",
3776 bb->index);
3777 err = 1;
3780 if (!has_label_p (false_edge->dest,
3781 GOTO_DESTINATION (COND_EXPR_ELSE (stmt))))
3783 error ("%<else%> label does not match edge at end of bb %d",
3784 bb->index);
3785 err = 1;
3788 break;
3790 case GOTO_EXPR:
3791 if (simple_goto_p (stmt))
3793 error ("explicit goto at end of bb %d", bb->index);
3794 err = 1;
3796 else
3798 /* FIXME. We should double check that the labels in the
3799 destination blocks have their address taken. */
3800 FOR_EACH_EDGE (e, ei, bb->succs)
3801 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
3802 | EDGE_FALSE_VALUE))
3803 || !(e->flags & EDGE_ABNORMAL))
3805 error ("wrong outgoing edge flags at end of bb %d",
3806 bb->index);
3807 err = 1;
3810 break;
3812 case RETURN_EXPR:
3813 if (!single_succ_p (bb)
3814 || (single_succ_edge (bb)->flags
3815 & (EDGE_FALLTHRU | EDGE_ABNORMAL
3816 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3818 error ("wrong outgoing edge flags at end of bb %d", bb->index);
3819 err = 1;
3821 if (single_succ (bb) != EXIT_BLOCK_PTR)
3823 error ("return edge does not point to exit in bb %d",
3824 bb->index);
3825 err = 1;
3827 break;
3829 case SWITCH_EXPR:
3831 tree prev;
3832 edge e;
3833 size_t i, n;
3834 tree vec;
3836 vec = SWITCH_LABELS (stmt);
3837 n = TREE_VEC_LENGTH (vec);
3839 /* Mark all the destination basic blocks. */
3840 for (i = 0; i < n; ++i)
3842 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3843 basic_block label_bb = label_to_block (lab);
3845 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
3846 label_bb->aux = (void *)1;
3849 /* Verify that the case labels are sorted. */
3850 prev = TREE_VEC_ELT (vec, 0);
3851 for (i = 1; i < n - 1; ++i)
3853 tree c = TREE_VEC_ELT (vec, i);
3854 if (! CASE_LOW (c))
3856 error ("found default case not at end of case vector");
3857 err = 1;
3858 continue;
3860 if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
3862 error ("case labels not sorted: ");
3863 print_generic_expr (stderr, prev, 0);
3864 fprintf (stderr," is greater than ");
3865 print_generic_expr (stderr, c, 0);
3866 fprintf (stderr," but comes before it.\n");
3867 err = 1;
3869 prev = c;
3871 if (CASE_LOW (TREE_VEC_ELT (vec, n - 1)))
3873 error ("no default case found at end of case vector");
3874 err = 1;
3877 FOR_EACH_EDGE (e, ei, bb->succs)
3879 if (!e->dest->aux)
3881 error ("extra outgoing edge %d->%d",
3882 bb->index, e->dest->index);
3883 err = 1;
3885 e->dest->aux = (void *)2;
3886 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
3887 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3889 error ("wrong outgoing edge flags at end of bb %d",
3890 bb->index);
3891 err = 1;
3895 /* Check that we have all of them. */
3896 for (i = 0; i < n; ++i)
3898 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3899 basic_block label_bb = label_to_block (lab);
3901 if (label_bb->aux != (void *)2)
3903 error ("missing edge %i->%i",
3904 bb->index, label_bb->index);
3905 err = 1;
3909 FOR_EACH_EDGE (e, ei, bb->succs)
3910 e->dest->aux = (void *)0;
3913 default: ;
3917 if (dom_computed[CDI_DOMINATORS] >= DOM_NO_FAST_QUERY)
3918 verify_dominators (CDI_DOMINATORS);
3920 return err;
3924 /* Updates phi nodes after creating a forwarder block joined
3925 by edge FALLTHRU. */
3927 static void
3928 tree_make_forwarder_block (edge fallthru)
3930 edge e;
3931 edge_iterator ei;
3932 basic_block dummy, bb;
3933 tree phi, new_phi, var;
3935 dummy = fallthru->src;
3936 bb = fallthru->dest;
3938 if (single_pred_p (bb))
3939 return;
3941 /* If we redirected a branch we must create new phi nodes at the
3942 start of BB. */
3943 for (phi = phi_nodes (dummy); phi; phi = PHI_CHAIN (phi))
3945 var = PHI_RESULT (phi);
3946 new_phi = create_phi_node (var, bb);
3947 SSA_NAME_DEF_STMT (var) = new_phi;
3948 SET_PHI_RESULT (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
3949 add_phi_arg (new_phi, PHI_RESULT (phi), fallthru);
3952 /* Ensure that the PHI node chain is in the same order. */
3953 set_phi_nodes (bb, phi_reverse (phi_nodes (bb)));
3955 /* Add the arguments we have stored on edges. */
3956 FOR_EACH_EDGE (e, ei, bb->preds)
3958 if (e == fallthru)
3959 continue;
3961 flush_pending_stmts (e);
3966 /* Return a non-special label at the head of basic block BB.
3967 Create one if it doesn't exist. */
3969 tree
3970 tree_block_label (basic_block bb)
3972 block_stmt_iterator i, s = bsi_start (bb);
3973 bool first = true;
3974 tree label, stmt;
3976 for (i = s; !bsi_end_p (i); first = false, bsi_next (&i))
3978 stmt = bsi_stmt (i);
3979 if (TREE_CODE (stmt) != LABEL_EXPR)
3980 break;
3981 label = LABEL_EXPR_LABEL (stmt);
3982 if (!DECL_NONLOCAL (label))
3984 if (!first)
3985 bsi_move_before (&i, &s);
3986 return label;
3990 label = create_artificial_label ();
3991 stmt = build1 (LABEL_EXPR, void_type_node, label);
3992 bsi_insert_before (&s, stmt, BSI_NEW_STMT);
3993 return label;
3997 /* Attempt to perform edge redirection by replacing a possibly complex
3998 jump instruction by a goto or by removing the jump completely.
3999 This can apply only if all edges now point to the same block. The
4000 parameters and return values are equivalent to
4001 redirect_edge_and_branch. */
4003 static edge
4004 tree_try_redirect_by_replacing_jump (edge e, basic_block target)
4006 basic_block src = e->src;
4007 block_stmt_iterator b;
4008 tree stmt;
4010 /* We can replace or remove a complex jump only when we have exactly
4011 two edges. */
4012 if (EDGE_COUNT (src->succs) != 2
4013 /* Verify that all targets will be TARGET. Specifically, the
4014 edge that is not E must also go to TARGET. */
4015 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4016 return NULL;
4018 b = bsi_last (src);
4019 if (bsi_end_p (b))
4020 return NULL;
4021 stmt = bsi_stmt (b);
4023 if (TREE_CODE (stmt) == COND_EXPR
4024 || TREE_CODE (stmt) == SWITCH_EXPR)
4026 bsi_remove (&b, true);
4027 e = ssa_redirect_edge (e, target);
4028 e->flags = EDGE_FALLTHRU;
4029 return e;
4032 return NULL;
4036 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4037 edge representing the redirected branch. */
4039 static edge
4040 tree_redirect_edge_and_branch (edge e, basic_block dest)
4042 basic_block bb = e->src;
4043 block_stmt_iterator bsi;
4044 edge ret;
4045 tree label, stmt;
4047 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4048 return NULL;
4050 if (e->src != ENTRY_BLOCK_PTR
4051 && (ret = tree_try_redirect_by_replacing_jump (e, dest)))
4052 return ret;
4054 if (e->dest == dest)
4055 return NULL;
4057 label = tree_block_label (dest);
4059 bsi = bsi_last (bb);
4060 stmt = bsi_end_p (bsi) ? NULL : bsi_stmt (bsi);
4062 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
4064 case COND_EXPR:
4065 stmt = (e->flags & EDGE_TRUE_VALUE
4066 ? COND_EXPR_THEN (stmt)
4067 : COND_EXPR_ELSE (stmt));
4068 GOTO_DESTINATION (stmt) = label;
4069 break;
4071 case GOTO_EXPR:
4072 /* No non-abnormal edges should lead from a non-simple goto, and
4073 simple ones should be represented implicitly. */
4074 gcc_unreachable ();
4076 case SWITCH_EXPR:
4078 tree cases = get_cases_for_edge (e, stmt);
4080 /* If we have a list of cases associated with E, then use it
4081 as it's a lot faster than walking the entire case vector. */
4082 if (cases)
4084 edge e2 = find_edge (e->src, dest);
4085 tree last, first;
4087 first = cases;
4088 while (cases)
4090 last = cases;
4091 CASE_LABEL (cases) = label;
4092 cases = TREE_CHAIN (cases);
4095 /* If there was already an edge in the CFG, then we need
4096 to move all the cases associated with E to E2. */
4097 if (e2)
4099 tree cases2 = get_cases_for_edge (e2, stmt);
4101 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4102 TREE_CHAIN (cases2) = first;
4105 else
4107 tree vec = SWITCH_LABELS (stmt);
4108 size_t i, n = TREE_VEC_LENGTH (vec);
4110 for (i = 0; i < n; i++)
4112 tree elt = TREE_VEC_ELT (vec, i);
4114 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4115 CASE_LABEL (elt) = label;
4119 break;
4122 case RETURN_EXPR:
4123 bsi_remove (&bsi, true);
4124 e->flags |= EDGE_FALLTHRU;
4125 break;
4127 default:
4128 /* Otherwise it must be a fallthru edge, and we don't need to
4129 do anything besides redirecting it. */
4130 gcc_assert (e->flags & EDGE_FALLTHRU);
4131 break;
4134 /* Update/insert PHI nodes as necessary. */
4136 /* Now update the edges in the CFG. */
4137 e = ssa_redirect_edge (e, dest);
4139 return e;
4143 /* Simple wrapper, as we can always redirect fallthru edges. */
4145 static basic_block
4146 tree_redirect_edge_and_branch_force (edge e, basic_block dest)
4148 e = tree_redirect_edge_and_branch (e, dest);
4149 gcc_assert (e);
4151 return NULL;
4155 /* Splits basic block BB after statement STMT (but at least after the
4156 labels). If STMT is NULL, BB is split just after the labels. */
4158 static basic_block
4159 tree_split_block (basic_block bb, void *stmt)
4161 block_stmt_iterator bsi, bsi_tgt;
4162 tree act;
4163 basic_block new_bb;
4164 edge e;
4165 edge_iterator ei;
4167 new_bb = create_empty_bb (bb);
4169 /* Redirect the outgoing edges. */
4170 new_bb->succs = bb->succs;
4171 bb->succs = NULL;
4172 FOR_EACH_EDGE (e, ei, new_bb->succs)
4173 e->src = new_bb;
4175 if (stmt && TREE_CODE ((tree) stmt) == LABEL_EXPR)
4176 stmt = NULL;
4178 /* Move everything from BSI to the new basic block. */
4179 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4181 act = bsi_stmt (bsi);
4182 if (TREE_CODE (act) == LABEL_EXPR)
4183 continue;
4185 if (!stmt)
4186 break;
4188 if (stmt == act)
4190 bsi_next (&bsi);
4191 break;
4195 bsi_tgt = bsi_start (new_bb);
4196 while (!bsi_end_p (bsi))
4198 act = bsi_stmt (bsi);
4199 bsi_remove (&bsi, false);
4200 bsi_insert_after (&bsi_tgt, act, BSI_NEW_STMT);
4203 return new_bb;
4207 /* Moves basic block BB after block AFTER. */
4209 static bool
4210 tree_move_block_after (basic_block bb, basic_block after)
4212 if (bb->prev_bb == after)
4213 return true;
4215 unlink_block (bb);
4216 link_block (bb, after);
4218 return true;
4222 /* Return true if basic_block can be duplicated. */
4224 static bool
4225 tree_can_duplicate_bb_p (basic_block bb ATTRIBUTE_UNUSED)
4227 return true;
4231 /* Create a duplicate of the basic block BB. NOTE: This does not
4232 preserve SSA form. */
4234 static basic_block
4235 tree_duplicate_bb (basic_block bb)
4237 basic_block new_bb;
4238 block_stmt_iterator bsi, bsi_tgt;
4239 tree phi;
4241 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
4243 /* Copy the PHI nodes. We ignore PHI node arguments here because
4244 the incoming edges have not been set up yet. */
4245 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4247 tree copy = create_phi_node (PHI_RESULT (phi), new_bb);
4248 create_new_def_for (PHI_RESULT (copy), copy, PHI_RESULT_PTR (copy));
4251 /* Keep the chain of PHI nodes in the same order so that they can be
4252 updated by ssa_redirect_edge. */
4253 set_phi_nodes (new_bb, phi_reverse (phi_nodes (new_bb)));
4255 bsi_tgt = bsi_start (new_bb);
4256 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4258 def_operand_p def_p;
4259 ssa_op_iter op_iter;
4260 tree stmt, copy;
4261 int region;
4263 stmt = bsi_stmt (bsi);
4264 if (TREE_CODE (stmt) == LABEL_EXPR)
4265 continue;
4267 /* Create a new copy of STMT and duplicate STMT's virtual
4268 operands. */
4269 copy = unshare_expr (stmt);
4270 bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
4271 copy_virtual_operands (copy, stmt);
4272 region = lookup_stmt_eh_region (stmt);
4273 if (region >= 0)
4274 add_stmt_to_eh_region (copy, region);
4276 /* Create new names for all the definitions created by COPY and
4277 add replacement mappings for each new name. */
4278 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
4279 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
4282 return new_bb;
4286 /* Basic block BB_COPY was created by code duplication. Add phi node
4287 arguments for edges going out of BB_COPY. The blocks that were
4288 duplicated have BB_DUPLICATED set. */
4290 void
4291 add_phi_args_after_copy_bb (basic_block bb_copy)
4293 basic_block bb, dest;
4294 edge e, e_copy;
4295 edge_iterator ei;
4296 tree phi, phi_copy, phi_next, def;
4298 bb = get_bb_original (bb_copy);
4300 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
4302 if (!phi_nodes (e_copy->dest))
4303 continue;
4305 if (e_copy->dest->flags & BB_DUPLICATED)
4306 dest = get_bb_original (e_copy->dest);
4307 else
4308 dest = e_copy->dest;
4310 e = find_edge (bb, dest);
4311 if (!e)
4313 /* During loop unrolling the target of the latch edge is copied.
4314 In this case we are not looking for the edge to DEST, but for
4315 the edge to the duplicated block whose original was DEST. */
4316 FOR_EACH_EDGE (e, ei, bb->succs)
4317 if ((e->dest->flags & BB_DUPLICATED)
4318 && get_bb_original (e->dest) == dest)
4319 break;
4321 gcc_assert (e != NULL);
4324 for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
4325 phi;
4326 phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
4328 phi_next = PHI_CHAIN (phi);
4329 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4330 add_phi_arg (phi_copy, def, e_copy);
4335 /* Blocks in REGION_COPY array of length N_REGION were created by
4336 duplication of basic blocks. Add phi node arguments for edges
4337 going from these blocks. */
4339 void
4340 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region)
4342 unsigned i;
4344 for (i = 0; i < n_region; i++)
4345 region_copy[i]->flags |= BB_DUPLICATED;
4347 for (i = 0; i < n_region; i++)
4348 add_phi_args_after_copy_bb (region_copy[i]);
4350 for (i = 0; i < n_region; i++)
4351 region_copy[i]->flags &= ~BB_DUPLICATED;
4354 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
4355 important exit edge EXIT. By important we mean that no SSA name defined
4356 inside the region is live over the other exit edges of the region. All entry
4357 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
4358 to the duplicate of the region. SSA form, dominance and loop information
4359 is updated. The new basic blocks are stored to REGION_COPY in the same
4360 order as they had in REGION, provided that REGION_COPY is not NULL.
4361 The function returns false if it is unable to copy the region,
4362 true otherwise. */
4364 bool
4365 tree_duplicate_sese_region (edge entry, edge exit,
4366 basic_block *region, unsigned n_region,
4367 basic_block *region_copy)
4369 unsigned i, n_doms;
4370 bool free_region_copy = false, copying_header = false;
4371 struct loop *loop = entry->dest->loop_father;
4372 edge exit_copy;
4373 basic_block *doms;
4374 edge redirected;
4375 int total_freq = 0, entry_freq = 0;
4376 gcov_type total_count = 0, entry_count = 0;
4378 if (!can_copy_bbs_p (region, n_region))
4379 return false;
4381 /* Some sanity checking.  Note that we do not check for all possible
4382 misuses of the function.  That is, if you ask to copy something weird,
4383 it will work, but the state of the data structures probably will not
4384 be correct. */
4385 for (i = 0; i < n_region; i++)
4387 /* We do not handle subloops, i.e. all the blocks must belong to the
4388 same loop. */
4389 if (region[i]->loop_father != loop)
4390 return false;
4392 if (region[i] != entry->dest
4393 && region[i] == loop->header)
4394 return false;
4397 loop->copy = loop;
/* In case the function is used for loop header copying (which is the primary
   use), ensure that EXIT and its copy will be the new latch and entry edges.  */
4401 if (loop->header == entry->dest)
4403 copying_header = true;
4404 loop->copy = loop->outer;
4406 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
4407 return false;
4409 for (i = 0; i < n_region; i++)
4410 if (region[i] != exit->src
4411 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
4412 return false;
4415 if (!region_copy)
4417 region_copy = XNEWVEC (basic_block, n_region);
4418 free_region_copy = true;
4421 gcc_assert (!need_ssa_update_p ());
4423 /* Record blocks outside the region that are dominated by something
4424 inside. */
4425 doms = XNEWVEC (basic_block, n_basic_blocks);
4426 initialize_original_copy_tables ();
4428 n_doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region, doms);
4430 if (entry->dest->count)
4432 total_count = entry->dest->count;
4433 entry_count = entry->count;
4434 /* Fix up corner cases, to avoid division by zero or creation of negative
4435 frequencies. */
4436 if (entry_count > total_count)
4437 entry_count = total_count;
4439 else
4441 total_freq = entry->dest->frequency;
4442 entry_freq = EDGE_FREQUENCY (entry);
4443 /* Fix up corner cases, to avoid division by zero or creation of negative
4444 frequencies. */
4445 if (total_freq == 0)
4446 total_freq = 1;
4447 else if (entry_freq > total_freq)
4448 entry_freq = total_freq;
4451 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
4452 split_edge_bb_loc (entry));
4453 if (total_count)
4455 scale_bbs_frequencies_gcov_type (region, n_region,
4456 total_count - entry_count,
4457 total_count);
4458 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
4459 total_count);
4461 else
4463 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
4464 total_freq);
4465 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
4468 if (copying_header)
4470 loop->header = exit->dest;
4471 loop->latch = exit->src;
4474 /* Redirect the entry and add the phi node arguments. */
4475 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
4476 gcc_assert (redirected != NULL);
4477 flush_pending_stmts (entry);
/* Concerning updating of dominators: we must recount dominators
   for the entry block and its copy.  Anything that is outside of the
   region but was dominated by something inside needs recounting as
   well.  */
4483 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
4484 doms[n_doms++] = get_bb_original (entry->dest);
4485 iterate_fix_dominators (CDI_DOMINATORS, doms, n_doms);
4486 free (doms);
4488 /* Add the other PHI node arguments. */
4489 add_phi_args_after_copy (region_copy, n_region);
4491 /* Update the SSA web. */
4492 update_ssa (TODO_update_ssa);
4494 if (free_region_copy)
4495 free (region_copy);
4497 free_original_copy_tables ();
4498 return true;
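/* A minimal, hypothetical sketch (not part of the original code) of how a
   loop-header-copying pass might call tree_duplicate_sese_region.  ENTRY is
   assumed to be the single edge entering the loop header, EXIT the exit
   edge out of the header, and REGION/N_REGION the blocks that make up the
   header.  */

static bool
example_copy_loop_header (edge entry, edge exit,
                          basic_block *region, unsigned n_region)
{
  /* Passing NULL for REGION_COPY lets tree_duplicate_sese_region allocate
     and free the array of copies itself.  */
  if (!tree_duplicate_sese_region (entry, exit, region, n_region, NULL))
    return false;

  /* On success ENTRY now leads to the duplicated header, and the SSA form,
     dominators and loop structures have already been updated.  */
  return true;
}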
4502 DEF_VEC_P(basic_block);
4503 DEF_VEC_ALLOC_P(basic_block,heap);
4506 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
4507 adding blocks when the dominator traversal reaches EXIT. This
4508 function silently assumes that ENTRY strictly dominates EXIT. */
4510 static void
4511 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
4512 VEC(basic_block,heap) **bbs_p)
4514 basic_block son;
4516 for (son = first_dom_son (CDI_DOMINATORS, entry);
4517 son;
4518 son = next_dom_son (CDI_DOMINATORS, son))
4520 VEC_safe_push (basic_block, heap, *bbs_p, son);
4521 if (son != exit)
4522 gather_blocks_in_sese_region (son, exit, bbs_p);
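/* A minimal, hypothetical sketch (not part of the original code) showing
   how the blocks of a SESE region are collected, mirroring what
   move_sese_region_to_fn does below.  ENTRY_BB is assumed to strictly
   dominate EXIT_BB.  */

static VEC(basic_block,heap) *
example_collect_sese_blocks (basic_block entry_bb, basic_block exit_bb)
{
  VEC(basic_block,heap) *bbs = NULL;

  /* The traversal walks the dominator tree, so it must be available.  */
  calculate_dominance_info (CDI_DOMINATORS);

  /* ENTRY_BB itself is not pushed by the recursion, so add it first.  */
  VEC_safe_push (basic_block, heap, bbs, entry_bb);
  gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);

  return bbs;
}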
4527 struct move_stmt_d
4529 tree block;
4530 tree from_context;
4531 tree to_context;
4532 bitmap vars_to_remove;
4533 htab_t new_label_map;
4534 bool remap_decls_p;
4537 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
4538 contained in *TP and change the DECL_CONTEXT of every local
4539 variable referenced in *TP. */
4541 static tree
4542 move_stmt_r (tree *tp, int *walk_subtrees, void *data)
4544 struct move_stmt_d *p = (struct move_stmt_d *) data;
4545 tree t = *tp;
4547 if (p->block && IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (TREE_CODE (t))))
4548 TREE_BLOCK (t) = p->block;
4550 if (OMP_DIRECTIVE_P (t)
4551 && TREE_CODE (t) != OMP_RETURN
4552 && TREE_CODE (t) != OMP_CONTINUE)
4554 /* Do not remap variables inside OMP directives. Variables
4555 referenced in clauses and directive header belong to the
4556 parent function and should not be moved into the child
4557 function. */
4558 bool save_remap_decls_p = p->remap_decls_p;
4559 p->remap_decls_p = false;
4560 *walk_subtrees = 0;
4562 walk_tree (&OMP_BODY (t), move_stmt_r, p, NULL);
4564 p->remap_decls_p = save_remap_decls_p;
4566 else if (DECL_P (t) && DECL_CONTEXT (t) == p->from_context)
4568 if (TREE_CODE (t) == LABEL_DECL)
4570 if (p->new_label_map)
4572 struct tree_map in, *out;
4573 in.from = t;
4574 out = htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
4575 if (out)
4576 *tp = t = out->to;
4579 DECL_CONTEXT (t) = p->to_context;
4581 else if (p->remap_decls_p)
4583 DECL_CONTEXT (t) = p->to_context;
4585 if (TREE_CODE (t) == VAR_DECL)
4587 struct function *f = DECL_STRUCT_FUNCTION (p->to_context);
4588 f->unexpanded_var_list
4589 = tree_cons (0, t, f->unexpanded_var_list);
/* Mark T to be removed from the original function;
   otherwise it will be given a DECL_RTL when the
   original function is expanded.  */
4594 bitmap_set_bit (p->vars_to_remove, DECL_UID (t));
4598 else if (TYPE_P (t))
4599 *walk_subtrees = 0;
4601 return NULL_TREE;
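/* A minimal, hypothetical sketch (not part of the original code) of the
   setup needed to run move_stmt_r over a single statement, remapping
   local declarations from FROM_FN to TO_FN.  This mirrors what
   move_block_to_fn does below for every statement of a block.  */

static void
example_remap_stmt (tree *stmt_p, tree from_fn, tree to_fn,
                    bitmap vars_to_remove)
{
  struct move_stmt_d d;

  /* Leaving BLOCK and NEW_LABEL_MAP as NULL is fine; move_stmt_r checks
     for them before using them.  */
  memset (&d, 0, sizeof (d));
  d.from_context = from_fn;
  d.to_context = to_fn;
  d.vars_to_remove = vars_to_remove;
  d.remap_decls_p = true;

  walk_tree (stmt_p, move_stmt_r, &d, NULL);
}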
4605 /* Move basic block BB from function CFUN to function DEST_FN. The
4606 block is moved out of the original linked list and placed after
4607 block AFTER in the new list. Also, the block is removed from the
4608 original array of blocks and placed in DEST_FN's array of blocks.
   If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
   updated to reflect the moved edges.
4612 On exit, local variables that need to be removed from
4613 CFUN->UNEXPANDED_VAR_LIST will have been added to VARS_TO_REMOVE. */
4615 static void
4616 move_block_to_fn (struct function *dest_cfun, basic_block bb,
4617 basic_block after, bool update_edge_count_p,
4618 bitmap vars_to_remove, htab_t new_label_map, int eh_offset)
4620 struct control_flow_graph *cfg;
4621 edge_iterator ei;
4622 edge e;
4623 block_stmt_iterator si;
4624 struct move_stmt_d d;
4625 unsigned old_len, new_len;
4626 basic_block *addr;
4628 /* Link BB to the new linked list. */
4629 move_block_after (bb, after);
4631 /* Update the edge count in the corresponding flowgraphs. */
4632 if (update_edge_count_p)
4633 FOR_EACH_EDGE (e, ei, bb->succs)
4635 cfun->cfg->x_n_edges--;
4636 dest_cfun->cfg->x_n_edges++;
4639 /* Remove BB from the original basic block array. */
4640 VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
4641 cfun->cfg->x_n_basic_blocks--;
4643 /* Grow DEST_CFUN's basic block array if needed. */
4644 cfg = dest_cfun->cfg;
4645 cfg->x_n_basic_blocks++;
4646 if (bb->index > cfg->x_last_basic_block)
4647 cfg->x_last_basic_block = bb->index;
4649 old_len = VEC_length (basic_block, cfg->x_basic_block_info);
4650 if ((unsigned) cfg->x_last_basic_block >= old_len)
4652 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
4653 VEC_safe_grow (basic_block, gc, cfg->x_basic_block_info, new_len);
4654 addr = VEC_address (basic_block, cfg->x_basic_block_info);
4655 memset (&addr[old_len], 0, sizeof (basic_block) * (new_len - old_len));
4658 VEC_replace (basic_block, cfg->x_basic_block_info,
4659 cfg->x_last_basic_block, bb);
4661 /* The statements in BB need to be associated with a new TREE_BLOCK.
4662 Labels need to be associated with a new label-to-block map. */
4663 memset (&d, 0, sizeof (d));
4664 d.vars_to_remove = vars_to_remove;
4666 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
4668 tree stmt = bsi_stmt (si);
4669 int region;
4671 d.from_context = cfun->decl;
4672 d.to_context = dest_cfun->decl;
4673 d.remap_decls_p = true;
4674 d.new_label_map = new_label_map;
4675 if (TREE_BLOCK (stmt))
4676 d.block = DECL_INITIAL (dest_cfun->decl);
4678 walk_tree (&stmt, move_stmt_r, &d, NULL);
4680 if (TREE_CODE (stmt) == LABEL_EXPR)
4682 tree label = LABEL_EXPR_LABEL (stmt);
4683 int uid = LABEL_DECL_UID (label);
4685 gcc_assert (uid > -1);
4687 old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
4688 if (old_len <= (unsigned) uid)
4690 new_len = 3 * uid / 2;
4691 VEC_safe_grow (basic_block, gc, cfg->x_label_to_block_map,
4692 new_len);
4693 addr = VEC_address (basic_block, cfg->x_label_to_block_map);
4694 memset (&addr[old_len], 0,
4695 sizeof (basic_block) * (new_len - old_len));
4698 VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
4699 VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
4701 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
4703 if (uid >= dest_cfun->last_label_uid)
4704 dest_cfun->last_label_uid = uid + 1;
4706 else if (TREE_CODE (stmt) == RESX_EXPR && eh_offset != 0)
4707 TREE_OPERAND (stmt, 0) =
4708 build_int_cst (NULL_TREE,
4709 TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0))
4710 + eh_offset);
4712 region = lookup_stmt_eh_region (stmt);
4713 if (region >= 0)
4715 add_stmt_to_eh_region_fn (dest_cfun, stmt, region + eh_offset);
4716 remove_stmt_from_eh_region (stmt);
4721 /* Examine the statements in BB (which is in SRC_CFUN); find and return
4722 the outermost EH region. Use REGION as the incoming base EH region. */
4724 static int
4725 find_outermost_region_in_block (struct function *src_cfun,
4726 basic_block bb, int region)
4728 block_stmt_iterator si;
4730 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
4732 tree stmt = bsi_stmt (si);
4733 int stmt_region;
4735 if (TREE_CODE (stmt) == RESX_EXPR)
4736 stmt_region = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
4737 else
4738 stmt_region = lookup_stmt_eh_region_fn (src_cfun, stmt);
4739 if (stmt_region > 0)
4741 if (region < 0)
4742 region = stmt_region;
4743 else if (stmt_region != region)
4745 region = eh_region_outermost (src_cfun, stmt_region, region);
4746 gcc_assert (region != -1);
4751 return region;
4754 static tree
4755 new_label_mapper (tree decl, void *data)
4757 htab_t hash = (htab_t) data;
4758 struct tree_map *m;
4759 void **slot;
4761 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
4763 m = xmalloc (sizeof (struct tree_map));
4764 m->hash = DECL_UID (decl);
4765 m->from = decl;
4766 m->to = create_artificial_label ();
4767 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
4769 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
4770 gcc_assert (*slot == NULL);
4772 *slot = m;
4774 return m->to;
4777 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
4778 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
4779 single basic block in the original CFG and the new basic block is
4780 returned. DEST_CFUN must not have a CFG yet.
4782 Note that the region need not be a pure SESE region. Blocks inside
4783 the region may contain calls to abort/exit. The only restriction
4784 is that ENTRY_BB should be the only entry point and it must
4785 dominate EXIT_BB.
4787 All local variables referenced in the region are assumed to be in
4788 the corresponding BLOCK_VARS and unexpanded variable lists
4789 associated with DEST_CFUN. */
4791 basic_block
4792 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
4793 basic_block exit_bb)
4795 VEC(basic_block,heap) *bbs;
4796 basic_block after, bb, *entry_pred, *exit_succ;
4797 struct function *saved_cfun;
4798 int *entry_flag, *exit_flag, eh_offset;
4799 unsigned i, num_entry_edges, num_exit_edges;
4800 edge e;
4801 edge_iterator ei;
4802 bitmap vars_to_remove;
4803 htab_t new_label_map;
4805 saved_cfun = cfun;
/* Collect all the blocks in the region.  Manually add ENTRY_BB
   because gather_blocks_in_sese_region only collects the blocks
   dominated by it.  */
4809 calculate_dominance_info (CDI_DOMINATORS);
4811 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
4812 region. */
4813 gcc_assert (entry_bb != exit_bb
4814 && (!exit_bb
4815 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
4817 bbs = NULL;
4818 VEC_safe_push (basic_block, heap, bbs, entry_bb);
4819 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
4821 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
4822 the predecessor edges to ENTRY_BB and the successor edges to
4823 EXIT_BB so that we can re-attach them to the new basic block that
4824 will replace the region. */
4825 num_entry_edges = EDGE_COUNT (entry_bb->preds);
4826 entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
4827 entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
4828 i = 0;
4829 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
4831 entry_flag[i] = e->flags;
4832 entry_pred[i++] = e->src;
4833 remove_edge (e);
4836 if (exit_bb)
4838 num_exit_edges = EDGE_COUNT (exit_bb->succs);
4839 exit_succ = (basic_block *) xcalloc (num_exit_edges,
4840 sizeof (basic_block));
4841 exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
4842 i = 0;
4843 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
4845 exit_flag[i] = e->flags;
4846 exit_succ[i++] = e->dest;
4847 remove_edge (e);
4850 else
4852 num_exit_edges = 0;
4853 exit_succ = NULL;
4854 exit_flag = NULL;
4857 /* Switch context to the child function to initialize DEST_FN's CFG. */
4858 gcc_assert (dest_cfun->cfg == NULL);
4859 cfun = dest_cfun;
4861 init_empty_tree_cfg ();
4863 /* Initialize EH information for the new function. */
4864 eh_offset = 0;
4865 new_label_map = NULL;
4866 if (saved_cfun->eh)
4868 int region = -1;
4870 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
4871 region = find_outermost_region_in_block (saved_cfun, bb, region);
4873 init_eh_for_function ();
4874 if (region != -1)
4876 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
4877 eh_offset = duplicate_eh_regions (saved_cfun, new_label_mapper,
4878 new_label_map, region, 0);
4882 cfun = saved_cfun;
4884 /* Move blocks from BBS into DEST_CFUN. */
4885 gcc_assert (VEC_length (basic_block, bbs) >= 2);
4886 after = dest_cfun->cfg->x_entry_block_ptr;
4887 vars_to_remove = BITMAP_ALLOC (NULL);
4888 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
4890 /* No need to update edge counts on the last block. It has
4891 already been updated earlier when we detached the region from
4892 the original CFG. */
4893 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, vars_to_remove,
4894 new_label_map, eh_offset);
4895 after = bb;
4898 if (new_label_map)
4899 htab_delete (new_label_map);
4901 /* Remove the variables marked in VARS_TO_REMOVE from
4902 CFUN->UNEXPANDED_VAR_LIST. Otherwise, they will be given a
4903 DECL_RTL in the context of CFUN. */
4904 if (!bitmap_empty_p (vars_to_remove))
4906 tree *p;
4908 for (p = &cfun->unexpanded_var_list; *p; )
4910 tree var = TREE_VALUE (*p);
4911 if (bitmap_bit_p (vars_to_remove, DECL_UID (var)))
4913 *p = TREE_CHAIN (*p);
4914 continue;
4917 p = &TREE_CHAIN (*p);
4921 BITMAP_FREE (vars_to_remove);
4923 /* Rewire the entry and exit blocks. The successor to the entry
4924 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
4925 the child function. Similarly, the predecessor of DEST_FN's
4926 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
4927 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
   various CFG manipulation functions get to the right CFG.
4930 FIXME, this is silly. The CFG ought to become a parameter to
4931 these helpers. */
4932 cfun = dest_cfun;
4933 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
4934 if (exit_bb)
4935 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
4936 cfun = saved_cfun;
/* Back in the original function, the SESE region has disappeared;
   create a new basic block in its place.  */
4940 bb = create_empty_bb (entry_pred[0]);
4941 for (i = 0; i < num_entry_edges; i++)
4942 make_edge (entry_pred[i], bb, entry_flag[i]);
4944 for (i = 0; i < num_exit_edges; i++)
4945 make_edge (bb, exit_succ[i], exit_flag[i]);
4947 if (exit_bb)
4949 free (exit_flag);
4950 free (exit_succ);
4952 free (entry_flag);
4953 free (entry_pred);
4954 free_dominance_info (CDI_DOMINATORS);
4955 free_dominance_info (CDI_POST_DOMINATORS);
4956 VEC_free (basic_block, heap, bbs);
4958 return bb;
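/* A minimal, hypothetical sketch (not part of the original code) of how an
   outlining pass (for example the OpenMP expander) might move a region into
   the CFG of a freshly created child function.  CHILD_FN is assumed to be a
   FUNCTION_DECL whose struct function exists but has no CFG yet, and
   ENTRY_BB must dominate EXIT_BB as described above.  */

static basic_block
example_outline_region (tree child_fn, basic_block entry_bb,
                        basic_block exit_bb)
{
  struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);

  /* The region in the current function is replaced by a single new block,
     which is returned; CHILD_CFUN now owns the moved blocks.  */
  return move_sese_region_to_fn (child_cfun, entry_bb, exit_bb);
}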
/* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h).  */
4964 void
4965 dump_function_to_file (tree fn, FILE *file, int flags)
4967 tree arg, vars, var;
4968 bool ignore_topmost_bind = false, any_var = false;
4969 basic_block bb;
4970 tree chain;
4971 struct function *saved_cfun;
4973 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
4975 arg = DECL_ARGUMENTS (fn);
4976 while (arg)
4978 print_generic_expr (file, arg, dump_flags);
4979 if (TREE_CHAIN (arg))
4980 fprintf (file, ", ");
4981 arg = TREE_CHAIN (arg);
4983 fprintf (file, ")\n");
4985 if (flags & TDF_DETAILS)
4986 dump_eh_tree (file, DECL_STRUCT_FUNCTION (fn));
4987 if (flags & TDF_RAW)
4989 dump_node (fn, TDF_SLIM | flags, file);
4990 return;
4993 /* Switch CFUN to point to FN. */
4994 saved_cfun = cfun;
4995 cfun = DECL_STRUCT_FUNCTION (fn);
4997 /* When GIMPLE is lowered, the variables are no longer available in
4998 BIND_EXPRs, so display them separately. */
4999 if (cfun && cfun->decl == fn && cfun->unexpanded_var_list)
5001 ignore_topmost_bind = true;
5003 fprintf (file, "{\n");
5004 for (vars = cfun->unexpanded_var_list; vars; vars = TREE_CHAIN (vars))
5006 var = TREE_VALUE (vars);
5008 print_generic_decl (file, var, flags);
5009 fprintf (file, "\n");
5011 any_var = true;
5015 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
5017 /* Make a CFG based dump. */
5018 check_bb_profile (ENTRY_BLOCK_PTR, file);
5019 if (!ignore_topmost_bind)
5020 fprintf (file, "{\n");
5022 if (any_var && n_basic_blocks)
5023 fprintf (file, "\n");
5025 FOR_EACH_BB (bb)
5026 dump_generic_bb (file, bb, 2, flags);
5028 fprintf (file, "}\n");
5029 check_bb_profile (EXIT_BLOCK_PTR, file);
5031 else
5033 int indent;
5035 /* Make a tree based dump. */
5036 chain = DECL_SAVED_TREE (fn);
5038 if (chain && TREE_CODE (chain) == BIND_EXPR)
5040 if (ignore_topmost_bind)
5042 chain = BIND_EXPR_BODY (chain);
5043 indent = 2;
5045 else
5046 indent = 0;
5048 else
5050 if (!ignore_topmost_bind)
5051 fprintf (file, "{\n");
5052 indent = 2;
5055 if (any_var)
5056 fprintf (file, "\n");
5058 print_generic_stmt_indented (file, chain, flags, indent);
5059 if (ignore_topmost_bind)
5060 fprintf (file, "}\n");
5063 fprintf (file, "\n\n");
5065 /* Restore CFUN. */
5066 cfun = saved_cfun;
/* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h).  */
5072 void
5073 debug_function (tree fn, int flags)
5075 dump_function_to_file (fn, stderr, flags);
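/* A minimal, hypothetical sketch (not part of the original code): a pass
   that wants the current function body in its dump file could emit it like
   this, using the usual pass-manager globals dump_file and dump_flags.  */

static void
example_dump_current_function (void)
{
  if (dump_file)
    dump_function_to_file (current_function_decl, dump_file, dump_flags);
}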
/* Pretty printing of the loop intermediate representation.  */
5080 static void print_loop (FILE *, struct loop *, int);
5081 static void print_pred_bbs (FILE *, basic_block bb);
5082 static void print_succ_bbs (FILE *, basic_block bb);
5085 /* Print on FILE the indexes for the predecessors of basic_block BB. */
5087 static void
5088 print_pred_bbs (FILE *file, basic_block bb)
5090 edge e;
5091 edge_iterator ei;
5093 FOR_EACH_EDGE (e, ei, bb->preds)
5094 fprintf (file, "bb_%d ", e->src->index);
5098 /* Print on FILE the indexes for the successors of basic_block BB. */
5100 static void
5101 print_succ_bbs (FILE *file, basic_block bb)
5103 edge e;
5104 edge_iterator ei;
5106 FOR_EACH_EDGE (e, ei, bb->succs)
5107 fprintf (file, "bb_%d ", e->dest->index);
5111 /* Pretty print LOOP on FILE, indented INDENT spaces. */
5113 static void
5114 print_loop (FILE *file, struct loop *loop, int indent)
5116 char *s_indent;
5117 basic_block bb;
5119 if (loop == NULL)
5120 return;
5122 s_indent = (char *) alloca ((size_t) indent + 1);
5123 memset ((void *) s_indent, ' ', (size_t) indent);
5124 s_indent[indent] = '\0';
5126 /* Print the loop's header. */
5127 fprintf (file, "%sloop_%d\n", s_indent, loop->num);
5129 /* Print the loop's body. */
5130 fprintf (file, "%s{\n", s_indent);
5131 FOR_EACH_BB (bb)
5132 if (bb->loop_father == loop)
5134 /* Print the basic_block's header. */
5135 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
5136 print_pred_bbs (file, bb);
5137 fprintf (file, "}, succs = {");
5138 print_succ_bbs (file, bb);
5139 fprintf (file, "})\n");
5141 /* Print the basic_block's body. */
5142 fprintf (file, "%s {\n", s_indent);
5143 tree_dump_bb (bb, file, indent + 4);
5144 fprintf (file, "%s }\n", s_indent);
5147 print_loop (file, loop->inner, indent + 2);
5148 fprintf (file, "%s}\n", s_indent);
5149 print_loop (file, loop->next, indent);
/* Pretty print on FILE the loop structure of the current function,
   starting from the loop that contains its first basic block.  */
5156 void
5157 print_loop_ir (FILE *file)
5159 basic_block bb;
5161 bb = BASIC_BLOCK (NUM_FIXED_BLOCKS);
5162 if (bb && bb->loop_father)
5163 print_loop (file, bb->loop_father, 0);
/* Debug the loop structure at the tree level.  */
5169 void
5170 debug_loop_ir (void)
5172 print_loop_ir (stderr);
/* Return true if BB ends with a call, possibly followed by some
   instructions that must stay with the call.  Return false
   otherwise.  */
5180 static bool
5181 tree_block_ends_with_call_p (basic_block bb)
5183 block_stmt_iterator bsi = bsi_last (bb);
5184 return get_call_expr_in (bsi_stmt (bsi)) != NULL;
/* Return true if BB ends with a conditional branch.  Return false
   otherwise.  */
5191 static bool
5192 tree_block_ends_with_condjump_p (basic_block bb)
5194 tree stmt = last_stmt (bb);
5195 return (stmt && TREE_CODE (stmt) == COND_EXPR);
5199 /* Return true if we need to add fake edge to exit at statement T.
5200 Helper function for tree_flow_call_edges_add. */
5202 static bool
5203 need_fake_edge_p (tree t)
5205 tree call;
5207 /* NORETURN and LONGJMP calls already have an edge to exit.
5208 CONST and PURE calls do not need one.
5209 We don't currently check for CONST and PURE here, although
5210 it would be a good idea, because those attributes are
5211 figured out from the RTL in mark_constant_function, and
5212 the counter incrementation code from -fprofile-arcs
5213 leads to different results from -fbranch-probabilities. */
5214 call = get_call_expr_in (t);
5215 if (call
5216 && !(call_expr_flags (call) & ECF_NORETURN))
5217 return true;
5219 if (TREE_CODE (t) == ASM_EXPR
5220 && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
5221 return true;
5223 return false;
/* Add fake edges to the function exit for any non-constant and
   non-noreturn calls, and for volatile inline assembly, in the bitmap of
   blocks specified by BLOCKS, or to the whole CFG if BLOCKS is zero.
   Return the number of blocks that were split.
5232 The goal is to expose cases in which entering a basic block does
5233 not imply that all subsequent instructions must be executed. */
5235 static int
5236 tree_flow_call_edges_add (sbitmap blocks)
5238 int i;
5239 int blocks_split = 0;
5240 int last_bb = last_basic_block;
5241 bool check_last_block = false;
5243 if (n_basic_blocks == NUM_FIXED_BLOCKS)
5244 return 0;
5246 if (! blocks)
5247 check_last_block = true;
5248 else
5249 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
5251 /* In the last basic block, before epilogue generation, there will be
5252 a fallthru edge to EXIT. Special care is required if the last insn
5253 of the last basic block is a call because make_edge folds duplicate
5254 edges, which would result in the fallthru edge also being marked
5255 fake, which would result in the fallthru edge being removed by
5256 remove_fake_edges, which would result in an invalid CFG.
5258 Moreover, we can't elide the outgoing fake edge, since the block
5259 profiler needs to take this into account in order to solve the minimal
5260 spanning tree in the case that the call doesn't return.
5262 Handle this by adding a dummy instruction in a new last basic block. */
5263 if (check_last_block)
5265 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
5266 block_stmt_iterator bsi = bsi_last (bb);
5267 tree t = NULL_TREE;
5268 if (!bsi_end_p (bsi))
5269 t = bsi_stmt (bsi);
5271 if (t && need_fake_edge_p (t))
5273 edge e;
5275 e = find_edge (bb, EXIT_BLOCK_PTR);
5276 if (e)
5278 bsi_insert_on_edge (e, build_empty_stmt ());
5279 bsi_commit_edge_inserts ();
/* Now add fake edges to the function exit for any non-constant
   calls, since there is no way that we can determine if they will
   return or not...  */
5287 for (i = 0; i < last_bb; i++)
5289 basic_block bb = BASIC_BLOCK (i);
5290 block_stmt_iterator bsi;
5291 tree stmt, last_stmt;
5293 if (!bb)
5294 continue;
5296 if (blocks && !TEST_BIT (blocks, i))
5297 continue;
5299 bsi = bsi_last (bb);
5300 if (!bsi_end_p (bsi))
5302 last_stmt = bsi_stmt (bsi);
5305 stmt = bsi_stmt (bsi);
5306 if (need_fake_edge_p (stmt))
5308 edge e;
/* The handling above of the final block before the
   epilogue should be enough to verify that there is
   no edge to the exit block in the CFG already.
   Calling make_edge in such a case would cause us to
   mark that edge as fake and remove it later.  */
5314 #ifdef ENABLE_CHECKING
5315 if (stmt == last_stmt)
5317 e = find_edge (bb, EXIT_BLOCK_PTR);
5318 gcc_assert (e == NULL);
5320 #endif
5322 /* Note that the following may create a new basic block
5323 and renumber the existing basic blocks. */
5324 if (stmt != last_stmt)
5326 e = split_block (bb, stmt);
5327 if (e)
5328 blocks_split++;
5330 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
5332 bsi_prev (&bsi);
5334 while (!bsi_end_p (bsi));
5338 if (blocks_split)
5339 verify_flow_info ();
5341 return blocks_split;
5344 bool
5345 tree_purge_dead_eh_edges (basic_block bb)
5347 bool changed = false;
5348 edge e;
5349 edge_iterator ei;
5350 tree stmt = last_stmt (bb);
5352 if (stmt && tree_can_throw_internal (stmt))
5353 return false;
5355 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5357 if (e->flags & EDGE_EH)
5359 remove_edge (e);
5360 changed = true;
5362 else
5363 ei_next (&ei);
5366 /* Removal of dead EH edges might change dominators of not
5367 just immediate successors. E.g. when bb1 is changed so that
5368 it no longer can throw and bb1->bb3 and bb1->bb4 are dead
5369 eh edges purged by this function in:
         1-->2
        / \  |
       v   v |
       3-->4 |
            \|
             v
             5
5381 idom(bb5) must be recomputed. For now just free the dominance
5382 info. */
5383 if (changed)
5384 free_dominance_info (CDI_DOMINATORS);
5386 return changed;
5389 bool
5390 tree_purge_all_dead_eh_edges (bitmap blocks)
5392 bool changed = false;
5393 unsigned i;
5394 bitmap_iterator bi;
5396 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
5398 changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
5401 return changed;
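/* A minimal, hypothetical sketch (not part of the original code): purging
   the dead EH edges of a single block by going through the bitmap-based
   entry point above.  (A single block can of course also be handled
   directly with tree_purge_dead_eh_edges.)  */

static bool
example_purge_eh_for_block (basic_block bb)
{
  bitmap to_purge = BITMAP_ALLOC (NULL);
  bool changed;

  bitmap_set_bit (to_purge, bb->index);
  changed = tree_purge_all_dead_eh_edges (to_purge);
  BITMAP_FREE (to_purge);

  return changed;
}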
5404 /* This function is called whenever a new edge is created or
5405 redirected. */
5407 static void
5408 tree_execute_on_growing_pred (edge e)
5410 basic_block bb = e->dest;
5412 if (phi_nodes (bb))
5413 reserve_phi_args_for_new_edge (bb);
5416 /* This function is called immediately before edge E is removed from
5417 the edge vector E->dest->preds. */
5419 static void
5420 tree_execute_on_shrinking_pred (edge e)
5422 if (phi_nodes (e->dest))
5423 remove_phi_args (e);
5426 /*---------------------------------------------------------------------------
5427 Helper functions for Loop versioning
5428 ---------------------------------------------------------------------------*/
/* Adjust the phi nodes for the 'first' basic block.  The 'second' basic
   block is a copy of 'first' and both of them are dominated by the
   'new_head' basic block.  'new_head' was created by splitting the
   incoming edge of 'second', so the edge from 'new_head' to 'second'
   already received its phi arguments from split_edge ().  Later, an
   additional edge 'e' was created to connect 'new_head' and 'first'.
   This routine adds to edge 'e' the phi arguments that the
   'new_head'->'second' edge received as part of the edge splitting.  */
5439 static void
5440 tree_lv_adjust_loop_header_phi (basic_block first, basic_block second,
5441 basic_block new_head, edge e)
5443 tree phi1, phi2;
5444 edge e2 = find_edge (new_head, second);
5446 /* Because NEW_HEAD has been created by splitting SECOND's incoming
5447 edge, we should always have an edge from NEW_HEAD to SECOND. */
5448 gcc_assert (e2 != NULL);
/* Walk all the phi nodes of the 'second' basic block and add the
   corresponding phi args to edge 'e' of the 'first' head.  The phi
   args are always in the correct order.  */
5453 for (phi2 = phi_nodes (second), phi1 = phi_nodes (first);
5454 phi2 && phi1;
5455 phi2 = PHI_CHAIN (phi2), phi1 = PHI_CHAIN (phi1))
5457 tree def = PHI_ARG_DEF (phi2, e2->dest_idx);
5458 add_phi_arg (phi1, def, e);
/* Add an if-else statement to COND_BB with condition COND_EXPR.
   SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
   the destination of the ELSE part.  */
5465 static void
5466 tree_lv_add_condition_to_bb (basic_block first_head, basic_block second_head,
5467 basic_block cond_bb, void *cond_e)
5469 block_stmt_iterator bsi;
5470 tree goto1 = NULL_TREE;
5471 tree goto2 = NULL_TREE;
5472 tree new_cond_expr = NULL_TREE;
5473 tree cond_expr = (tree) cond_e;
5474 edge e0;
5476 /* Build new conditional expr */
5477 goto1 = build1 (GOTO_EXPR, void_type_node, tree_block_label (first_head));
5478 goto2 = build1 (GOTO_EXPR, void_type_node, tree_block_label (second_head));
5479 new_cond_expr = build3 (COND_EXPR, void_type_node, cond_expr, goto1, goto2);
5481 /* Add new cond in cond_bb. */
5482 bsi = bsi_start (cond_bb);
5483 bsi_insert_after (&bsi, new_cond_expr, BSI_NEW_STMT);
5484 /* Adjust edges appropriately to connect new head with first head
5485 as well as second head. */
5486 e0 = single_succ_edge (cond_bb);
5487 e0->flags &= ~EDGE_FALLTHRU;
5488 e0->flags |= EDGE_FALSE_VALUE;
5491 struct cfg_hooks tree_cfg_hooks = {
5492 "tree",
5493 tree_verify_flow_info,
5494 tree_dump_bb, /* dump_bb */
5495 create_bb, /* create_basic_block */
5496 tree_redirect_edge_and_branch,/* redirect_edge_and_branch */
5497 tree_redirect_edge_and_branch_force,/* redirect_edge_and_branch_force */
5498 remove_bb, /* delete_basic_block */
5499 tree_split_block, /* split_block */
5500 tree_move_block_after, /* move_block_after */
5501 tree_can_merge_blocks_p, /* can_merge_blocks_p */
5502 tree_merge_blocks, /* merge_blocks */
5503 tree_predict_edge, /* predict_edge */
5504 tree_predicted_by_p, /* predicted_by_p */
5505 tree_can_duplicate_bb_p, /* can_duplicate_block_p */
5506 tree_duplicate_bb, /* duplicate_block */
5507 tree_split_edge, /* split_edge */
5508 tree_make_forwarder_block, /* make_forward_block */
5509 NULL, /* tidy_fallthru_edge */
5510 tree_block_ends_with_call_p, /* block_ends_with_call_p */
5511 tree_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
5512 tree_flow_call_edges_add, /* flow_call_edges_add */
5513 tree_execute_on_growing_pred, /* execute_on_growing_pred */
5514 tree_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
5515 tree_duplicate_loop_to_header_edge, /* duplicate loop for trees */
5516 tree_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
5517 tree_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
5518 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
5519 flush_pending_stmts /* flush_pending_stmts */
5523 /* Split all critical edges. */
5525 static unsigned int
5526 split_critical_edges (void)
5528 basic_block bb;
5529 edge e;
5530 edge_iterator ei;
/* split_edge can redirect edges out of SWITCH_EXPRs, which can get
   expensive.  So we want to enable recording of edge-to-CASE_LABEL_EXPR
   mappings around the calls to split_edge.  */
5535 start_recording_case_labels ();
5536 FOR_ALL_BB (bb)
5538 FOR_EACH_EDGE (e, ei, bb->succs)
5539 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
5541 split_edge (e);
5544 end_recording_case_labels ();
5545 return 0;
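/* A hypothetical sketch (not part of the original code) spelling out
   essentially the condition tested by EDGE_CRITICAL_P above: an edge is
   critical when its source has several successors and its destination has
   several predecessors, so code cannot be inserted on it without first
   splitting it.  */

static bool
example_edge_is_critical (edge e)
{
  return (EDGE_COUNT (e->src->succs) >= 2
          && EDGE_COUNT (e->dest->preds) >= 2);
}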
5548 struct tree_opt_pass pass_split_crit_edges =
5550 "crited", /* name */
5551 NULL, /* gate */
5552 split_critical_edges, /* execute */
5553 NULL, /* sub */
5554 NULL, /* next */
5555 0, /* static_pass_number */
5556 TV_TREE_SPLIT_EDGES, /* tv_id */
5557 PROP_cfg, /* properties required */
5558 PROP_no_crit_edges, /* properties_provided */
5559 0, /* properties_destroyed */
5560 0, /* todo_flags_start */
5561 TODO_dump_func, /* todo_flags_finish */
5562 0 /* letter */
/* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
   a temporary, register the temporary to be renamed if necessary,
   and return it.  The statements that compute EXP are inserted
   before the current statement in BSI.  */
5571 tree
5572 gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
5574 tree t, new_stmt, orig_stmt;
5576 if (is_gimple_val (exp))
5577 return exp;
5579 t = make_rename_temp (type, NULL);
5580 new_stmt = build2 (MODIFY_EXPR, type, t, exp);
5582 orig_stmt = bsi_stmt (*bsi);
5583 SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
5584 TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
5586 bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
5588 return t;
5591 /* Build a ternary operation and gimplify it. Emit code before BSI.
5592 Return the gimple_val holding the result. */
5594 tree
5595 gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
5596 tree type, tree a, tree b, tree c)
5598 tree ret;
5600 ret = fold_build3 (code, type, a, b, c);
5601 STRIP_NOPS (ret);
5603 return gimplify_val (bsi, type, ret);
5606 /* Build a binary operation and gimplify it. Emit code before BSI.
5607 Return the gimple_val holding the result. */
5609 tree
5610 gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
5611 tree type, tree a, tree b)
5613 tree ret;
5615 ret = fold_build2 (code, type, a, b);
5616 STRIP_NOPS (ret);
5618 return gimplify_val (bsi, type, ret);
5621 /* Build a unary operation and gimplify it. Emit code before BSI.
5622 Return the gimple_val holding the result. */
5624 tree
5625 gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
5626 tree a)
5628 tree ret;
5630 ret = fold_build1 (code, type, a);
5631 STRIP_NOPS (ret);
5633 return gimplify_val (bsi, type, ret);
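/* A minimal, hypothetical sketch (not part of the original code) of how the
   gimplify_build* helpers above are typically chained: materialize
   (A + B) * C as GIMPLE temporaries emitted before the statement at *BSI.  */

static tree
example_build_madd (block_stmt_iterator *bsi, tree type,
                    tree a, tree b, tree c)
{
  /* Each call folds the expression, gimplifies it into a fresh temporary
     and inserts the computation before *BSI.  */
  tree sum = gimplify_build2 (bsi, PLUS_EXPR, type, a, b);
  return gimplify_build2 (bsi, MULT_EXPR, type, sum, c);
}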
5638 /* Emit return warnings. */
5640 static unsigned int
5641 execute_warn_function_return (void)
5643 #ifdef USE_MAPPED_LOCATION
5644 source_location location;
5645 #else
5646 location_t *locus;
5647 #endif
5648 tree last;
5649 edge e;
5650 edge_iterator ei;
5652 /* If we have a path to EXIT, then we do return. */
5653 if (TREE_THIS_VOLATILE (cfun->decl)
5654 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
5656 #ifdef USE_MAPPED_LOCATION
5657 location = UNKNOWN_LOCATION;
5658 #else
5659 locus = NULL;
5660 #endif
5661 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5663 last = last_stmt (e->src);
5664 if (TREE_CODE (last) == RETURN_EXPR
5665 #ifdef USE_MAPPED_LOCATION
5666 && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
5667 #else
5668 && (locus = EXPR_LOCUS (last)) != NULL)
5669 #endif
5670 break;
5672 #ifdef USE_MAPPED_LOCATION
5673 if (location == UNKNOWN_LOCATION)
5674 location = cfun->function_end_locus;
5675 warning (0, "%H%<noreturn%> function does return", &location);
5676 #else
5677 if (!locus)
5678 locus = &cfun->function_end_locus;
5679 warning (0, "%H%<noreturn%> function does return", locus);
5680 #endif
5683 /* If we see "return;" in some basic block, then we do reach the end
5684 without returning a value. */
5685 else if (warn_return_type
5686 && !TREE_NO_WARNING (cfun->decl)
5687 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
5688 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
5690 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5692 tree last = last_stmt (e->src);
5693 if (TREE_CODE (last) == RETURN_EXPR
5694 && TREE_OPERAND (last, 0) == NULL
5695 && !TREE_NO_WARNING (last))
5697 #ifdef USE_MAPPED_LOCATION
5698 location = EXPR_LOCATION (last);
5699 if (location == UNKNOWN_LOCATION)
5700 location = cfun->function_end_locus;
5701 warning (0, "%Hcontrol reaches end of non-void function", &location);
5702 #else
5703 locus = EXPR_LOCUS (last);
5704 if (!locus)
5705 locus = &cfun->function_end_locus;
5706 warning (0, "%Hcontrol reaches end of non-void function", locus);
5707 #endif
5708 TREE_NO_WARNING (cfun->decl) = 1;
5709 break;
5713 return 0;
5717 /* Given a basic block B which ends with a conditional and has
5718 precisely two successors, determine which of the edges is taken if
5719 the conditional is true and which is taken if the conditional is
5720 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
5722 void
5723 extract_true_false_edges_from_block (basic_block b,
5724 edge *true_edge,
5725 edge *false_edge)
5727 edge e = EDGE_SUCC (b, 0);
5729 if (e->flags & EDGE_TRUE_VALUE)
5731 *true_edge = e;
5732 *false_edge = EDGE_SUCC (b, 1);
5734 else
5736 *false_edge = e;
5737 *true_edge = EDGE_SUCC (b, 1);
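/* A minimal, hypothetical sketch (not part of the original code) of a
   typical use of extract_true_false_edges_from_block on a block that is
   known to end in a COND_EXPR.  */

static edge
example_true_edge (basic_block bb)
{
  edge true_edge, false_edge;

  gcc_assert (TREE_CODE (last_stmt (bb)) == COND_EXPR);
  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

  /* FALSE_EDGE is ignored here; a real caller would usually use both.  */
  return true_edge;
}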
5741 struct tree_opt_pass pass_warn_function_return =
5743 NULL, /* name */
5744 NULL, /* gate */
5745 execute_warn_function_return, /* execute */
5746 NULL, /* sub */
5747 NULL, /* next */
5748 0, /* static_pass_number */
5749 0, /* tv_id */
5750 PROP_cfg, /* properties_required */
5751 0, /* properties_provided */
5752 0, /* properties_destroyed */
5753 0, /* todo_flags_start */
5754 0, /* todo_flags_finish */
5755 0 /* letter */
5758 /* Emit noreturn warnings. */
5760 static unsigned int
5761 execute_warn_function_noreturn (void)
5763 if (warn_missing_noreturn
5764 && !TREE_THIS_VOLATILE (cfun->decl)
5765 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
5766 && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl))
5767 warning (OPT_Wmissing_noreturn, "%Jfunction might be possible candidate "
5768 "for attribute %<noreturn%>",
5769 cfun->decl);
5770 return 0;
5773 struct tree_opt_pass pass_warn_function_noreturn =
5775 NULL, /* name */
5776 NULL, /* gate */
5777 execute_warn_function_noreturn, /* execute */
5778 NULL, /* sub */
5779 NULL, /* next */
5780 0, /* static_pass_number */
5781 0, /* tv_id */
5782 PROP_cfg, /* properties_required */
5783 0, /* properties_provided */
5784 0, /* properties_destroyed */
5785 0, /* todo_flags_start */
5786 0, /* todo_flags_finish */
5787 0 /* letter */