gcc/tree-cfg.c
1 /* Control flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007
3 Free Software Foundation, Inc.
4 Contributed by Diego Novillo <dnovillo@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to
20 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
21 Boston, MA 02110-1301, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "rtl.h"
29 #include "tm_p.h"
30 #include "hard-reg-set.h"
31 #include "basic-block.h"
32 #include "output.h"
33 #include "flags.h"
34 #include "function.h"
35 #include "expr.h"
36 #include "ggc.h"
37 #include "langhooks.h"
38 #include "diagnostic.h"
39 #include "tree-flow.h"
40 #include "timevar.h"
41 #include "tree-dump.h"
42 #include "tree-pass.h"
43 #include "toplev.h"
44 #include "except.h"
45 #include "cfgloop.h"
46 #include "cfglayout.h"
47 #include "hashtab.h"
48 #include "tree-ssa-propagate.h"
49 #include "value-prof.h"
50 #include "pointer-set.h"
52 /* This file contains functions for building the Control Flow Graph (CFG)
53 for a function tree. */
55 /* Local declarations. */
57 /* Initial capacity for the basic block array. */
58 static const int initial_cfg_capacity = 20;
60 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
61 which use a particular edge. The CASE_LABEL_EXPRs are chained together
62 via their TREE_CHAIN field, which we clear after we're done with the
63 hash table to prevent problems with duplication of SWITCH_EXPRs.
65 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
66 update the case vector in response to edge redirections.
68 Right now this table is set up and torn down at key points in the
69 compilation process. It would be nice if we could make the table
70 more persistent. The key is getting notification of changes to
71 the CFG (particularly edge removal, creation and redirection). */
73 struct edge_to_cases_elt
75 /* The edge itself. Necessary for hashing and equality tests. */
76 edge e;
78 /* The case labels associated with this edge. We link these up via
79 their TREE_CHAIN field, then we wipe out the TREE_CHAIN fields
80 when we destroy the hash table. This prevents problems when copying
81 SWITCH_EXPRs. */
82 tree case_labels;
85 static htab_t edge_to_cases;
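/* Illustrative usage sketch, not part of GCC: a hypothetical caller that
   redirects edges leaving a SWITCH_EXPR block could bracket its work with
   the recording functions defined later in this file, so that the case
   vector is updated through this table rather than by rescanning
   SWITCH_LABELS.  The function example_redirect_switch_edge below is made
   up for illustration; start_recording_case_labels,
   end_recording_case_labels and redirect_edge_and_branch are the real
   entry points assumed here.  */
#if 0
static void
example_redirect_switch_edge (edge old_e, basic_block new_dest)
{
  start_recording_case_labels ();
  /* While recording, redirecting an edge out of a SWITCH_EXPR block
     updates the CASE_LABEL_EXPRs chained for OLD_E in EDGE_TO_CASES.  */
  redirect_edge_and_branch (old_e, new_dest);
  end_recording_case_labels ();
}
#endif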
87 /* CFG statistics. */
88 struct cfg_stats_d
90 long num_merged_labels;
93 static struct cfg_stats_d cfg_stats;
95 /* Nonzero if we found a computed goto while building basic blocks. */
96 static bool found_computed_goto;
98 /* Basic blocks and flowgraphs. */
99 static basic_block create_bb (void *, void *, basic_block);
100 static void make_blocks (tree);
101 static void factor_computed_gotos (void);
103 /* Edges. */
104 static void make_edges (void);
105 static void make_cond_expr_edges (basic_block);
106 static void make_switch_expr_edges (basic_block);
107 static void make_goto_expr_edges (basic_block);
108 static edge tree_redirect_edge_and_branch (edge, basic_block);
109 static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
110 static unsigned int split_critical_edges (void);
112 /* Various helpers. */
113 static inline bool stmt_starts_bb_p (tree, tree);
114 static int tree_verify_flow_info (void);
115 static void tree_make_forwarder_block (edge);
116 static void tree_cfg2vcg (FILE *);
117 static inline void change_bb_for_stmt (tree t, basic_block bb);
119 /* Flowgraph optimization and cleanup. */
120 static void tree_merge_blocks (basic_block, basic_block);
121 static bool tree_can_merge_blocks_p (basic_block, basic_block);
122 static void remove_bb (basic_block);
123 static edge find_taken_edge_computed_goto (basic_block, tree);
124 static edge find_taken_edge_cond_expr (basic_block, tree);
125 static edge find_taken_edge_switch_expr (basic_block, tree);
126 static tree find_case_label_for_value (tree, tree);
128 void
129 init_empty_tree_cfg (void)
131 /* Initialize the basic block array. */
132 init_flow ();
133 profile_status = PROFILE_ABSENT;
134 n_basic_blocks = NUM_FIXED_BLOCKS;
135 last_basic_block = NUM_FIXED_BLOCKS;
136 basic_block_info = VEC_alloc (basic_block, gc, initial_cfg_capacity);
137 VEC_safe_grow_cleared (basic_block, gc, basic_block_info,
138 initial_cfg_capacity);
140 /* Build a mapping of labels to their associated blocks. */
141 label_to_block_map = VEC_alloc (basic_block, gc, initial_cfg_capacity);
142 VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
143 initial_cfg_capacity);
145 SET_BASIC_BLOCK (ENTRY_BLOCK, ENTRY_BLOCK_PTR);
146 SET_BASIC_BLOCK (EXIT_BLOCK, EXIT_BLOCK_PTR);
147 ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
148 EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
151 /*---------------------------------------------------------------------------
152 Create basic blocks
153 ---------------------------------------------------------------------------*/
155 /* Entry point to the CFG builder for trees. TP points to the list of
156 statements to be added to the flowgraph. */
158 static void
159 build_tree_cfg (tree *tp)
161 /* Register specific tree functions. */
162 tree_register_cfg_hooks ();
164 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
166 init_empty_tree_cfg ();
168 found_computed_goto = 0;
169 make_blocks (*tp);
171 /* Computed gotos are hell to deal with, especially if there are
172 lots of them with a large number of destinations. So we factor
173 them to a common computed goto location before we build the
174 edge list. After we convert back to normal form, we will un-factor
175 the computed gotos since factoring introduces an unwanted jump. */
176 if (found_computed_goto)
177 factor_computed_gotos ();
179 /* Make sure there is always at least one block, even if it's empty. */
180 if (n_basic_blocks == NUM_FIXED_BLOCKS)
181 create_empty_bb (ENTRY_BLOCK_PTR);
183 /* Adjust the size of the array. */
184 if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
185 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);
187 /* To speed up statement iterator walks, we first purge dead labels. */
188 cleanup_dead_labels ();
190 /* Group case nodes to reduce the number of edges.
191 We do this after cleaning up dead labels because otherwise we miss
192 a lot of obvious case merging opportunities. */
193 group_case_labels ();
195 /* Create the edges of the flowgraph. */
196 make_edges ();
198 /* Debugging dumps. */
200 /* Write the flowgraph to a VCG file. */
202 int local_dump_flags;
203 FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
204 if (vcg_file)
206 tree_cfg2vcg (vcg_file);
207 dump_end (TDI_vcg, vcg_file);
211 #ifdef ENABLE_CHECKING
212 verify_stmts ();
213 #endif
215 /* Dump a textual representation of the flowgraph. */
216 if (dump_file)
217 dump_tree_cfg (dump_file, dump_flags);
220 static unsigned int
221 execute_build_cfg (void)
223 build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
224 return 0;
227 struct tree_opt_pass pass_build_cfg =
229 "cfg", /* name */
230 NULL, /* gate */
231 execute_build_cfg, /* execute */
232 NULL, /* sub */
233 NULL, /* next */
234 0, /* static_pass_number */
235 TV_TREE_CFG, /* tv_id */
236 PROP_gimple_leh, /* properties_required */
237 PROP_cfg, /* properties_provided */
238 0, /* properties_destroyed */
239 0, /* todo_flags_start */
240 TODO_verify_stmts | TODO_cleanup_cfg, /* todo_flags_finish */
241 0 /* letter */
244 /* Search the CFG for any computed gotos. If found, factor them to a
245 common computed goto site. Also record the location of that site so
246 that we can un-factor the gotos after we have converted back to
247 normal form. */
249 static void
250 factor_computed_gotos (void)
252 basic_block bb;
253 tree factored_label_decl = NULL;
254 tree var = NULL;
255 tree factored_computed_goto_label = NULL;
256 tree factored_computed_goto = NULL;
258 /* We know there are one or more computed gotos in this function.
259 Examine the last statement in each basic block to see if the block
260 ends with a computed goto. */
262 FOR_EACH_BB (bb)
264 block_stmt_iterator bsi = bsi_last (bb);
265 tree last;
267 if (bsi_end_p (bsi))
268 continue;
269 last = bsi_stmt (bsi);
271 /* Ignore the computed goto we create when we factor the original
272 computed gotos. */
273 if (last == factored_computed_goto)
274 continue;
276 /* If the last statement is a computed goto, factor it. */
277 if (computed_goto_p (last))
279 tree assignment;
281 /* The first time we find a computed goto we need to create
282 the factored goto block and the variable each original
283 computed goto will use for its goto destination. */
284 if (! factored_computed_goto)
286 basic_block new_bb = create_empty_bb (bb);
287 block_stmt_iterator new_bsi = bsi_start (new_bb);
289 /* Create the destination of the factored goto. Each original
290 computed goto will put its desired destination into this
291 variable and jump to the label we create immediately
292 below. */
293 var = create_tmp_var (ptr_type_node, "gotovar");
295 /* Build a label for the new block which will contain the
296 factored computed goto. */
297 factored_label_decl = create_artificial_label ();
298 factored_computed_goto_label
299 = build1 (LABEL_EXPR, void_type_node, factored_label_decl);
300 bsi_insert_after (&new_bsi, factored_computed_goto_label,
301 BSI_NEW_STMT);
303 /* Build our new computed goto. */
304 factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
305 bsi_insert_after (&new_bsi, factored_computed_goto,
306 BSI_NEW_STMT);
309 /* Copy the original computed goto's destination into VAR. */
310 assignment = build2_gimple (GIMPLE_MODIFY_STMT,
311 var, GOTO_DESTINATION (last));
312 bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);
314 /* And re-vector the computed goto to the new destination. */
315 GOTO_DESTINATION (last) = factored_label_decl;
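/* Illustrative before/after sketch, not from the sources.  With two
   computed gotos in the original function body:

       goto *p_1;        ...        goto *q_2;

   factoring rewrites them (in C-like GIMPLE notation) as

       gotovar = p_1;               gotovar = q_2;
       goto <factored_label>;       goto <factored_label>;
       ...
     <factored_label>:
       goto *gotovar;

   so that only the single factored computed goto needs abnormal edges to
   every address-taken label.  */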
321 /* Build a flowgraph for the statement_list STMT_LIST. */
323 static void
324 make_blocks (tree stmt_list)
326 tree_stmt_iterator i = tsi_start (stmt_list);
327 tree stmt = NULL;
328 bool start_new_block = true;
329 bool first_stmt_of_list = true;
330 basic_block bb = ENTRY_BLOCK_PTR;
332 while (!tsi_end_p (i))
334 tree prev_stmt;
336 prev_stmt = stmt;
337 stmt = tsi_stmt (i);
339 /* If the statement starts a new basic block or if we have determined
340 in a previous pass that we need to create a new block for STMT, do
341 so now. */
342 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
344 if (!first_stmt_of_list)
345 stmt_list = tsi_split_statement_list_before (&i);
346 bb = create_basic_block (stmt_list, NULL, bb);
347 start_new_block = false;
350 /* Now add STMT to BB and create the subgraphs for special statement
351 codes. */
352 set_bb_for_stmt (stmt, bb);
354 if (computed_goto_p (stmt))
355 found_computed_goto = true;
357 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
358 next iteration. */
359 if (stmt_ends_bb_p (stmt))
360 start_new_block = true;
362 tsi_next (&i);
363 first_stmt_of_list = false;
368 /* Create and return a new empty basic block after bb AFTER. */
370 static basic_block
371 create_bb (void *h, void *e, basic_block after)
373 basic_block bb;
375 gcc_assert (!e);
377 /* Create and initialize a new basic block. Since alloc_block uses
378 ggc_alloc_cleared to allocate a basic block, we do not have to
379 clear the newly allocated basic block here. */
380 bb = alloc_block ();
382 bb->index = last_basic_block;
383 bb->flags = BB_NEW;
384 bb->stmt_list = h ? (tree) h : alloc_stmt_list ();
386 /* Add the new block to the linked list of blocks. */
387 link_block (bb, after);
389 /* Grow the basic block array if needed. */
390 if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
392 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
393 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
396 /* Add the newly created block to the array. */
397 SET_BASIC_BLOCK (last_basic_block, bb);
399 n_basic_blocks++;
400 last_basic_block++;
402 return bb;
406 /*---------------------------------------------------------------------------
407 Edge creation
408 ---------------------------------------------------------------------------*/
410 /* Fold COND_EXPR_COND of each COND_EXPR. */
412 void
413 fold_cond_expr_cond (void)
415 basic_block bb;
417 FOR_EACH_BB (bb)
419 tree stmt = last_stmt (bb);
421 if (stmt
422 && TREE_CODE (stmt) == COND_EXPR)
424 tree cond = fold (COND_EXPR_COND (stmt));
425 if (integer_zerop (cond))
426 COND_EXPR_COND (stmt) = boolean_false_node;
427 else if (integer_onep (cond))
428 COND_EXPR_COND (stmt) = boolean_true_node;
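/* Tiny worked example, not from the sources: a condition such as "2 > 3"
   folds to a zero constant, which the loop above canonicalizes to
   boolean_false_node; the subsequent CFG cleanup can then use
   find_taken_edge to remove the edge for the unreachable THEN arm.  */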
433 /* Join all the blocks in the flowgraph. */
435 static void
436 make_edges (void)
438 basic_block bb;
439 struct omp_region *cur_region = NULL;
441 /* Create an edge from entry to the first block with executable
442 statements in it. */
443 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);
445 /* Traverse the basic block array placing edges. */
446 FOR_EACH_BB (bb)
448 tree last = last_stmt (bb);
449 bool fallthru;
451 if (last)
453 enum tree_code code = TREE_CODE (last);
454 switch (code)
456 case GOTO_EXPR:
457 make_goto_expr_edges (bb);
458 fallthru = false;
459 break;
460 case RETURN_EXPR:
461 make_edge (bb, EXIT_BLOCK_PTR, 0);
462 fallthru = false;
463 break;
464 case COND_EXPR:
465 make_cond_expr_edges (bb);
466 fallthru = false;
467 break;
468 case SWITCH_EXPR:
469 make_switch_expr_edges (bb);
470 fallthru = false;
471 break;
472 case RESX_EXPR:
473 make_eh_edges (last);
474 fallthru = false;
475 break;
477 case CALL_EXPR:
478 /* If this function receives a nonlocal goto, then we need to
479 make edges from this call site to all the nonlocal goto
480 handlers. */
481 if (tree_can_make_abnormal_goto (last))
482 make_abnormal_goto_edges (bb, true);
484 /* If this statement has reachable exception handlers, then
485 create abnormal edges to them. */
486 make_eh_edges (last);
488 /* Some calls are known not to return. */
489 fallthru = !(call_expr_flags (last) & ECF_NORETURN);
490 break;
492 case MODIFY_EXPR:
493 gcc_unreachable ();
495 case GIMPLE_MODIFY_STMT:
496 if (is_ctrl_altering_stmt (last))
498 /* A GIMPLE_MODIFY_STMT may have a CALL_EXPR on its RHS and
499 the CALL_EXPR may have an abnormal edge. Search the RHS
500 for this case and create any required edges. */
501 if (tree_can_make_abnormal_goto (last))
502 make_abnormal_goto_edges (bb, true);
504 make_eh_edges (last);
506 fallthru = true;
507 break;
509 case OMP_PARALLEL:
510 case OMP_FOR:
511 case OMP_SINGLE:
512 case OMP_MASTER:
513 case OMP_ORDERED:
514 case OMP_CRITICAL:
515 case OMP_SECTION:
516 cur_region = new_omp_region (bb, code, cur_region);
517 fallthru = true;
518 break;
520 case OMP_SECTIONS:
521 cur_region = new_omp_region (bb, code, cur_region);
522 fallthru = false;
523 break;
525 case OMP_RETURN:
526 /* In the case of an OMP_SECTION, the edge will go somewhere
527 other than the next block. This will be created later. */
528 cur_region->exit = bb;
529 fallthru = cur_region->type != OMP_SECTION;
530 cur_region = cur_region->outer;
531 break;
533 case OMP_CONTINUE:
534 cur_region->cont = bb;
535 switch (cur_region->type)
537 case OMP_FOR:
538 /* ??? Technically there should be some sort of loopback
539 edge here, but it goes to a block that doesn't exist yet,
540 and without it, updating the ssa form would be a real
541 bear. Fortunately, we don't yet do ssa before expanding
542 these nodes. */
543 break;
545 case OMP_SECTIONS:
546 /* Wire up the edges into and out of the nested sections. */
547 /* ??? Similarly wrt loopback. */
549 struct omp_region *i;
550 for (i = cur_region->inner; i ; i = i->next)
552 gcc_assert (i->type == OMP_SECTION);
553 make_edge (cur_region->entry, i->entry, 0);
554 make_edge (i->exit, bb, EDGE_FALLTHRU);
557 break;
559 default:
560 gcc_unreachable ();
562 fallthru = true;
563 break;
565 default:
566 gcc_assert (!stmt_ends_bb_p (last));
567 fallthru = true;
570 else
571 fallthru = true;
573 if (fallthru)
574 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
577 if (root_omp_region)
578 free_omp_regions ();
580 /* Fold COND_EXPR_COND of each COND_EXPR. */
581 fold_cond_expr_cond ();
585 /* Create the edges for a COND_EXPR starting at block BB.
586 At this point, both clauses must contain only simple gotos. */
588 static void
589 make_cond_expr_edges (basic_block bb)
591 tree entry = last_stmt (bb);
592 basic_block then_bb, else_bb;
593 tree then_label, else_label;
594 edge e;
596 gcc_assert (entry);
597 gcc_assert (TREE_CODE (entry) == COND_EXPR);
599 /* Entry basic blocks for each component. */
600 then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
601 else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
602 then_bb = label_to_block (then_label);
603 else_bb = label_to_block (else_label);
605 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
606 #ifdef USE_MAPPED_LOCATION
607 e->goto_locus = EXPR_LOCATION (COND_EXPR_THEN (entry));
608 #else
609 e->goto_locus = EXPR_LOCUS (COND_EXPR_THEN (entry));
610 #endif
611 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
612 if (e)
614 #ifdef USE_MAPPED_LOCATION
615 e->goto_locus = EXPR_LOCATION (COND_EXPR_ELSE (entry));
616 #else
617 e->goto_locus = EXPR_LOCUS (COND_EXPR_ELSE (entry));
618 #endif
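/* Illustrative sketch, not from the sources.  After lowering, a
   conditional has the form

       if (a_1 > b_2) goto L1; else goto L2;

   and the function above creates BB -> block(L1) with EDGE_TRUE_VALUE and
   BB -> block(L2) with EDGE_FALSE_VALUE, copying the goto locations onto
   the edges for later diagnostics.  */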
622 /* Hashing routine for EDGE_TO_CASES. */
624 static hashval_t
625 edge_to_cases_hash (const void *p)
627 edge e = ((struct edge_to_cases_elt *)p)->e;
629 /* Hash on the edge itself (which is a pointer). */
630 return htab_hash_pointer (e);
633 /* Equality routine for EDGE_TO_CASES; edges are unique, so testing
634 for equality is just a pointer comparison. */
636 static int
637 edge_to_cases_eq (const void *p1, const void *p2)
639 edge e1 = ((struct edge_to_cases_elt *)p1)->e;
640 edge e2 = ((struct edge_to_cases_elt *)p2)->e;
642 return e1 == e2;
645 /* Called for each element in the hash table (P) as we delete the
646 edge to cases hash table.
648 Clear all the TREE_CHAINs to prevent problems with copying of
649 SWITCH_EXPRs and structure sharing rules, then free the hash table
650 element. */
652 static void
653 edge_to_cases_cleanup (void *p)
655 struct edge_to_cases_elt *elt = (struct edge_to_cases_elt *) p;
656 tree t, next;
658 for (t = elt->case_labels; t; t = next)
660 next = TREE_CHAIN (t);
661 TREE_CHAIN (t) = NULL;
663 free (p);
666 /* Start recording information mapping edges to case labels. */
668 void
669 start_recording_case_labels (void)
671 gcc_assert (edge_to_cases == NULL);
673 edge_to_cases = htab_create (37,
674 edge_to_cases_hash,
675 edge_to_cases_eq,
676 edge_to_cases_cleanup);
679 /* Return nonzero if we are recording information for case labels. */
681 static bool
682 recording_case_labels_p (void)
684 return (edge_to_cases != NULL);
687 /* Stop recording information mapping edges to case labels and
688 remove any information we have recorded. */
689 void
690 end_recording_case_labels (void)
692 htab_delete (edge_to_cases);
693 edge_to_cases = NULL;
696 /* Record that CASE_LABEL (a CASE_LABEL_EXPR) references edge E. */
698 static void
699 record_switch_edge (edge e, tree case_label)
701 struct edge_to_cases_elt *elt;
702 void **slot;
704 /* Build a hash table element so we can see if E is already
705 in the table. */
706 elt = XNEW (struct edge_to_cases_elt);
707 elt->e = e;
708 elt->case_labels = case_label;
710 slot = htab_find_slot (edge_to_cases, elt, INSERT);
712 if (*slot == NULL)
714 /* E was not in the hash table. Install E into the hash table. */
715 *slot = (void *)elt;
717 else
719 /* E was already in the hash table. Free ELT as we do not need it
720 anymore. */
721 free (elt);
723 /* Get the entry stored in the hash table. */
724 elt = (struct edge_to_cases_elt *) *slot;
726 /* Add it to the chain of CASE_LABEL_EXPRs referencing E. */
727 TREE_CHAIN (case_label) = elt->case_labels;
728 elt->case_labels = case_label;
732 /* If we are inside a {start,end}_recording_cases block, then return
733 a chain of CASE_LABEL_EXPRs from T which reference E.
735 Otherwise return NULL. */
737 static tree
738 get_cases_for_edge (edge e, tree t)
740 struct edge_to_cases_elt elt, *elt_p;
741 void **slot;
742 size_t i, n;
743 tree vec;
745 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
746 chains available. Return NULL so the caller can detect this case. */
747 if (!recording_case_labels_p ())
748 return NULL;
750 restart:
751 elt.e = e;
752 elt.case_labels = NULL;
753 slot = htab_find_slot (edge_to_cases, &elt, NO_INSERT);
755 if (slot)
757 elt_p = (struct edge_to_cases_elt *)*slot;
758 return elt_p->case_labels;
761 /* If we did not find E in the hash table, then this must be the first
762 time we have been queried for information about E & T. Add all the
763 elements from T to the hash table then perform the query again. */
765 vec = SWITCH_LABELS (t);
766 n = TREE_VEC_LENGTH (vec);
767 for (i = 0; i < n; i++)
769 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
770 basic_block label_bb = label_to_block (lab);
771 record_switch_edge (find_edge (e->src, label_bb), TREE_VEC_ELT (vec, i));
773 goto restart;
776 /* Create the edges for a SWITCH_EXPR starting at block BB.
777 At this point, the switch body has been lowered and the
778 SWITCH_LABELS filled in, so this is in effect a multi-way branch. */
780 static void
781 make_switch_expr_edges (basic_block bb)
783 tree entry = last_stmt (bb);
784 size_t i, n;
785 tree vec;
787 vec = SWITCH_LABELS (entry);
788 n = TREE_VEC_LENGTH (vec);
790 for (i = 0; i < n; ++i)
792 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
793 basic_block label_bb = label_to_block (lab);
794 make_edge (bb, label_bb, 0);
799 /* Return the basic block holding label DEST. */
801 basic_block
802 label_to_block_fn (struct function *ifun, tree dest)
804 int uid = LABEL_DECL_UID (dest);
806 /* We would die hard when faced with an undefined label. Emit a label to
807 the very first basic block. This will hopefully make even the dataflow
808 and undefined variable warnings reasonably accurate. */
809 if ((errorcount || sorrycount) && uid < 0)
811 block_stmt_iterator bsi =
812 bsi_start (BASIC_BLOCK (NUM_FIXED_BLOCKS));
813 tree stmt;
815 stmt = build1 (LABEL_EXPR, void_type_node, dest);
816 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
817 uid = LABEL_DECL_UID (dest);
819 if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
820 <= (unsigned int) uid)
821 return NULL;
822 return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
825 /* Create edges for an abnormal goto statement at block BB. If FOR_CALL
826 is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */
828 void
829 make_abnormal_goto_edges (basic_block bb, bool for_call)
831 basic_block target_bb;
832 block_stmt_iterator bsi;
834 FOR_EACH_BB (target_bb)
835 for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
837 tree target = bsi_stmt (bsi);
839 if (TREE_CODE (target) != LABEL_EXPR)
840 break;
842 target = LABEL_EXPR_LABEL (target);
844 /* Make an edge to every label block that has been marked as a
845 potential target for a computed goto or a non-local goto. */
846 if ((FORCED_LABEL (target) && !for_call)
847 || (DECL_NONLOCAL (target) && for_call))
849 make_edge (bb, target_bb, EDGE_ABNORMAL);
850 break;
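/* Illustrative summary, not from the sources.  For a computed goto
   "goto *p_1;" (FOR_CALL is false) the loop above adds an EDGE_ABNORMAL
   edge from BB to every block whose leading label has FORCED_LABEL set,
   i.e. every label whose address may have been taken; for a call that can
   perform a non-local goto (FOR_CALL is true) the abnormal edges go to
   the blocks of DECL_NONLOCAL labels instead.  */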
855 /* Create edges for a goto statement at block BB. */
857 static void
858 make_goto_expr_edges (basic_block bb)
860 block_stmt_iterator last = bsi_last (bb);
861 tree goto_t = bsi_stmt (last);
863 /* A simple GOTO creates normal edges. */
864 if (simple_goto_p (goto_t))
866 tree dest = GOTO_DESTINATION (goto_t);
867 edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
868 #ifdef USE_MAPPED_LOCATION
869 e->goto_locus = EXPR_LOCATION (goto_t);
870 #else
871 e->goto_locus = EXPR_LOCUS (goto_t);
872 #endif
873 bsi_remove (&last, true);
874 return;
877 /* A computed GOTO creates abnormal edges. */
878 make_abnormal_goto_edges (bb, false);
882 /*---------------------------------------------------------------------------
883 Flowgraph analysis
884 ---------------------------------------------------------------------------*/
886 /* Cleanup useless labels in basic blocks. This is something we wish
887 to do early because it allows us to group case labels before creating
888 the edges for the CFG, and it speeds up block statement iterators in
889 all passes later on.
890 We only run this pass once; running it more than once is probably not
891 profitable. */
893 /* A map from basic block index to the leading label of that block. */
894 static tree *label_for_bb;
896 /* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
897 static void
898 update_eh_label (struct eh_region *region)
900 tree old_label = get_eh_region_tree_label (region);
901 if (old_label)
903 tree new_label;
904 basic_block bb = label_to_block (old_label);
906 /* ??? After optimizing, there may be EH regions with labels
907 that have already been removed from the function body, so
908 there is no basic block for them. */
909 if (! bb)
910 return;
912 new_label = label_for_bb[bb->index];
913 set_eh_region_tree_label (region, new_label);
917 /* Given LABEL return the first label in the same basic block. */
918 static tree
919 main_block_label (tree label)
921 basic_block bb = label_to_block (label);
923 /* label_to_block possibly inserted an undefined label into the chain. */
924 if (!label_for_bb[bb->index])
925 label_for_bb[bb->index] = label;
926 return label_for_bb[bb->index];
929 /* Cleanup redundant labels. This is a three-step process:
930 1) Find the leading label for each block.
931 2) Redirect all references to labels to the leading labels.
932 3) Cleanup all useless labels. */
934 void
935 cleanup_dead_labels (void)
937 basic_block bb;
938 label_for_bb = XCNEWVEC (tree, last_basic_block);
940 /* Find a suitable label for each block. We use the first user-defined
941 label if there is one, or otherwise just the first label we see. */
942 FOR_EACH_BB (bb)
944 block_stmt_iterator i;
946 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
948 tree label, stmt = bsi_stmt (i);
950 if (TREE_CODE (stmt) != LABEL_EXPR)
951 break;
953 label = LABEL_EXPR_LABEL (stmt);
955 /* If we have not yet seen a label for the current block,
956 remember this one and see if there are more labels. */
957 if (! label_for_bb[bb->index])
959 label_for_bb[bb->index] = label;
960 continue;
963 /* If we did see a label for the current block already, but it
964 is an artificially created label, replace it if the current
965 label is a user defined label. */
966 if (! DECL_ARTIFICIAL (label)
967 && DECL_ARTIFICIAL (label_for_bb[bb->index]))
969 label_for_bb[bb->index] = label;
970 break;
975 /* Now redirect all jumps/branches to the selected label.
976 First do so for each block ending in a control statement. */
977 FOR_EACH_BB (bb)
979 tree stmt = last_stmt (bb);
980 if (!stmt)
981 continue;
983 switch (TREE_CODE (stmt))
985 case COND_EXPR:
987 tree true_branch, false_branch;
989 true_branch = COND_EXPR_THEN (stmt);
990 false_branch = COND_EXPR_ELSE (stmt);
992 GOTO_DESTINATION (true_branch)
993 = main_block_label (GOTO_DESTINATION (true_branch));
994 GOTO_DESTINATION (false_branch)
995 = main_block_label (GOTO_DESTINATION (false_branch));
997 break;
1000 case SWITCH_EXPR:
1002 size_t i;
1003 tree vec = SWITCH_LABELS (stmt);
1004 size_t n = TREE_VEC_LENGTH (vec);
1006 /* Replace all destination labels. */
1007 for (i = 0; i < n; ++i)
1009 tree elt = TREE_VEC_ELT (vec, i);
1010 tree label = main_block_label (CASE_LABEL (elt));
1011 CASE_LABEL (elt) = label;
1013 break;
1016 /* We have to handle GOTO_EXPRs until they're removed, and we don't
1017 remove them until after we've created the CFG edges. */
1018 case GOTO_EXPR:
1019 if (! computed_goto_p (stmt))
1021 GOTO_DESTINATION (stmt)
1022 = main_block_label (GOTO_DESTINATION (stmt));
1023 break;
1026 default:
1027 break;
1031 for_each_eh_region (update_eh_label);
1033 /* Finally, purge dead labels. All user-defined labels and labels that
1034 can be the target of non-local gotos and labels which have their
1035 address taken are preserved. */
1036 FOR_EACH_BB (bb)
1038 block_stmt_iterator i;
1039 tree label_for_this_bb = label_for_bb[bb->index];
1041 if (! label_for_this_bb)
1042 continue;
1044 for (i = bsi_start (bb); !bsi_end_p (i); )
1046 tree label, stmt = bsi_stmt (i);
1048 if (TREE_CODE (stmt) != LABEL_EXPR)
1049 break;
1051 label = LABEL_EXPR_LABEL (stmt);
1053 if (label == label_for_this_bb
1054 || ! DECL_ARTIFICIAL (label)
1055 || DECL_NONLOCAL (label)
1056 || FORCED_LABEL (label))
1057 bsi_next (&i);
1058 else
1059 bsi_remove (&i, true);
1063 free (label_for_bb);
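/* Worked example, not from the sources.  For a block that starts with

     <D.1234>:   (artificial label)
     user_lab:   (user-defined label)
     x_1 = ...;

   step 1 selects user_lab as the leading label (a user-defined label wins
   over an artificial one), step 2 rewrites gotos, switch cases and EH
   region labels that mention <D.1234> to use user_lab, and step 3 deletes
   <D.1234> because it is artificial, is not forced or non-local, and is
   no longer the block's leading label.  */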
1066 /* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1067 and scan the sorted vector of cases. Combine the ones jumping to the
1068 same label.
1069 E.g., three separate entries 1: 2: 3: become one entry 1..3: */
1071 void
1072 group_case_labels (void)
1074 basic_block bb;
1076 FOR_EACH_BB (bb)
1078 tree stmt = last_stmt (bb);
1079 if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
1081 tree labels = SWITCH_LABELS (stmt);
1082 int old_size = TREE_VEC_LENGTH (labels);
1083 int i, j, new_size = old_size;
1084 tree default_case = TREE_VEC_ELT (labels, old_size - 1);
1085 tree default_label;
1087 /* The default label is always the last case in a switch
1088 statement after gimplification. */
1089 default_label = CASE_LABEL (default_case);
1091 /* Look for possible opportunities to merge cases.
1092 Ignore the last element of the label vector because it
1093 must be the default case. */
1094 i = 0;
1095 while (i < old_size - 1)
1097 tree base_case, base_label, base_high;
1098 base_case = TREE_VEC_ELT (labels, i);
1100 gcc_assert (base_case);
1101 base_label = CASE_LABEL (base_case);
1103 /* Discard cases that have the same destination as the
1104 default case. */
1105 if (base_label == default_label)
1107 TREE_VEC_ELT (labels, i) = NULL_TREE;
1108 i++;
1109 new_size--;
1110 continue;
1113 base_high = CASE_HIGH (base_case) ?
1114 CASE_HIGH (base_case) : CASE_LOW (base_case);
1115 i++;
1116 /* Try to merge case labels. Break out when we reach the end
1117 of the label vector or when we cannot merge the next case
1118 label with the current one. */
1119 while (i < old_size - 1)
1121 tree merge_case = TREE_VEC_ELT (labels, i);
1122 tree merge_label = CASE_LABEL (merge_case);
1123 tree t = int_const_binop (PLUS_EXPR, base_high,
1124 integer_one_node, 1);
1126 /* Merge the cases if they jump to the same place,
1127 and their ranges are consecutive. */
1128 if (merge_label == base_label
1129 && tree_int_cst_equal (CASE_LOW (merge_case), t))
1131 base_high = CASE_HIGH (merge_case) ?
1132 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1133 CASE_HIGH (base_case) = base_high;
1134 TREE_VEC_ELT (labels, i) = NULL_TREE;
1135 new_size--;
1136 i++;
1138 else
1139 break;
1143 /* Compress the case labels in the label vector, and adjust the
1144 length of the vector. */
1145 for (i = 0, j = 0; i < new_size; i++)
1147 while (! TREE_VEC_ELT (labels, j))
1148 j++;
1149 TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
1151 TREE_VEC_LENGTH (labels) = new_size;
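/* Worked example, not from the sources.  Given the sorted label vector

     case 1: goto A;  case 2: goto A;  case 3: goto A;
     case 7: goto D;  default: goto D;

   the loop above drops "case 7" because it jumps to the same label as the
   default case, merges the consecutive cases 1, 2 and 3 with the same
   destination into "case 1 ... 3: goto A;", and shrinks the vector to the
   two remaining entries.  */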
1156 /* Checks whether we can merge block B into block A. */
1158 static bool
1159 tree_can_merge_blocks_p (basic_block a, basic_block b)
1161 tree stmt;
1162 block_stmt_iterator bsi;
1163 tree phi;
1165 if (!single_succ_p (a))
1166 return false;
1168 if (single_succ_edge (a)->flags & EDGE_ABNORMAL)
1169 return false;
1171 if (single_succ (a) != b)
1172 return false;
1174 if (!single_pred_p (b))
1175 return false;
1177 if (b == EXIT_BLOCK_PTR)
1178 return false;
1180 /* If A ends by a statement causing exceptions or something similar, we
1181 cannot merge the blocks. */
1182 stmt = last_stmt (a);
1183 if (stmt && stmt_ends_bb_p (stmt))
1184 return false;
1186 /* Do not allow a block with only a non-local label to be merged. */
1187 if (stmt && TREE_CODE (stmt) == LABEL_EXPR
1188 && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
1189 return false;
1191 /* It must be possible to eliminate all phi nodes in B. If ssa form
1192 is not up-to-date, we cannot eliminate any phis; however, if only
1193 some symbols as a whole are marked for renaming, this is not a problem,
1194 as phi nodes for those symbols are irrelevant in updating anyway. */
1195 phi = phi_nodes (b);
1196 if (phi)
1198 if (name_mappings_registered_p ())
1199 return false;
1201 for (; phi; phi = PHI_CHAIN (phi))
1202 if (!is_gimple_reg (PHI_RESULT (phi))
1203 && !may_propagate_copy (PHI_RESULT (phi), PHI_ARG_DEF (phi, 0)))
1204 return false;
1207 /* Do not remove user labels. */
1208 for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
1210 stmt = bsi_stmt (bsi);
1211 if (TREE_CODE (stmt) != LABEL_EXPR)
1212 break;
1213 if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
1214 return false;
1217 /* Protect the loop latches. */
1218 if (current_loops
1219 && b->loop_father->latch == b)
1220 return false;
1222 return true;
1225 /* Replaces all uses of NAME by VAL. */
1227 void
1228 replace_uses_by (tree name, tree val)
1230 imm_use_iterator imm_iter;
1231 use_operand_p use;
1232 tree stmt;
1233 edge e;
1235 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1237 if (TREE_CODE (stmt) != PHI_NODE)
1238 push_stmt_changes (&stmt);
1240 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1242 replace_exp (use, val);
1244 if (TREE_CODE (stmt) == PHI_NODE)
1246 e = PHI_ARG_EDGE (stmt, PHI_ARG_INDEX_FROM_USE (use));
1247 if (e->flags & EDGE_ABNORMAL)
1249 /* This can only occur for virtual operands, since
1250 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
1251 would prevent replacement. */
1252 gcc_assert (!is_gimple_reg (name));
1253 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1258 if (TREE_CODE (stmt) != PHI_NODE)
1260 tree rhs;
1262 fold_stmt_inplace (stmt);
1264 /* FIXME. This should go in pop_stmt_changes. */
1265 rhs = get_rhs (stmt);
1266 if (TREE_CODE (rhs) == ADDR_EXPR)
1267 recompute_tree_invariant_for_addr_expr (rhs);
1269 maybe_clean_or_replace_eh_stmt (stmt, stmt);
1271 pop_stmt_changes (&stmt);
1275 gcc_assert (zero_imm_uses_p (name));
1277 /* Also update the trees stored in loop structures. */
1278 if (current_loops)
1280 struct loop *loop;
1281 loop_iterator li;
1283 FOR_EACH_LOOP (li, loop, 0)
1285 substitute_in_loop_info (loop, name, val);
1290 /* Merge block B into block A. */
1292 static void
1293 tree_merge_blocks (basic_block a, basic_block b)
1295 block_stmt_iterator bsi;
1296 tree_stmt_iterator last;
1297 tree phi;
1299 if (dump_file)
1300 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1302 /* Remove all single-valued PHI nodes from block B of the form
1303 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1304 bsi = bsi_last (a);
1305 for (phi = phi_nodes (b); phi; phi = phi_nodes (b))
1307 tree def = PHI_RESULT (phi), use = PHI_ARG_DEF (phi, 0);
1308 tree copy;
1309 bool may_replace_uses = may_propagate_copy (def, use);
1311 /* In case we have loops to care about, do not propagate arguments of
1312 loop closed ssa phi nodes. */
1313 if (current_loops
1314 && is_gimple_reg (def)
1315 && TREE_CODE (use) == SSA_NAME
1316 && a->loop_father != b->loop_father)
1317 may_replace_uses = false;
1319 if (!may_replace_uses)
1321 gcc_assert (is_gimple_reg (def));
1323 /* Note that just emitting the copies is fine -- there is no problem
1324 with ordering of phi nodes. This is because A is the single
1325 predecessor of B, therefore results of the phi nodes cannot
1326 appear as arguments of the phi nodes. */
1327 copy = build2_gimple (GIMPLE_MODIFY_STMT, def, use);
1328 bsi_insert_after (&bsi, copy, BSI_NEW_STMT);
1329 SSA_NAME_DEF_STMT (def) = copy;
1330 remove_phi_node (phi, NULL, false);
1332 else
1334 replace_uses_by (def, use);
1335 remove_phi_node (phi, NULL, true);
1339 /* Ensure that B follows A. */
1340 move_block_after (b, a);
1342 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1343 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1345 /* Remove labels from B and set bb_for_stmt to A for other statements. */
1346 for (bsi = bsi_start (b); !bsi_end_p (bsi);)
1348 if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
1350 tree label = bsi_stmt (bsi);
1352 bsi_remove (&bsi, false);
1353 /* Now that we can thread computed gotos, we might have
1354 a situation where we have a forced label in block B.
1355 However, the label at the start of block B might still be
1356 used in other ways (think about the runtime checking for
1357 Fortran assigned gotos). So we cannot just delete the
1358 label. Instead we move the label to the start of block A. */
1359 if (FORCED_LABEL (LABEL_EXPR_LABEL (label)))
1361 block_stmt_iterator dest_bsi = bsi_start (a);
1362 bsi_insert_before (&dest_bsi, label, BSI_NEW_STMT);
1365 else
1367 change_bb_for_stmt (bsi_stmt (bsi), a);
1368 bsi_next (&bsi);
1372 /* Merge the chains. */
1373 last = tsi_last (a->stmt_list);
1374 tsi_link_after (&last, b->stmt_list, TSI_NEW_STMT);
1375 b->stmt_list = NULL;
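/* Illustrative sketch, not from the sources.  Because A is the single
   predecessor of B, every PHI node in B is single-valued, e.g.

     B:  x_3 = PHI <x_7(A)>

   If x_7 may be propagated, all uses of x_3 are replaced by x_7 and the
   PHI node is removed; otherwise a copy "x_3 = x_7" is emitted at the end
   of A, which is safe because no PHI result of B can appear as a PHI
   argument in B.  */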
1379 /* Return the one of the two successors of BB that is not reached by a
1380 complex edge, if there is one. Else, return BB. We use
1381 this in optimizations that use post-dominators for their heuristics,
1382 to catch the cases in C++ where function calls are involved. */
1384 basic_block
1385 single_noncomplex_succ (basic_block bb)
1387 edge e0, e1;
1388 if (EDGE_COUNT (bb->succs) != 2)
1389 return bb;
1391 e0 = EDGE_SUCC (bb, 0);
1392 e1 = EDGE_SUCC (bb, 1);
1393 if (e0->flags & EDGE_COMPLEX)
1394 return e1->dest;
1395 if (e1->flags & EDGE_COMPLEX)
1396 return e0->dest;
1398 return bb;
1402 /* Walk the function tree removing unnecessary statements.
1404 * Empty statement nodes are removed
1406 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1408 * Unnecessary COND_EXPRs are removed
1410 * Some unnecessary BIND_EXPRs are removed
1412 Clearly more work could be done. The trick is doing the analysis
1413 and removal fast enough to be a net improvement in compile times.
1415 Note that when we remove a control structure such as a COND_EXPR,
1416 BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1417 to ensure we eliminate all the useless code. */
1419 struct rus_data
1421 tree *last_goto;
1422 bool repeat;
1423 bool may_throw;
1424 bool may_branch;
1425 bool has_label;
1428 static void remove_useless_stmts_1 (tree *, struct rus_data *);
1430 static bool
1431 remove_useless_stmts_warn_notreached (tree stmt)
1433 if (EXPR_HAS_LOCATION (stmt))
1435 location_t loc = EXPR_LOCATION (stmt);
1436 if (LOCATION_LINE (loc) > 0)
1438 warning (0, "%Hwill never be executed", &loc);
1439 return true;
1443 switch (TREE_CODE (stmt))
1445 case STATEMENT_LIST:
1447 tree_stmt_iterator i;
1448 for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
1449 if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
1450 return true;
1452 break;
1454 case COND_EXPR:
1455 if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
1456 return true;
1457 if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
1458 return true;
1459 if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
1460 return true;
1461 break;
1463 case TRY_FINALLY_EXPR:
1464 case TRY_CATCH_EXPR:
1465 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
1466 return true;
1467 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
1468 return true;
1469 break;
1471 case CATCH_EXPR:
1472 return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
1473 case EH_FILTER_EXPR:
1474 return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
1475 case BIND_EXPR:
1476 return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));
1478 default:
1479 /* Not a live container. */
1480 break;
1483 return false;
1486 static void
1487 remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
1489 tree then_clause, else_clause, cond;
1490 bool save_has_label, then_has_label, else_has_label;
1492 save_has_label = data->has_label;
1493 data->has_label = false;
1494 data->last_goto = NULL;
1496 remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);
1498 then_has_label = data->has_label;
1499 data->has_label = false;
1500 data->last_goto = NULL;
1502 remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);
1504 else_has_label = data->has_label;
1505 data->has_label = save_has_label | then_has_label | else_has_label;
1507 then_clause = COND_EXPR_THEN (*stmt_p);
1508 else_clause = COND_EXPR_ELSE (*stmt_p);
1509 cond = fold (COND_EXPR_COND (*stmt_p));
1511 /* If neither arm does anything at all, we can remove the whole IF. */
1512 if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
1514 *stmt_p = build_empty_stmt ();
1515 data->repeat = true;
1518 /* If there are no reachable statements in an arm, then we can
1519 zap the entire conditional. */
1520 else if (integer_nonzerop (cond) && !else_has_label)
1522 if (warn_notreached)
1523 remove_useless_stmts_warn_notreached (else_clause);
1524 *stmt_p = then_clause;
1525 data->repeat = true;
1527 else if (integer_zerop (cond) && !then_has_label)
1529 if (warn_notreached)
1530 remove_useless_stmts_warn_notreached (then_clause);
1531 *stmt_p = else_clause;
1532 data->repeat = true;
1535 /* Check a couple of simple things on then/else with single stmts. */
1536 else
1538 tree then_stmt = expr_only (then_clause);
1539 tree else_stmt = expr_only (else_clause);
1541 /* Notice branches to a common destination. */
1542 if (then_stmt && else_stmt
1543 && TREE_CODE (then_stmt) == GOTO_EXPR
1544 && TREE_CODE (else_stmt) == GOTO_EXPR
1545 && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
1547 *stmt_p = then_stmt;
1548 data->repeat = true;
1551 /* If the THEN/ELSE clause merely assigns a value to a variable or
1552 parameter which is already known to contain that value, then
1553 remove the useless THEN/ELSE clause. */
1554 else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1556 if (else_stmt
1557 && TREE_CODE (else_stmt) == GIMPLE_MODIFY_STMT
1558 && GIMPLE_STMT_OPERAND (else_stmt, 0) == cond
1559 && integer_zerop (GIMPLE_STMT_OPERAND (else_stmt, 1)))
1560 COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
1562 else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
1563 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1564 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1565 && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
1567 tree stmt = (TREE_CODE (cond) == EQ_EXPR
1568 ? then_stmt : else_stmt);
1569 tree *location = (TREE_CODE (cond) == EQ_EXPR
1570 ? &COND_EXPR_THEN (*stmt_p)
1571 : &COND_EXPR_ELSE (*stmt_p));
1573 if (stmt
1574 && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
1575 && GIMPLE_STMT_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
1576 && GIMPLE_STMT_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
1577 *location = alloc_stmt_list ();
1581 /* Protect GOTOs in the arm of COND_EXPRs from being removed. They
1582 would be re-introduced during lowering. */
1583 data->last_goto = NULL;
1587 static void
1588 remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
1590 bool save_may_branch, save_may_throw;
1591 bool this_may_branch, this_may_throw;
1593 /* Collect may_branch and may_throw information for the body only. */
1594 save_may_branch = data->may_branch;
1595 save_may_throw = data->may_throw;
1596 data->may_branch = false;
1597 data->may_throw = false;
1598 data->last_goto = NULL;
1600 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1602 this_may_branch = data->may_branch;
1603 this_may_throw = data->may_throw;
1604 data->may_branch |= save_may_branch;
1605 data->may_throw |= save_may_throw;
1606 data->last_goto = NULL;
1608 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1610 /* If the body is empty, then we can emit the FINALLY block without
1611 the enclosing TRY_FINALLY_EXPR. */
1612 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
1614 *stmt_p = TREE_OPERAND (*stmt_p, 1);
1615 data->repeat = true;
1618 /* If the handler is empty, then we can emit the TRY block without
1619 the enclosing TRY_FINALLY_EXPR. */
1620 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1622 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1623 data->repeat = true;
1626 /* If the body neither throws, nor branches, then we can safely
1627 string the TRY and FINALLY blocks together. */
1628 else if (!this_may_branch && !this_may_throw)
1630 tree stmt = *stmt_p;
1631 *stmt_p = TREE_OPERAND (stmt, 0);
1632 append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
1633 data->repeat = true;
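/* Illustrative before/after sketch, not from the sources.  For

     try { body } finally { cleanup }

   an empty body leaves just "cleanup", an empty cleanup leaves just
   "body", and a body that can neither branch nor throw lets the whole
   construct be flattened to the plain sequence "body; cleanup;".  */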
1638 static void
1639 remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
1641 bool save_may_throw, this_may_throw;
1642 tree_stmt_iterator i;
1643 tree stmt;
1645 /* Collect may_throw information for the body only. */
1646 save_may_throw = data->may_throw;
1647 data->may_throw = false;
1648 data->last_goto = NULL;
1650 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1652 this_may_throw = data->may_throw;
1653 data->may_throw = save_may_throw;
1655 /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
1656 if (!this_may_throw)
1658 if (warn_notreached)
1659 remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
1660 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1661 data->repeat = true;
1662 return;
1665 /* Process the catch clause specially. We may be able to tell that
1666 no exceptions propagate past this point. */
1668 this_may_throw = true;
1669 i = tsi_start (TREE_OPERAND (*stmt_p, 1));
1670 stmt = tsi_stmt (i);
1671 data->last_goto = NULL;
1673 switch (TREE_CODE (stmt))
1675 case CATCH_EXPR:
1676 for (; !tsi_end_p (i); tsi_next (&i))
1678 stmt = tsi_stmt (i);
1679 /* If we catch all exceptions, then the body does not
1680 propagate exceptions past this point. */
1681 if (CATCH_TYPES (stmt) == NULL)
1682 this_may_throw = false;
1683 data->last_goto = NULL;
1684 remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
1686 break;
1688 case EH_FILTER_EXPR:
1689 if (EH_FILTER_MUST_NOT_THROW (stmt))
1690 this_may_throw = false;
1691 else if (EH_FILTER_TYPES (stmt) == NULL)
1692 this_may_throw = false;
1693 remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
1694 break;
1696 default:
1697 /* Otherwise this is a cleanup. */
1698 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1700 /* If the cleanup is empty, then we can emit the TRY block without
1701 the enclosing TRY_CATCH_EXPR. */
1702 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1704 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1705 data->repeat = true;
1707 break;
1709 data->may_throw |= this_may_throw;
1713 static void
1714 remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
1716 tree block;
1718 /* First remove anything underneath the BIND_EXPR. */
1719 remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);
1721 /* If the BIND_EXPR has no variables, then we can pull everything
1722 up one level and remove the BIND_EXPR, unless this is the toplevel
1723 BIND_EXPR for the current function or an inlined function.
1725 When this situation occurs we will want to apply this
1726 optimization again. */
1727 block = BIND_EXPR_BLOCK (*stmt_p);
1728 if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
1729 && *stmt_p != DECL_SAVED_TREE (current_function_decl)
1730 && (! block
1731 || ! BLOCK_ABSTRACT_ORIGIN (block)
1732 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1733 != FUNCTION_DECL)))
1735 *stmt_p = BIND_EXPR_BODY (*stmt_p);
1736 data->repeat = true;
1741 static void
1742 remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
1744 tree dest = GOTO_DESTINATION (*stmt_p);
1746 data->may_branch = true;
1747 data->last_goto = NULL;
1749 /* Record the last goto expr, so that we can delete it if unnecessary. */
1750 if (TREE_CODE (dest) == LABEL_DECL)
1751 data->last_goto = stmt_p;
1755 static void
1756 remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
1758 tree label = LABEL_EXPR_LABEL (*stmt_p);
1760 data->has_label = true;
1762 /* We do want to jump across non-local label receiver code. */
1763 if (DECL_NONLOCAL (label))
1764 data->last_goto = NULL;
1766 else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
1768 *data->last_goto = build_empty_stmt ();
1769 data->repeat = true;
1772 /* ??? Add something here to delete unused labels. */
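/* Worked example, not from the sources.  A jump to the label that
   immediately follows it,

     goto L;  L: x_1 = ...;

   is caught here: the goto recorded in data->last_goto is replaced by an
   empty statement, leaving "L: x_1 = ...;", and data->repeat asks for
   another pass over the function to catch anything this exposes.  */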
1776 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
1777 decl. This allows us to eliminate redundant or useless
1778 calls to "const" functions.
1780 The gimplifier already does the same operation, but we may notice functions
1781 being const and pure once their calls have been gimplified, so we need
1782 to update the flag. */
1784 static void
1785 update_call_expr_flags (tree call)
1787 tree decl = get_callee_fndecl (call);
1788 if (!decl)
1789 return;
1790 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1791 TREE_SIDE_EFFECTS (call) = 0;
1792 if (TREE_NOTHROW (decl))
1793 TREE_NOTHROW (call) = 1;
1797 /* T is a CALL_EXPR. Set current_function_calls_* flags. */
1799 void
1800 notice_special_calls (tree t)
1802 int flags = call_expr_flags (t);
1804 if (flags & ECF_MAY_BE_ALLOCA)
1805 current_function_calls_alloca = true;
1806 if (flags & ECF_RETURNS_TWICE)
1807 current_function_calls_setjmp = true;
1811 /* Clear flags set by notice_special_calls. Used by dead code removal
1812 to update the flags. */
1814 void
1815 clear_special_calls (void)
1817 current_function_calls_alloca = false;
1818 current_function_calls_setjmp = false;
1822 static void
1823 remove_useless_stmts_1 (tree *tp, struct rus_data *data)
1825 tree t = *tp, op;
1827 switch (TREE_CODE (t))
1829 case COND_EXPR:
1830 remove_useless_stmts_cond (tp, data);
1831 break;
1833 case TRY_FINALLY_EXPR:
1834 remove_useless_stmts_tf (tp, data);
1835 break;
1837 case TRY_CATCH_EXPR:
1838 remove_useless_stmts_tc (tp, data);
1839 break;
1841 case BIND_EXPR:
1842 remove_useless_stmts_bind (tp, data);
1843 break;
1845 case GOTO_EXPR:
1846 remove_useless_stmts_goto (tp, data);
1847 break;
1849 case LABEL_EXPR:
1850 remove_useless_stmts_label (tp, data);
1851 break;
1853 case RETURN_EXPR:
1854 fold_stmt (tp);
1855 data->last_goto = NULL;
1856 data->may_branch = true;
1857 break;
1859 case CALL_EXPR:
1860 fold_stmt (tp);
1861 data->last_goto = NULL;
1862 notice_special_calls (t);
1863 update_call_expr_flags (t);
1864 if (tree_could_throw_p (t))
1865 data->may_throw = true;
1866 break;
1868 case MODIFY_EXPR:
1869 gcc_unreachable ();
1871 case GIMPLE_MODIFY_STMT:
1872 data->last_goto = NULL;
1873 fold_stmt (tp);
1874 op = get_call_expr_in (t);
1875 if (op)
1877 update_call_expr_flags (op);
1878 notice_special_calls (op);
1880 if (tree_could_throw_p (t))
1881 data->may_throw = true;
1882 break;
1884 case STATEMENT_LIST:
1886 tree_stmt_iterator i = tsi_start (t);
1887 while (!tsi_end_p (i))
1889 t = tsi_stmt (i);
1890 if (IS_EMPTY_STMT (t))
1892 tsi_delink (&i);
1893 continue;
1896 remove_useless_stmts_1 (tsi_stmt_ptr (i), data);
1898 t = tsi_stmt (i);
1899 if (TREE_CODE (t) == STATEMENT_LIST)
1901 tsi_link_before (&i, t, TSI_SAME_STMT);
1902 tsi_delink (&i);
1904 else
1905 tsi_next (&i);
1908 break;
1909 case ASM_EXPR:
1910 fold_stmt (tp);
1911 data->last_goto = NULL;
1912 break;
1914 default:
1915 data->last_goto = NULL;
1916 break;
1920 static unsigned int
1921 remove_useless_stmts (void)
1923 struct rus_data data;
1925 clear_special_calls ();
1929 memset (&data, 0, sizeof (data));
1930 remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
1932 while (data.repeat);
1933 return 0;
1937 struct tree_opt_pass pass_remove_useless_stmts =
1939 "useless", /* name */
1940 NULL, /* gate */
1941 remove_useless_stmts, /* execute */
1942 NULL, /* sub */
1943 NULL, /* next */
1944 0, /* static_pass_number */
1945 0, /* tv_id */
1946 PROP_gimple_any, /* properties_required */
1947 0, /* properties_provided */
1948 0, /* properties_destroyed */
1949 0, /* todo_flags_start */
1950 TODO_dump_func, /* todo_flags_finish */
1951 0 /* letter */
1954 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1956 static void
1957 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1959 tree phi;
1961 /* Since this block is no longer reachable, we can just delete all
1962 of its PHI nodes. */
1963 phi = phi_nodes (bb);
1964 while (phi)
1966 tree next = PHI_CHAIN (phi);
1967 remove_phi_node (phi, NULL_TREE, true);
1968 phi = next;
1971 /* Remove edges to BB's successors. */
1972 while (EDGE_COUNT (bb->succs) > 0)
1973 remove_edge (EDGE_SUCC (bb, 0));
1977 /* Remove statements of basic block BB. */
1979 static void
1980 remove_bb (basic_block bb)
1982 block_stmt_iterator i;
1983 #ifdef USE_MAPPED_LOCATION
1984 source_location loc = UNKNOWN_LOCATION;
1985 #else
1986 source_locus loc = 0;
1987 #endif
1989 if (dump_file)
1991 fprintf (dump_file, "Removing basic block %d\n", bb->index);
1992 if (dump_flags & TDF_DETAILS)
1994 dump_bb (bb, dump_file, 0);
1995 fprintf (dump_file, "\n");
1999 if (current_loops)
2001 struct loop *loop = bb->loop_father;
2003 /* If a loop gets removed, clean up the information associated
2004 with it. */
2005 if (loop->latch == bb
2006 || loop->header == bb)
2007 free_numbers_of_iterations_estimates_loop (loop);
2010 /* Remove all the instructions in the block. */
2011 for (i = bsi_start (bb); !bsi_end_p (i);)
2013 tree stmt = bsi_stmt (i);
2014 if (TREE_CODE (stmt) == LABEL_EXPR
2015 && (FORCED_LABEL (LABEL_EXPR_LABEL (stmt))
2016 || DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt))))
2018 basic_block new_bb;
2019 block_stmt_iterator new_bsi;
2021 /* A non-reachable non-local label may still be referenced.
2022 But it no longer needs to carry the extra semantics of
2023 non-locality. */
2024 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
2026 DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)) = 0;
2027 FORCED_LABEL (LABEL_EXPR_LABEL (stmt)) = 1;
2030 new_bb = bb->prev_bb;
2031 new_bsi = bsi_start (new_bb);
2032 bsi_remove (&i, false);
2033 bsi_insert_before (&new_bsi, stmt, BSI_NEW_STMT);
2035 else
2037 /* Release SSA definitions if we are in SSA. Note that we
2038 may be called when not in SSA. For example,
2039 final_cleanup calls this function via
2040 cleanup_tree_cfg. */
2041 if (gimple_in_ssa_p (cfun))
2042 release_defs (stmt);
2044 bsi_remove (&i, true);
2047 /* Don't warn for removed gotos. Gotos are often removed due to
2048 jump threading, thus resulting in bogus warnings. Not great,
2049 since this way we lose warnings for gotos in the original
2050 program that are indeed unreachable. */
2051 if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
2053 #ifdef USE_MAPPED_LOCATION
2054 if (EXPR_HAS_LOCATION (stmt))
2055 loc = EXPR_LOCATION (stmt);
2056 #else
2057 source_locus t;
2058 t = EXPR_LOCUS (stmt);
2059 if (t && LOCATION_LINE (*t) > 0)
2060 loc = t;
2061 #endif
2065 /* If requested, give a warning that the first statement in the
2066 block is unreachable. The loop above records only the first
2067 location it sees, so LOC refers to the first located statement
2068 in the block. */
2069 #ifdef USE_MAPPED_LOCATION
2070 if (loc > BUILTINS_LOCATION)
2071 warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
2072 #else
2073 if (loc)
2074 warning (OPT_Wunreachable_code, "%Hwill never be executed", loc);
2075 #endif
2077 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2081 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2082 predicate VAL, return the edge that will be taken out of the block.
2083 If VAL does not match a unique edge, NULL is returned. */
2085 edge
2086 find_taken_edge (basic_block bb, tree val)
2088 tree stmt;
2090 stmt = last_stmt (bb);
2092 gcc_assert (stmt);
2093 gcc_assert (is_ctrl_stmt (stmt));
2094 gcc_assert (val);
2096 if (! is_gimple_min_invariant (val))
2097 return NULL;
2099 if (TREE_CODE (stmt) == COND_EXPR)
2100 return find_taken_edge_cond_expr (bb, val);
2102 if (TREE_CODE (stmt) == SWITCH_EXPR)
2103 return find_taken_edge_switch_expr (bb, val);
2105 if (computed_goto_p (stmt))
2106 return find_taken_edge_computed_goto (bb, TREE_OPERAND( val, 0));
2108 gcc_unreachable ();
2111 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2112 statement, determine which of the outgoing edges will be taken out of the
2113 block. Return NULL if any edge may be taken. */
2115 static edge
2116 find_taken_edge_computed_goto (basic_block bb, tree val)
2118 basic_block dest;
2119 edge e = NULL;
2121 dest = label_to_block (val);
2122 if (dest)
2124 e = find_edge (bb, dest);
2125 gcc_assert (e != NULL);
2128 return e;
2131 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2132 statement, determine which of the two edges will be taken out of the
2133 block. Return NULL if either edge may be taken. */
2135 static edge
2136 find_taken_edge_cond_expr (basic_block bb, tree val)
2138 edge true_edge, false_edge;
2140 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2142 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2143 return (integer_zerop (val) ? false_edge : true_edge);
2146 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2147 statement, determine which edge will be taken out of the block. Return
2148 NULL if any edge may be taken. */
2150 static edge
2151 find_taken_edge_switch_expr (basic_block bb, tree val)
2153 tree switch_expr, taken_case;
2154 basic_block dest_bb;
2155 edge e;
2157 switch_expr = last_stmt (bb);
2158 taken_case = find_case_label_for_value (switch_expr, val);
2159 dest_bb = label_to_block (CASE_LABEL (taken_case));
2161 e = find_edge (bb, dest_bb);
2162 gcc_assert (e);
2163 return e;
2167 /* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
2168 We can make optimal use here of the fact that the case labels are
2169 sorted: We can do a binary search for a case matching VAL. */
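/* For instance, with SWITCH_LABELS = { case 1, case 5, case 10, default }
   and VAL = 5, the loop below narrows LOW and HIGH until it reaches the
   label whose CASE_LOW equals VAL (or, for a case range, whose
   CASE_LOW <= VAL <= CASE_HIGH).  If no label matches, the default case
   kept in the last slot of the vector is returned.  */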
2171 static tree
2172 find_case_label_for_value (tree switch_expr, tree val)
2174 tree vec = SWITCH_LABELS (switch_expr);
2175 size_t low, high, n = TREE_VEC_LENGTH (vec);
2176 tree default_case = TREE_VEC_ELT (vec, n - 1);
2178 for (low = -1, high = n - 1; high - low > 1; )
2180 size_t i = (high + low) / 2;
2181 tree t = TREE_VEC_ELT (vec, i);
2182 int cmp;
2184 /* Cache the result of comparing CASE_LOW and val. */
2185 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2187 if (cmp > 0)
2188 high = i;
2189 else
2190 low = i;
2192 if (CASE_HIGH (t) == NULL)
2194 /* A single-valued case label. */
2195 if (cmp == 0)
2196 return t;
2198 else
2200 /* A case range. We can only handle integer ranges. */
2201 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2202 return t;
2206 return default_case;
2212 /*---------------------------------------------------------------------------
2213 Debugging functions
2214 ---------------------------------------------------------------------------*/
2216 /* Dump tree-specific information of block BB to file OUTF. */
2218 void
2219 tree_dump_bb (basic_block bb, FILE *outf, int indent)
2221 dump_generic_bb (outf, bb, indent, TDF_VOPS|TDF_MEMSYMS);
2225 /* Dump a basic block on stderr. */
2227 void
2228 debug_tree_bb (basic_block bb)
2230 dump_bb (bb, stderr, 0);
2234 /* Dump basic block with index N on stderr. */
2236 basic_block
2237 debug_tree_bb_n (int n)
2239 debug_tree_bb (BASIC_BLOCK (n));
2240 return BASIC_BLOCK (n);
2244 /* Dump the CFG on stderr.
2246 FLAGS are the same as those used by the tree dumping functions
2247 (see TDF_* in tree-pass.h). */
2249 void
2250 debug_tree_cfg (int flags)
2252 dump_tree_cfg (stderr, flags);
2256 /* Dump the program showing basic block boundaries on the given FILE.
2258 FLAGS are the same as those used by the tree dumping functions (see TDF_* in
2259 tree.h). */
2261 void
2262 dump_tree_cfg (FILE *file, int flags)
2264 if (flags & TDF_DETAILS)
2266 const char *funcname
2267 = lang_hooks.decl_printable_name (current_function_decl, 2);
2269 fputc ('\n', file);
2270 fprintf (file, ";; Function %s\n\n", funcname);
2271 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2272 n_basic_blocks, n_edges, last_basic_block);
2274 brief_dump_cfg (file);
2275 fprintf (file, "\n");
2278 if (flags & TDF_STATS)
2279 dump_cfg_stats (file);
2281 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2285 /* Dump CFG statistics on FILE. */
2287 void
2288 dump_cfg_stats (FILE *file)
2290 static long max_num_merged_labels = 0;
2291 unsigned long size, total = 0;
2292 long num_edges;
2293 basic_block bb;
2294 const char * const fmt_str = "%-30s%-13s%12s\n";
2295 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2296 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2297 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2298 const char *funcname
2299 = lang_hooks.decl_printable_name (current_function_decl, 2);
2302 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2304 fprintf (file, "---------------------------------------------------------\n");
2305 fprintf (file, fmt_str, "", " Number of ", "Memory");
2306 fprintf (file, fmt_str, "", " instances ", "used ");
2307 fprintf (file, "---------------------------------------------------------\n");
2309 size = n_basic_blocks * sizeof (struct basic_block_def);
2310 total += size;
2311 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2312 SCALE (size), LABEL (size));
2314 num_edges = 0;
2315 FOR_EACH_BB (bb)
2316 num_edges += EDGE_COUNT (bb->succs);
2317 size = num_edges * sizeof (struct edge_def);
2318 total += size;
2319 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2321 fprintf (file, "---------------------------------------------------------\n");
2322 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2323 LABEL (total));
2324 fprintf (file, "---------------------------------------------------------\n");
2325 fprintf (file, "\n");
2327 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2328 max_num_merged_labels = cfg_stats.num_merged_labels;
2330 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2331 cfg_stats.num_merged_labels, max_num_merged_labels);
2333 fprintf (file, "\n");
2337 /* Dump CFG statistics on stderr. Keep extern so that it's always
2338 linked in the final executable. */
2340 void
2341 debug_cfg_stats (void)
2343 dump_cfg_stats (stderr);
2347 /* Dump the flowgraph to a .vcg FILE. */
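/* The output is plain VCG text: one "node:" line for every basic block
   (plus ENTRY and EXIT) and one "edge:" line for every CFG edge, with
   fake edges drawn dotted and at a lower priority than real ones.  */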
2349 static void
2350 tree_cfg2vcg (FILE *file)
2352 edge e;
2353 edge_iterator ei;
2354 basic_block bb;
2355 const char *funcname
2356 = lang_hooks.decl_printable_name (current_function_decl, 2);
2358 /* Write the file header. */
2359 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2360 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2361 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2363 /* Write blocks and edges. */
2364 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2366 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2367 e->dest->index);
2369 if (e->flags & EDGE_FAKE)
2370 fprintf (file, " linestyle: dotted priority: 10");
2371 else
2372 fprintf (file, " linestyle: solid priority: 100");
2374 fprintf (file, " }\n");
2376 fputc ('\n', file);
2378 FOR_EACH_BB (bb)
2380 enum tree_code head_code, end_code;
2381 const char *head_name, *end_name;
2382 int head_line = 0;
2383 int end_line = 0;
2384 tree first = first_stmt (bb);
2385 tree last = last_stmt (bb);
2387 if (first)
2389 head_code = TREE_CODE (first);
2390 head_name = tree_code_name[head_code];
2391 head_line = get_lineno (first);
2393 else
2394 head_name = "no-statement";
2396 if (last)
2398 end_code = TREE_CODE (last);
2399 end_name = tree_code_name[end_code];
2400 end_line = get_lineno (last);
2402 else
2403 end_name = "no-statement";
2405 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2406 bb->index, bb->index, head_name, head_line, end_name,
2407 end_line);
2409 FOR_EACH_EDGE (e, ei, bb->succs)
2411 if (e->dest == EXIT_BLOCK_PTR)
2412 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2413 else
2414 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2416 if (e->flags & EDGE_FAKE)
2417 fprintf (file, " priority: 10 linestyle: dotted");
2418 else
2419 fprintf (file, " priority: 100 linestyle: solid");
2421 fprintf (file, " }\n");
2424 if (bb->next_bb != EXIT_BLOCK_PTR)
2425 fputc ('\n', file);
2428 fputs ("}\n\n", file);
2433 /*---------------------------------------------------------------------------
2434 Miscellaneous helpers
2435 ---------------------------------------------------------------------------*/
2437 /* Return true if T represents a stmt that always transfers control. */
2439 bool
2440 is_ctrl_stmt (tree t)
2442 return (TREE_CODE (t) == COND_EXPR
2443 || TREE_CODE (t) == SWITCH_EXPR
2444 || TREE_CODE (t) == GOTO_EXPR
2445 || TREE_CODE (t) == RETURN_EXPR
2446 || TREE_CODE (t) == RESX_EXPR);
2450 /* Return true if T is a statement that may alter the flow of control
2451 (e.g., a call to a non-returning function). */
2453 bool
2454 is_ctrl_altering_stmt (tree t)
2456 tree call;
2458 gcc_assert (t);
2459 call = get_call_expr_in (t);
2460 if (call)
2462 /* A non-pure/const CALL_EXPR alters flow control if the current
2463 function has nonlocal labels. */
2464 if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
2465 return true;
2467 /* A CALL_EXPR also alters control flow if it does not return. */
2468 if (call_expr_flags (call) & ECF_NORETURN)
2469 return true;
2472 /* OpenMP directives alter control flow. */
2473 if (OMP_DIRECTIVE_P (t))
2474 return true;
2476 /* If a statement can throw, it alters control flow. */
2477 return tree_can_throw_internal (t);
2481 /* Return true if T is a computed goto. */
2483 bool
2484 computed_goto_p (tree t)
2486 return (TREE_CODE (t) == GOTO_EXPR
2487 && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
2491 /* Return true if T is a simple local goto. */
2493 bool
2494 simple_goto_p (tree t)
2496 return (TREE_CODE (t) == GOTO_EXPR
2497 && TREE_CODE (GOTO_DESTINATION (t)) == LABEL_DECL);
2501 /* Return true if T can make an abnormal transfer of control flow.
2502 Transfers of control flow associated with EH are excluded. */
2504 bool
2505 tree_can_make_abnormal_goto (tree t)
2507 if (computed_goto_p (t))
2508 return true;
2509 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
2510 t = GIMPLE_STMT_OPERAND (t, 1);
2511 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2512 t = TREE_OPERAND (t, 0);
2513 if (TREE_CODE (t) == CALL_EXPR)
2514 return TREE_SIDE_EFFECTS (t) && current_function_has_nonlocal_label;
2515 return false;
2519 /* Return true if T should start a new basic block. PREV_T is the
2520 statement preceding T. It is used when T is a label or a case label.
2521 Labels should only start a new basic block if their previous statement
2522 wasn't a label. Otherwise, a sequence of labels would generate
2523 unnecessary basic blocks that only contain a single label. */
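/* For example, in a sequence like "L1: L2: x = 1;" only L1 starts a new
   basic block; L2 is merged into it and counted in
   cfg_stats.num_merged_labels.  A nonlocal or FORCED_LABEL label, however,
   always starts a new block of its own.  */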
2525 static inline bool
2526 stmt_starts_bb_p (tree t, tree prev_t)
2528 if (t == NULL_TREE)
2529 return false;
2531 /* LABEL_EXPRs start a new basic block only if the preceding
2532 statement wasn't a label of the same type. This prevents the
2533 creation of consecutive blocks that have nothing but a single
2534 label. */
2535 if (TREE_CODE (t) == LABEL_EXPR)
2537 /* Nonlocal and computed GOTO targets always start a new block. */
2538 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
2539 || FORCED_LABEL (LABEL_EXPR_LABEL (t)))
2540 return true;
2542 if (prev_t && TREE_CODE (prev_t) == LABEL_EXPR)
2544 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
2545 return true;
2547 cfg_stats.num_merged_labels++;
2548 return false;
2550 else
2551 return true;
2554 return false;
2558 /* Return true if T should end a basic block. */
2560 bool
2561 stmt_ends_bb_p (tree t)
2563 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2567 /* Add gotos that used to be represented implicitly in the CFG. */
2569 void
2570 disband_implicit_edges (void)
2572 basic_block bb;
2573 block_stmt_iterator last;
2574 edge e;
2575 edge_iterator ei;
2576 tree stmt, label;
2578 FOR_EACH_BB (bb)
2580 last = bsi_last (bb);
2581 stmt = last_stmt (bb);
2583 if (stmt && TREE_CODE (stmt) == COND_EXPR)
2585 /* Remove superfluous gotos from COND_EXPR branches. Moved
2586 from cfg_remove_useless_stmts here since it violates the
2587 invariants for tree--cfg correspondence and thus fits better
2588 here where we do it anyway. */
2589 e = find_edge (bb, bb->next_bb);
2590 if (e)
2592 if (e->flags & EDGE_TRUE_VALUE)
2593 COND_EXPR_THEN (stmt) = build_empty_stmt ();
2594 else if (e->flags & EDGE_FALSE_VALUE)
2595 COND_EXPR_ELSE (stmt) = build_empty_stmt ();
2596 else
2597 gcc_unreachable ();
2598 e->flags |= EDGE_FALLTHRU;
2601 continue;
2604 if (stmt && TREE_CODE (stmt) == RETURN_EXPR)
2606 /* Remove the RETURN_EXPR if we may fall through to the exit
2607 instead. */
2608 gcc_assert (single_succ_p (bb));
2609 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
2611 if (bb->next_bb == EXIT_BLOCK_PTR
2612 && !TREE_OPERAND (stmt, 0))
2614 bsi_remove (&last, true);
2615 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
2617 continue;
2620 /* There can be no fallthru edge if the last statement is a control
2621 one. */
2622 if (stmt && is_ctrl_stmt (stmt))
2623 continue;
2625 /* Find a fallthru edge and emit the goto if necessary. */
2626 FOR_EACH_EDGE (e, ei, bb->succs)
2627 if (e->flags & EDGE_FALLTHRU)
2628 break;
2630 if (!e || e->dest == bb->next_bb)
2631 continue;
2633 gcc_assert (e->dest != EXIT_BLOCK_PTR);
2634 label = tree_block_label (e->dest);
2636 stmt = build1 (GOTO_EXPR, void_type_node, label);
2637 #ifdef USE_MAPPED_LOCATION
2638 SET_EXPR_LOCATION (stmt, e->goto_locus);
2639 #else
2640 SET_EXPR_LOCUS (stmt, e->goto_locus);
2641 #endif
2642 bsi_insert_after (&last, stmt, BSI_NEW_STMT);
2643 e->flags &= ~EDGE_FALLTHRU;
2647 /* Remove block annotations and other datastructures. */
2649 void
2650 delete_tree_cfg_annotations (void)
2652 basic_block bb;
2653 block_stmt_iterator bsi;
2655 /* Remove annotations from every tree in the function. */
2656 FOR_EACH_BB (bb)
2657 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
2659 tree stmt = bsi_stmt (bsi);
2660 ggc_free (stmt->base.ann);
2661 stmt->base.ann = NULL;
2663 label_to_block_map = NULL;
2667 /* Return the first statement in basic block BB. */
2669 tree
2670 first_stmt (basic_block bb)
2672 block_stmt_iterator i = bsi_start (bb);
2673 return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
2677 /* Return the last statement in basic block BB. */
2679 tree
2680 last_stmt (basic_block bb)
2682 block_stmt_iterator b = bsi_last (bb);
2683 return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
2687 /* Return the last statement of an otherwise empty block. Return NULL
2688 if the block is totally empty, or if it contains more than one
2689 statement. */
2691 tree
2692 last_and_only_stmt (basic_block bb)
2694 block_stmt_iterator i = bsi_last (bb);
2695 tree last, prev;
2697 if (bsi_end_p (i))
2698 return NULL_TREE;
2700 last = bsi_stmt (i);
2701 bsi_prev (&i);
2702 if (bsi_end_p (i))
2703 return last;
2705 /* Empty statements should no longer appear in the instruction stream.
2706 Everything that might have appeared before should be deleted by
2707 remove_useless_stmts, and the optimizers should just bsi_remove
2708 instead of smashing with build_empty_stmt.
2710 Thus the only thing that should appear here in a block containing
2711 one executable statement is a label. */
2712 prev = bsi_stmt (i);
2713 if (TREE_CODE (prev) == LABEL_EXPR)
2714 return last;
2715 else
2716 return NULL_TREE;
2720 /* Mark BB as the basic block holding statement T. */
2722 void
2723 set_bb_for_stmt (tree t, basic_block bb)
2725 if (TREE_CODE (t) == PHI_NODE)
2726 PHI_BB (t) = bb;
2727 else if (TREE_CODE (t) == STATEMENT_LIST)
2729 tree_stmt_iterator i;
2730 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2731 set_bb_for_stmt (tsi_stmt (i), bb);
2733 else
2735 stmt_ann_t ann = get_stmt_ann (t);
2736 ann->bb = bb;
2738 /* If the statement is a label, add the label to block-to-labels map
2739 so that we can speed up edge creation for GOTO_EXPRs. */
2740 if (TREE_CODE (t) == LABEL_EXPR)
2742 int uid;
2744 t = LABEL_EXPR_LABEL (t);
2745 uid = LABEL_DECL_UID (t);
2746 if (uid == -1)
2748 unsigned old_len = VEC_length (basic_block, label_to_block_map);
2749 LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
2750 if (old_len <= (unsigned) uid)
2752 unsigned new_len = 3 * uid / 2;
2754 VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
2755 new_len);
2758 else
2759 /* We're moving an existing label. Make sure that we've
2760 removed it from the old block. */
2761 gcc_assert (!bb
2762 || !VEC_index (basic_block, label_to_block_map, uid));
2763 VEC_replace (basic_block, label_to_block_map, uid, bb);
2768 /* Faster version of set_bb_for_stmt that assumes the statement is being moved
2769 from one basic block to another.
2770 For BB splitting we can run into a quadratic case, so performance is quite
2771 important; knowing that the tables are big enough, change_bb_for_stmt
2772 can be inlined as a leaf function. */
2773 static inline void
2774 change_bb_for_stmt (tree t, basic_block bb)
2776 get_stmt_ann (t)->bb = bb;
2777 if (TREE_CODE (t) == LABEL_EXPR)
2778 VEC_replace (basic_block, label_to_block_map,
2779 LABEL_DECL_UID (LABEL_EXPR_LABEL (t)), bb);
2782 /* Finds iterator for STMT. */
2784 extern block_stmt_iterator
2785 bsi_for_stmt (tree stmt)
2787 block_stmt_iterator bsi;
2789 for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
2790 if (bsi_stmt (bsi) == stmt)
2791 return bsi;
2793 gcc_unreachable ();
2796 /* Mark statement T as modified, and update it. */
2797 static inline void
2798 update_modified_stmts (tree t)
2800 if (!ssa_operands_active ())
2801 return;
2802 if (TREE_CODE (t) == STATEMENT_LIST)
2804 tree_stmt_iterator i;
2805 tree stmt;
2806 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2808 stmt = tsi_stmt (i);
2809 update_stmt_if_modified (stmt);
2812 else
2813 update_stmt_if_modified (t);
2816 /* Insert statement (or statement list) T before the statement
2817 pointed-to by iterator I. M specifies how to update iterator I
2818 after insertion (see enum bsi_iterator_update). */
2820 void
2821 bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2823 set_bb_for_stmt (t, i->bb);
2824 update_modified_stmts (t);
2825 tsi_link_before (&i->tsi, t, m);
2829 /* Insert statement (or statement list) T after the statement
2830 pointed-to by iterator I. M specifies how to update iterator I
2831 after insertion (see enum bsi_iterator_update). */
2833 void
2834 bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2836 set_bb_for_stmt (t, i->bb);
2837 update_modified_stmts (t);
2838 tsi_link_after (&i->tsi, t, m);
2842 /* Remove the statement pointed to by iterator I. The iterator is updated
2843 to the next statement.
2845 When REMOVE_EH_INFO is true we remove the statement pointed to by
2846 iterator I from the EH tables. Otherwise we do not modify the EH
2847 tables.
2849 Generally, REMOVE_EH_INFO should be true when the statement is going to
2850 be removed from the IL and not reinserted elsewhere. */
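/* For example, the bsi_move_* routines below pass REMOVE_EH_INFO as false,
   since the statement they remove is immediately reinserted elsewhere.  */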
2852 void
2853 bsi_remove (block_stmt_iterator *i, bool remove_eh_info)
2855 tree t = bsi_stmt (*i);
2856 set_bb_for_stmt (t, NULL);
2857 delink_stmt_imm_use (t);
2858 tsi_delink (&i->tsi);
2859 mark_stmt_modified (t);
2860 if (remove_eh_info)
2862 remove_stmt_from_eh_region (t);
2863 gimple_remove_stmt_histograms (cfun, t);
2868 /* Move the statement at FROM so it comes right after the statement at TO. */
2870 void
2871 bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
2873 tree stmt = bsi_stmt (*from);
2874 bsi_remove (from, false);
2875 bsi_insert_after (to, stmt, BSI_SAME_STMT);
2879 /* Move the statement at FROM so it comes right before the statement at TO. */
2881 void
2882 bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
2884 tree stmt = bsi_stmt (*from);
2885 bsi_remove (from, false);
2886 bsi_insert_before (to, stmt, BSI_SAME_STMT);
2890 /* Move the statement at FROM to the end of basic block BB. */
2892 void
2893 bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
2895 block_stmt_iterator last = bsi_last (bb);
2897 /* Have to check bsi_end_p because it could be an empty block. */
2898 if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
2899 bsi_move_before (from, &last);
2900 else
2901 bsi_move_after (from, &last);
2905 /* Replace the contents of the statement pointed to by iterator BSI
2906 with STMT. If UPDATE_EH_INFO is true, the exception handling
2907 information of the original statement is moved to the new statement. */
2909 void
2910 bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool update_eh_info)
2912 int eh_region;
2913 tree orig_stmt = bsi_stmt (*bsi);
2915 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
2916 set_bb_for_stmt (stmt, bsi->bb);
2918 /* Preserve EH region information from the original statement, if
2919 requested by the caller. */
2920 if (update_eh_info)
2922 eh_region = lookup_stmt_eh_region (orig_stmt);
2923 if (eh_region >= 0)
2925 remove_stmt_from_eh_region (orig_stmt);
2926 add_stmt_to_eh_region (stmt, eh_region);
2927 gimple_duplicate_stmt_histograms (cfun, stmt, cfun, orig_stmt);
2928 gimple_remove_stmt_histograms (cfun, orig_stmt);
2932 delink_stmt_imm_use (orig_stmt);
2933 *bsi_stmt_ptr (*bsi) = stmt;
2934 mark_stmt_modified (stmt);
2935 update_modified_stmts (stmt);
2939 /* Find a location on edge E where a statement may be inserted, and store
2940 it in *BSI. Every attempt is made to use an existing basic block, but
2941 sometimes that isn't possible. When it isn't possible, the edge is
2942 split and the statement is added to the new block.
2944 In all cases, the returned *BSI points to the correct location. The
2945 return value is true if insertion should be done after the location,
2946 or false if it should be done before the location. If a new basic block
2947 has to be created, it is stored in *NEW_BB. */
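/* Roughly: first try the destination block (single predecessor, no PHI
   nodes, not the exit block), then the source block (single successor,
   non-abnormal edge, not the entry block, last statement does not end the
   block), and only as a last resort split E and insert into the block
   created by the split.  */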
2949 static bool
2950 tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
2951 basic_block *new_bb)
2953 basic_block dest, src;
2954 tree tmp;
2956 dest = e->dest;
2957 restart:
2959 /* If the destination has one predecessor which has no PHI nodes,
2960 insert there. Except for the exit block.
2962 The requirement for no PHI nodes could be relaxed. Basically we
2963 would have to examine the PHIs to prove that none of them used
2964 the value set by the statement we want to insert on E. That
2965 hardly seems worth the effort. */
2966 if (single_pred_p (dest)
2967 && ! phi_nodes (dest)
2968 && dest != EXIT_BLOCK_PTR)
2970 *bsi = bsi_start (dest);
2971 if (bsi_end_p (*bsi))
2972 return true;
2974 /* Make sure we insert after any leading labels. */
2975 tmp = bsi_stmt (*bsi);
2976 while (TREE_CODE (tmp) == LABEL_EXPR)
2978 bsi_next (bsi);
2979 if (bsi_end_p (*bsi))
2980 break;
2981 tmp = bsi_stmt (*bsi);
2984 if (bsi_end_p (*bsi))
2986 *bsi = bsi_last (dest);
2987 return true;
2989 else
2990 return false;
2993 /* If the source has one successor, the edge is not abnormal and
2994 the last statement does not end a basic block, insert there.
2995 Except for the entry block. */
2996 src = e->src;
2997 if ((e->flags & EDGE_ABNORMAL) == 0
2998 && single_succ_p (src)
2999 && src != ENTRY_BLOCK_PTR)
3001 *bsi = bsi_last (src);
3002 if (bsi_end_p (*bsi))
3003 return true;
3005 tmp = bsi_stmt (*bsi);
3006 if (!stmt_ends_bb_p (tmp))
3007 return true;
3009 /* Insert code just before returning the value. We may need to decompose
3010 the return in case it contains a non-trivial operand. */
3011 if (TREE_CODE (tmp) == RETURN_EXPR)
3013 tree op = TREE_OPERAND (tmp, 0);
3014 if (op && !is_gimple_val (op))
3016 gcc_assert (TREE_CODE (op) == GIMPLE_MODIFY_STMT);
3017 bsi_insert_before (bsi, op, BSI_NEW_STMT);
3018 TREE_OPERAND (tmp, 0) = GIMPLE_STMT_OPERAND (op, 0);
3020 bsi_prev (bsi);
3021 return true;
3025 /* Otherwise, create a new basic block, and split this edge. */
3026 dest = split_edge (e);
3027 if (new_bb)
3028 *new_bb = dest;
3029 e = single_pred_edge (dest);
3030 goto restart;
3034 /* This routine will commit all pending edge insertions, creating any new
3035 basic blocks which are necessary. */
3037 void
3038 bsi_commit_edge_inserts (void)
3040 basic_block bb;
3041 edge e;
3042 edge_iterator ei;
3044 bsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR), NULL);
3046 FOR_EACH_BB (bb)
3047 FOR_EACH_EDGE (e, ei, bb->succs)
3048 bsi_commit_one_edge_insert (e, NULL);
3052 /* Commit insertions pending at edge E. If a new block is created, set NEW_BB
3053 to this block, otherwise set it to NULL. */
3055 void
3056 bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
3058 if (new_bb)
3059 *new_bb = NULL;
3060 if (PENDING_STMT (e))
3062 block_stmt_iterator bsi;
3063 tree stmt = PENDING_STMT (e);
3065 PENDING_STMT (e) = NULL_TREE;
3067 if (tree_find_edge_insert_loc (e, &bsi, new_bb))
3068 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3069 else
3070 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3075 /* Add STMT to the pending list of edge E. No actual insertion is
3076 made until a call to bsi_commit_edge_inserts () is made. */
3078 void
3079 bsi_insert_on_edge (edge e, tree stmt)
3081 append_to_statement_list (stmt, &PENDING_STMT (e));
3084 /* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts. If a new
3085 block has to be created, it is returned. */
3087 basic_block
3088 bsi_insert_on_edge_immediate (edge e, tree stmt)
3090 block_stmt_iterator bsi;
3091 basic_block new_bb = NULL;
3093 gcc_assert (!PENDING_STMT (e));
3095 if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
3096 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3097 else
3098 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3100 return new_bb;
3103 /*---------------------------------------------------------------------------
3104 Tree specific functions for CFG manipulation
3105 ---------------------------------------------------------------------------*/
3107 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
3109 static void
3110 reinstall_phi_args (edge new_edge, edge old_edge)
3112 tree var, phi;
3114 if (!PENDING_STMT (old_edge))
3115 return;
3117 for (var = PENDING_STMT (old_edge), phi = phi_nodes (new_edge->dest);
3118 var && phi;
3119 var = TREE_CHAIN (var), phi = PHI_CHAIN (phi))
3121 tree result = TREE_PURPOSE (var);
3122 tree arg = TREE_VALUE (var);
3124 gcc_assert (result == PHI_RESULT (phi));
3126 add_phi_arg (phi, arg, new_edge);
3129 PENDING_STMT (old_edge) = NULL;
3132 /* Returns the basic block after which the new basic block created
3133 by splitting edge EDGE_IN should be placed. Tries to keep the new block
3134 near its "logical" location. This is of most help to humans looking
3135 at debugging dumps. */
3137 static basic_block
3138 split_edge_bb_loc (edge edge_in)
3140 basic_block dest = edge_in->dest;
3142 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
3143 return edge_in->src;
3144 else
3145 return dest->prev_bb;
3148 /* Split a (typically critical) edge EDGE_IN. Return the new block.
3149 Abort on abnormal edges. */
3151 static basic_block
3152 tree_split_edge (edge edge_in)
3154 basic_block new_bb, after_bb, dest;
3155 edge new_edge, e;
3157 /* Abnormal edges cannot be split. */
3158 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
3160 dest = edge_in->dest;
3162 after_bb = split_edge_bb_loc (edge_in);
3164 new_bb = create_empty_bb (after_bb);
3165 new_bb->frequency = EDGE_FREQUENCY (edge_in);
3166 new_bb->count = edge_in->count;
3167 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
3168 new_edge->probability = REG_BR_PROB_BASE;
3169 new_edge->count = edge_in->count;
3171 e = redirect_edge_and_branch (edge_in, new_bb);
3172 gcc_assert (e);
3173 reinstall_phi_args (new_edge, e);
3175 return new_bb;
3179 /* Return true when BB has label LABEL in it. */
3181 static bool
3182 has_label_p (basic_block bb, tree label)
3184 block_stmt_iterator bsi;
3186 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3188 tree stmt = bsi_stmt (bsi);
3190 if (TREE_CODE (stmt) != LABEL_EXPR)
3191 return false;
3192 if (LABEL_EXPR_LABEL (stmt) == label)
3193 return true;
3195 return false;
3199 /* Callback for walk_tree, check that all elements with address taken are
3200 properly noticed as such. DATA is nonnull if TP was seen
3201 inside a PHI node. */
3203 static tree
3204 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3206 tree t = *tp, x;
3207 bool in_phi = (data != NULL);
3209 if (TYPE_P (t))
3210 *walk_subtrees = 0;
3212 /* Check operand N for being valid GIMPLE and give error MSG if not. */
3213 #define CHECK_OP(N, MSG) \
3214 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
3215 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
3217 switch (TREE_CODE (t))
3219 case SSA_NAME:
3220 if (SSA_NAME_IN_FREE_LIST (t))
3222 error ("SSA name in freelist but still referenced");
3223 return *tp;
3225 break;
3227 case ASSERT_EXPR:
3228 x = fold (ASSERT_EXPR_COND (t));
3229 if (x == boolean_false_node)
3231 error ("ASSERT_EXPR with an always-false condition");
3232 return *tp;
3234 break;
3236 case MODIFY_EXPR:
3237 gcc_unreachable ();
3239 case GIMPLE_MODIFY_STMT:
3240 x = GIMPLE_STMT_OPERAND (t, 0);
3241 if (TREE_CODE (x) == BIT_FIELD_REF
3242 && is_gimple_reg (TREE_OPERAND (x, 0)))
3244 error ("GIMPLE register modified with BIT_FIELD_REF");
3245 return t;
3247 break;
3249 case ADDR_EXPR:
3251 bool old_invariant;
3252 bool old_constant;
3253 bool old_side_effects;
3254 bool new_invariant;
3255 bool new_constant;
3256 bool new_side_effects;
3258 /* ??? tree-ssa-alias.c may have overlooked dead PHI nodes, missing
3259 dead PHIs that take the address of something. But if the PHI
3260 result is dead, the fact that it takes the address of anything
3261 is irrelevant. Because we cannot tell from here if a PHI result
3262 is dead, we just skip this check for PHIs altogether. This means
3263 we may be missing "valid" checks, but what can you do?
3264 This was PR19217. */
3265 if (in_phi)
3266 break;
3268 old_invariant = TREE_INVARIANT (t);
3269 old_constant = TREE_CONSTANT (t);
3270 old_side_effects = TREE_SIDE_EFFECTS (t);
3272 recompute_tree_invariant_for_addr_expr (t);
3273 new_invariant = TREE_INVARIANT (t);
3274 new_side_effects = TREE_SIDE_EFFECTS (t);
3275 new_constant = TREE_CONSTANT (t);
3277 if (old_invariant != new_invariant)
3279 error ("invariant not recomputed when ADDR_EXPR changed");
3280 return t;
3283 if (old_constant != new_constant)
3285 error ("constant not recomputed when ADDR_EXPR changed");
3286 return t;
3288 if (old_side_effects != new_side_effects)
3290 error ("side effects not recomputed when ADDR_EXPR changed");
3291 return t;
3294 /* Skip any references (they will be checked when we recurse down the
3295 tree) and ensure that any variable used as a prefix is marked
3296 addressable. */
3297 for (x = TREE_OPERAND (t, 0);
3298 handled_component_p (x);
3299 x = TREE_OPERAND (x, 0))
3302 if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
3303 return NULL;
3304 if (!TREE_ADDRESSABLE (x))
3306 error ("address taken, but ADDRESSABLE bit not set");
3307 return x;
3309 break;
3312 case COND_EXPR:
3313 x = COND_EXPR_COND (t);
3314 if (TREE_CODE (TREE_TYPE (x)) != BOOLEAN_TYPE)
3316 error ("non-boolean used in condition");
3317 return x;
3319 if (!is_gimple_condexpr (x))
3321 error ("invalid conditional operand");
3322 return x;
3324 break;
3326 case NOP_EXPR:
3327 case CONVERT_EXPR:
3328 case FIX_TRUNC_EXPR:
3329 case FLOAT_EXPR:
3330 case NEGATE_EXPR:
3331 case ABS_EXPR:
3332 case BIT_NOT_EXPR:
3333 case NON_LVALUE_EXPR:
3334 case TRUTH_NOT_EXPR:
3335 CHECK_OP (0, "invalid operand to unary operator");
3336 break;
3338 case REALPART_EXPR:
3339 case IMAGPART_EXPR:
3340 case COMPONENT_REF:
3341 case ARRAY_REF:
3342 case ARRAY_RANGE_REF:
3343 case BIT_FIELD_REF:
3344 case VIEW_CONVERT_EXPR:
3345 /* We have a nest of references. Verify that each of the operands
3346 that determine where to reference is either a constant or a variable,
3347 verify that the base is valid, and then show we've already checked
3348 the subtrees. */
3349 while (handled_component_p (t))
3351 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3352 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3353 else if (TREE_CODE (t) == ARRAY_REF
3354 || TREE_CODE (t) == ARRAY_RANGE_REF)
3356 CHECK_OP (1, "invalid array index");
3357 if (TREE_OPERAND (t, 2))
3358 CHECK_OP (2, "invalid array lower bound");
3359 if (TREE_OPERAND (t, 3))
3360 CHECK_OP (3, "invalid array stride");
3362 else if (TREE_CODE (t) == BIT_FIELD_REF)
3364 CHECK_OP (1, "invalid operand to BIT_FIELD_REF");
3365 CHECK_OP (2, "invalid operand to BIT_FIELD_REF");
3368 t = TREE_OPERAND (t, 0);
3371 if (!CONSTANT_CLASS_P (t) && !is_gimple_lvalue (t))
3373 error ("invalid reference prefix");
3374 return t;
3376 *walk_subtrees = 0;
3377 break;
3379 case LT_EXPR:
3380 case LE_EXPR:
3381 case GT_EXPR:
3382 case GE_EXPR:
3383 case EQ_EXPR:
3384 case NE_EXPR:
3385 case UNORDERED_EXPR:
3386 case ORDERED_EXPR:
3387 case UNLT_EXPR:
3388 case UNLE_EXPR:
3389 case UNGT_EXPR:
3390 case UNGE_EXPR:
3391 case UNEQ_EXPR:
3392 case LTGT_EXPR:
3393 case PLUS_EXPR:
3394 case MINUS_EXPR:
3395 case MULT_EXPR:
3396 case TRUNC_DIV_EXPR:
3397 case CEIL_DIV_EXPR:
3398 case FLOOR_DIV_EXPR:
3399 case ROUND_DIV_EXPR:
3400 case TRUNC_MOD_EXPR:
3401 case CEIL_MOD_EXPR:
3402 case FLOOR_MOD_EXPR:
3403 case ROUND_MOD_EXPR:
3404 case RDIV_EXPR:
3405 case EXACT_DIV_EXPR:
3406 case MIN_EXPR:
3407 case MAX_EXPR:
3408 case LSHIFT_EXPR:
3409 case RSHIFT_EXPR:
3410 case LROTATE_EXPR:
3411 case RROTATE_EXPR:
3412 case BIT_IOR_EXPR:
3413 case BIT_XOR_EXPR:
3414 case BIT_AND_EXPR:
3415 CHECK_OP (0, "invalid operand to binary operator");
3416 CHECK_OP (1, "invalid operand to binary operator");
3417 break;
3419 case CONSTRUCTOR:
3420 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3421 *walk_subtrees = 0;
3422 break;
3424 default:
3425 break;
3427 return NULL;
3429 #undef CHECK_OP
3433 /* Verify STMT, return true if STMT is not in GIMPLE form.
3434 TODO: Implement type checking. */
3436 static bool
3437 verify_stmt (tree stmt, bool last_in_block)
3439 tree addr;
3441 if (OMP_DIRECTIVE_P (stmt))
3443 /* OpenMP directives are validated by the FE and never operated
3444 on by the optimizers. Furthermore, OMP_FOR may contain
3445 non-gimple expressions when the main index variable has had
3446 its address taken. This does not affect the loop itself
3447 because the header of an OMP_FOR is merely used to determine
3448 how to set up the parallel iteration. */
3449 return false;
3452 if (!is_gimple_stmt (stmt))
3454 error ("is not a valid GIMPLE statement");
3455 goto fail;
3458 addr = walk_tree (&stmt, verify_expr, NULL, NULL);
3459 if (addr)
3461 debug_generic_stmt (addr);
3462 return true;
3465 /* If the statement is marked as part of an EH region, then it is
3466 expected that the statement could throw. Verify that when we
3467 have optimizations that simplify statements such that we prove
3468 that they cannot throw, that we update other data structures
3469 to match. */
3470 if (lookup_stmt_eh_region (stmt) >= 0)
3472 if (!tree_could_throw_p (stmt))
3474 error ("statement marked for throw, but doesn%'t");
3475 goto fail;
3477 if (!last_in_block && tree_can_throw_internal (stmt))
3479 error ("statement marked for throw in middle of block");
3480 goto fail;
3484 return false;
3486 fail:
3487 debug_generic_stmt (stmt);
3488 return true;
3492 /* Return true when T can be shared. */
3494 static bool
3495 tree_node_can_be_shared (tree t)
3497 if (IS_TYPE_OR_DECL_P (t)
3498 || is_gimple_min_invariant (t)
3499 || TREE_CODE (t) == SSA_NAME
3500 || t == error_mark_node
3501 || TREE_CODE (t) == IDENTIFIER_NODE)
3502 return true;
3504 if (TREE_CODE (t) == CASE_LABEL_EXPR)
3505 return true;
3507 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3508 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
3509 || TREE_CODE (t) == COMPONENT_REF
3510 || TREE_CODE (t) == REALPART_EXPR
3511 || TREE_CODE (t) == IMAGPART_EXPR)
3512 t = TREE_OPERAND (t, 0);
3514 if (DECL_P (t))
3515 return true;
3517 return false;
3521 /* Called via walk_tree. Verify tree sharing. */
3523 static tree
3524 verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
3526 struct pointer_set_t *visited = (struct pointer_set_t *) data;
3528 if (tree_node_can_be_shared (*tp))
3530 *walk_subtrees = false;
3531 return NULL;
3534 if (pointer_set_insert (visited, *tp))
3535 return *tp;
3537 return NULL;
3541 /* Helper function for verify_gimple_tuples. */
3543 static tree
3544 verify_gimple_tuples_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
3545 void *data ATTRIBUTE_UNUSED)
3547 switch (TREE_CODE (*tp))
3549 case MODIFY_EXPR:
3550 error ("unexpected non-tuple");
3551 debug_tree (*tp);
3552 gcc_unreachable ();
3553 return NULL_TREE;
3555 default:
3556 return NULL_TREE;
3560 /* Verify that there are no trees that should have been converted to
3561 gimple tuples. Return true if T contains a node that should have
3562 been converted to a gimple tuple, but hasn't. */
3564 static bool
3565 verify_gimple_tuples (tree t)
3567 return walk_tree (&t, verify_gimple_tuples_1, NULL, NULL) != NULL;
3570 static bool eh_error_found;
3571 static int
3572 verify_eh_throw_stmt_node (void **slot, void *data)
3574 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
3575 struct pointer_set_t *visited = (struct pointer_set_t *) data;
3577 if (!pointer_set_contains (visited, node->stmt))
3579 error ("Dead STMT in EH table");
3580 debug_generic_stmt (node->stmt);
3581 eh_error_found = true;
3583 return 0;
3586 /* Verify the GIMPLE statement chain. */
3588 void
3589 verify_stmts (void)
3591 basic_block bb;
3592 block_stmt_iterator bsi;
3593 bool err = false;
3594 struct pointer_set_t *visited, *visited_stmts;
3595 tree addr;
3597 timevar_push (TV_TREE_STMT_VERIFY);
3598 visited = pointer_set_create ();
3599 visited_stmts = pointer_set_create ();
3601 FOR_EACH_BB (bb)
3603 tree phi;
3604 int i;
3606 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
3608 int phi_num_args = PHI_NUM_ARGS (phi);
3610 pointer_set_insert (visited_stmts, phi);
3611 if (bb_for_stmt (phi) != bb)
3613 error ("bb_for_stmt (phi) is set to a wrong basic block");
3614 err |= true;
3617 for (i = 0; i < phi_num_args; i++)
3619 tree t = PHI_ARG_DEF (phi, i);
3620 tree addr;
3622 /* Addressable variables do have SSA_NAMEs but they
3623 are not considered gimple values. */
3624 if (TREE_CODE (t) != SSA_NAME
3625 && TREE_CODE (t) != FUNCTION_DECL
3626 && !is_gimple_val (t))
3628 error ("PHI def is not a GIMPLE value");
3629 debug_generic_stmt (phi);
3630 debug_generic_stmt (t);
3631 err |= true;
3634 addr = walk_tree (&t, verify_expr, (void *) 1, NULL);
3635 if (addr)
3637 debug_generic_stmt (addr);
3638 err |= true;
3641 addr = walk_tree (&t, verify_node_sharing, visited, NULL);
3642 if (addr)
3644 error ("incorrect sharing of tree nodes");
3645 debug_generic_stmt (phi);
3646 debug_generic_stmt (addr);
3647 err |= true;
3652 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
3654 tree stmt = bsi_stmt (bsi);
3656 pointer_set_insert (visited_stmts, stmt);
3657 err |= verify_gimple_tuples (stmt);
3659 if (bb_for_stmt (stmt) != bb)
3661 error ("bb_for_stmt (stmt) is set to a wrong basic block");
3662 err |= true;
3665 bsi_next (&bsi);
3666 err |= verify_stmt (stmt, bsi_end_p (bsi));
3667 addr = walk_tree (&stmt, verify_node_sharing, visited, NULL);
3668 if (addr)
3670 error ("incorrect sharing of tree nodes");
3671 debug_generic_stmt (stmt);
3672 debug_generic_stmt (addr);
3673 err |= true;
3677 eh_error_found = false;
3678 if (get_eh_throw_stmt_table (cfun))
3679 htab_traverse (get_eh_throw_stmt_table (cfun),
3680 verify_eh_throw_stmt_node,
3681 visited_stmts);
3683 if (err | eh_error_found)
3684 internal_error ("verify_stmts failed");
3686 pointer_set_destroy (visited);
3687 pointer_set_destroy (visited_stmts);
3688 verify_histograms ();
3689 timevar_pop (TV_TREE_STMT_VERIFY);
3693 /* Verifies that the flow information is OK. */
3695 static int
3696 tree_verify_flow_info (void)
3698 int err = 0;
3699 basic_block bb;
3700 block_stmt_iterator bsi;
3701 tree stmt;
3702 edge e;
3703 edge_iterator ei;
3705 if (ENTRY_BLOCK_PTR->stmt_list)
3707 error ("ENTRY_BLOCK has a statement list associated with it");
3708 err = 1;
3711 if (EXIT_BLOCK_PTR->stmt_list)
3713 error ("EXIT_BLOCK has a statement list associated with it");
3714 err = 1;
3717 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
3718 if (e->flags & EDGE_FALLTHRU)
3720 error ("fallthru to exit from bb %d", e->src->index);
3721 err = 1;
3724 FOR_EACH_BB (bb)
3726 bool found_ctrl_stmt = false;
3728 stmt = NULL_TREE;
3730 /* Skip labels at the start of the basic block. */
3731 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3733 tree prev_stmt = stmt;
3735 stmt = bsi_stmt (bsi);
3737 if (TREE_CODE (stmt) != LABEL_EXPR)
3738 break;
3740 if (prev_stmt && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
3742 error ("nonlocal label ");
3743 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3744 fprintf (stderr, " is not first in a sequence of labels in bb %d",
3745 bb->index);
3746 err = 1;
3749 if (label_to_block (LABEL_EXPR_LABEL (stmt)) != bb)
3751 error ("label ");
3752 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3753 fprintf (stderr, " to block does not match in bb %d",
3754 bb->index);
3755 err = 1;
3758 if (decl_function_context (LABEL_EXPR_LABEL (stmt))
3759 != current_function_decl)
3761 error ("label ");
3762 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3763 fprintf (stderr, " has incorrect context in bb %d",
3764 bb->index);
3765 err = 1;
3769 /* Verify that body of basic block BB is free of control flow. */
3770 for (; !bsi_end_p (bsi); bsi_next (&bsi))
3772 tree stmt = bsi_stmt (bsi);
3774 if (found_ctrl_stmt)
3776 error ("control flow in the middle of basic block %d",
3777 bb->index);
3778 err = 1;
3781 if (stmt_ends_bb_p (stmt))
3782 found_ctrl_stmt = true;
3784 if (TREE_CODE (stmt) == LABEL_EXPR)
3786 error ("label ");
3787 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3788 fprintf (stderr, " in the middle of basic block %d", bb->index);
3789 err = 1;
3793 bsi = bsi_last (bb);
3794 if (bsi_end_p (bsi))
3795 continue;
3797 stmt = bsi_stmt (bsi);
3799 err |= verify_eh_edges (stmt);
3801 if (is_ctrl_stmt (stmt))
3803 FOR_EACH_EDGE (e, ei, bb->succs)
3804 if (e->flags & EDGE_FALLTHRU)
3806 error ("fallthru edge after a control statement in bb %d",
3807 bb->index);
3808 err = 1;
3812 if (TREE_CODE (stmt) != COND_EXPR)
3814 /* Verify that there are no edges with EDGE_TRUE/FALSE_VALUE set
3815 after anything else but a COND_EXPR. */
3816 FOR_EACH_EDGE (e, ei, bb->succs)
3817 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
3819 error ("true/false edge after a non-COND_EXPR in bb %d",
3820 bb->index);
3821 err = 1;
3825 switch (TREE_CODE (stmt))
3827 case COND_EXPR:
3829 edge true_edge;
3830 edge false_edge;
3831 if (TREE_CODE (COND_EXPR_THEN (stmt)) != GOTO_EXPR
3832 || TREE_CODE (COND_EXPR_ELSE (stmt)) != GOTO_EXPR)
3834 error ("structured COND_EXPR at the end of bb %d", bb->index);
3835 err = 1;
3838 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
3840 if (!true_edge || !false_edge
3841 || !(true_edge->flags & EDGE_TRUE_VALUE)
3842 || !(false_edge->flags & EDGE_FALSE_VALUE)
3843 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3844 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3845 || EDGE_COUNT (bb->succs) >= 3)
3847 error ("wrong outgoing edge flags at end of bb %d",
3848 bb->index);
3849 err = 1;
3852 if (!has_label_p (true_edge->dest,
3853 GOTO_DESTINATION (COND_EXPR_THEN (stmt))))
3855 error ("%<then%> label does not match edge at end of bb %d",
3856 bb->index);
3857 err = 1;
3860 if (!has_label_p (false_edge->dest,
3861 GOTO_DESTINATION (COND_EXPR_ELSE (stmt))))
3863 error ("%<else%> label does not match edge at end of bb %d",
3864 bb->index);
3865 err = 1;
3868 break;
3870 case GOTO_EXPR:
3871 if (simple_goto_p (stmt))
3873 error ("explicit goto at end of bb %d", bb->index);
3874 err = 1;
3876 else
3878 /* FIXME. We should double check that the labels in the
3879 destination blocks have their address taken. */
3880 FOR_EACH_EDGE (e, ei, bb->succs)
3881 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
3882 | EDGE_FALSE_VALUE))
3883 || !(e->flags & EDGE_ABNORMAL))
3885 error ("wrong outgoing edge flags at end of bb %d",
3886 bb->index);
3887 err = 1;
3890 break;
3892 case RETURN_EXPR:
3893 if (!single_succ_p (bb)
3894 || (single_succ_edge (bb)->flags
3895 & (EDGE_FALLTHRU | EDGE_ABNORMAL
3896 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3898 error ("wrong outgoing edge flags at end of bb %d", bb->index);
3899 err = 1;
3901 if (single_succ (bb) != EXIT_BLOCK_PTR)
3903 error ("return edge does not point to exit in bb %d",
3904 bb->index);
3905 err = 1;
3907 break;
3909 case SWITCH_EXPR:
3911 tree prev;
3912 edge e;
3913 size_t i, n;
3914 tree vec;
3916 vec = SWITCH_LABELS (stmt);
3917 n = TREE_VEC_LENGTH (vec);
3919 /* Mark all the destination basic blocks. */
3920 for (i = 0; i < n; ++i)
3922 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3923 basic_block label_bb = label_to_block (lab);
3925 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
3926 label_bb->aux = (void *)1;
3929 /* Verify that the case labels are sorted. */
3930 prev = TREE_VEC_ELT (vec, 0);
3931 for (i = 1; i < n - 1; ++i)
3933 tree c = TREE_VEC_ELT (vec, i);
3934 if (! CASE_LOW (c))
3936 error ("found default case not at end of case vector");
3937 err = 1;
3938 continue;
3940 if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
3942 error ("case labels not sorted: ");
3943 print_generic_expr (stderr, prev, 0);
3944 fprintf (stderr, " is greater than ");
3945 print_generic_expr (stderr, c, 0);
3946 fprintf (stderr, " but comes before it.\n");
3947 err = 1;
3949 prev = c;
3951 if (CASE_LOW (TREE_VEC_ELT (vec, n - 1)))
3953 error ("no default case found at end of case vector");
3954 err = 1;
3957 FOR_EACH_EDGE (e, ei, bb->succs)
3959 if (!e->dest->aux)
3961 error ("extra outgoing edge %d->%d",
3962 bb->index, e->dest->index);
3963 err = 1;
3965 e->dest->aux = (void *)2;
3966 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
3967 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3969 error ("wrong outgoing edge flags at end of bb %d",
3970 bb->index);
3971 err = 1;
3975 /* Check that we have all of them. */
3976 for (i = 0; i < n; ++i)
3978 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3979 basic_block label_bb = label_to_block (lab);
3981 if (label_bb->aux != (void *)2)
3983 error ("missing edge %i->%i",
3984 bb->index, label_bb->index);
3985 err = 1;
3989 FOR_EACH_EDGE (e, ei, bb->succs)
3990 e->dest->aux = (void *)0;
3993 default: ;
3997 if (dom_computed[CDI_DOMINATORS] >= DOM_NO_FAST_QUERY)
3998 verify_dominators (CDI_DOMINATORS);
4000 return err;
4004 /* Updates phi nodes after creating a forwarder block joined
4005 by edge FALLTHRU. */
4007 static void
4008 tree_make_forwarder_block (edge fallthru)
4010 edge e;
4011 edge_iterator ei;
4012 basic_block dummy, bb;
4013 tree phi, new_phi, var;
4015 dummy = fallthru->src;
4016 bb = fallthru->dest;
4018 if (single_pred_p (bb))
4019 return;
4021 /* If we redirected a branch we must create new PHI nodes at the
4022 start of BB. */
4023 for (phi = phi_nodes (dummy); phi; phi = PHI_CHAIN (phi))
4025 var = PHI_RESULT (phi);
4026 new_phi = create_phi_node (var, bb);
4027 SSA_NAME_DEF_STMT (var) = new_phi;
4028 SET_PHI_RESULT (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
4029 add_phi_arg (new_phi, PHI_RESULT (phi), fallthru);
4032 /* Ensure that the PHI node chain is in the same order. */
4033 set_phi_nodes (bb, phi_reverse (phi_nodes (bb)));
4035 /* Add the arguments we have stored on edges. */
4036 FOR_EACH_EDGE (e, ei, bb->preds)
4038 if (e == fallthru)
4039 continue;
4041 flush_pending_stmts (e);
4046 /* Return a non-special label at the head of basic block BB.
4047 Create one if it doesn't exist. */
4049 tree
4050 tree_block_label (basic_block bb)
4052 block_stmt_iterator i, s = bsi_start (bb);
4053 bool first = true;
4054 tree label, stmt;
4056 for (i = s; !bsi_end_p (i); first = false, bsi_next (&i))
4058 stmt = bsi_stmt (i);
4059 if (TREE_CODE (stmt) != LABEL_EXPR)
4060 break;
4061 label = LABEL_EXPR_LABEL (stmt);
4062 if (!DECL_NONLOCAL (label))
4064 if (!first)
4065 bsi_move_before (&i, &s);
4066 return label;
4070 label = create_artificial_label ();
4071 stmt = build1 (LABEL_EXPR, void_type_node, label);
4072 bsi_insert_before (&s, stmt, BSI_NEW_STMT);
4073 return label;
4077 /* Attempt to perform edge redirection by replacing a possibly complex
4078 jump instruction by a goto or by removing the jump completely.
4079 This can apply only if all edges now point to the same block. The
4080 parameters and return values are equivalent to
4081 redirect_edge_and_branch. */
4083 static edge
4084 tree_try_redirect_by_replacing_jump (edge e, basic_block target)
4086 basic_block src = e->src;
4087 block_stmt_iterator b;
4088 tree stmt;
4090 /* We can replace or remove a complex jump only when we have exactly
4091 two edges. */
4092 if (EDGE_COUNT (src->succs) != 2
4093 /* Verify that all targets will be TARGET. Specifically, the
4094 edge that is not E must also go to TARGET. */
4095 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4096 return NULL;
4098 b = bsi_last (src);
4099 if (bsi_end_p (b))
4100 return NULL;
4101 stmt = bsi_stmt (b);
4103 if (TREE_CODE (stmt) == COND_EXPR
4104 || TREE_CODE (stmt) == SWITCH_EXPR)
4106 bsi_remove (&b, true);
4107 e = ssa_redirect_edge (e, target);
4108 e->flags = EDGE_FALLTHRU;
4109 return e;
4112 return NULL;
4116 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4117 edge representing the redirected branch. */
4119 static edge
4120 tree_redirect_edge_and_branch (edge e, basic_block dest)
4122 basic_block bb = e->src;
4123 block_stmt_iterator bsi;
4124 edge ret;
4125 tree label, stmt;
4127 if (e->flags & EDGE_ABNORMAL)
4128 return NULL;
4130 if (e->src != ENTRY_BLOCK_PTR
4131 && (ret = tree_try_redirect_by_replacing_jump (e, dest)))
4132 return ret;
4134 if (e->dest == dest)
4135 return NULL;
4137 label = tree_block_label (dest);
4139 bsi = bsi_last (bb);
4140 stmt = bsi_end_p (bsi) ? NULL : bsi_stmt (bsi);
4142 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
4144 case COND_EXPR:
4145 stmt = (e->flags & EDGE_TRUE_VALUE
4146 ? COND_EXPR_THEN (stmt)
4147 : COND_EXPR_ELSE (stmt));
4148 GOTO_DESTINATION (stmt) = label;
4149 break;
4151 case GOTO_EXPR:
4152 /* No non-abnormal edges should lead from a non-simple goto, and
4153 simple ones should be represented implicitly. */
4154 gcc_unreachable ();
4156 case SWITCH_EXPR:
4158 tree cases = get_cases_for_edge (e, stmt);
4160 /* If we have a list of cases associated with E, then use it
4161 as it's a lot faster than walking the entire case vector. */
4162 if (cases)
4164 edge e2 = find_edge (e->src, dest);
4165 tree last, first;
4167 first = cases;
4168 while (cases)
4170 last = cases;
4171 CASE_LABEL (cases) = label;
4172 cases = TREE_CHAIN (cases);
4175 /* If there was already an edge in the CFG, then we need
4176 to move all the cases associated with E to E2. */
4177 if (e2)
4179 tree cases2 = get_cases_for_edge (e2, stmt);
4181 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4182 TREE_CHAIN (cases2) = first;
4185 else
4187 tree vec = SWITCH_LABELS (stmt);
4188 size_t i, n = TREE_VEC_LENGTH (vec);
4190 for (i = 0; i < n; i++)
4192 tree elt = TREE_VEC_ELT (vec, i);
4194 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4195 CASE_LABEL (elt) = label;
4199 break;
4202 case RETURN_EXPR:
4203 bsi_remove (&bsi, true);
4204 e->flags |= EDGE_FALLTHRU;
4205 break;
4207 default:
4208 /* Otherwise it must be a fallthru edge, and we don't need to
4209 do anything besides redirecting it. */
4210 gcc_assert (e->flags & EDGE_FALLTHRU);
4211 break;
4214 /* Update/insert PHI nodes as necessary. */
4216 /* Now update the edges in the CFG. */
4217 e = ssa_redirect_edge (e, dest);
4219 return e;
4222 /* Returns true if it is possible to remove edge E by redirecting
4223 it to the destination of the other edge from E->src. */
4225 static bool
4226 tree_can_remove_branch_p (edge e)
4228 if (e->flags & EDGE_ABNORMAL)
4229 return false;
4231 return true;
4234 /* Simple wrapper, as we can always redirect fallthru edges. */
4236 static basic_block
4237 tree_redirect_edge_and_branch_force (edge e, basic_block dest)
4239 e = tree_redirect_edge_and_branch (e, dest);
4240 gcc_assert (e);
4242 return NULL;
4246 /* Splits basic block BB after statement STMT (but at least after the
4247 labels). If STMT is NULL, BB is split just after the labels. */
4249 static basic_block
4250 tree_split_block (basic_block bb, void *stmt)
4252 block_stmt_iterator bsi;
4253 tree_stmt_iterator tsi_tgt;
4254 tree act;
4255 basic_block new_bb;
4256 edge e;
4257 edge_iterator ei;
4259 new_bb = create_empty_bb (bb);
4261 /* Redirect the outgoing edges. */
4262 new_bb->succs = bb->succs;
4263 bb->succs = NULL;
4264 FOR_EACH_EDGE (e, ei, new_bb->succs)
4265 e->src = new_bb;
4267 if (stmt && TREE_CODE ((tree) stmt) == LABEL_EXPR)
4268 stmt = NULL;
4270 /* Move everything from BSI to the new basic block. */
4271 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4273 act = bsi_stmt (bsi);
4274 if (TREE_CODE (act) == LABEL_EXPR)
4275 continue;
4277 if (!stmt)
4278 break;
4280 if (stmt == act)
4282 bsi_next (&bsi);
4283 break;
4287 if (bsi_end_p (bsi))
4288 return new_bb;
4290 /* Split the statement list - avoid creating new containers as this
4291 brings ugly quadratic memory consumption in the inliner.
4292 (We are still quadratic since we need to update stmt BB pointers,
4293 sadly.) */
4294 new_bb->stmt_list = tsi_split_statement_list_before (&bsi.tsi);
4295 for (tsi_tgt = tsi_start (new_bb->stmt_list);
4296 !tsi_end_p (tsi_tgt); tsi_next (&tsi_tgt))
4297 change_bb_for_stmt (tsi_stmt (tsi_tgt), new_bb);
4299 return new_bb;
4303 /* Moves basic block BB after block AFTER. */
4305 static bool
4306 tree_move_block_after (basic_block bb, basic_block after)
4308 if (bb->prev_bb == after)
4309 return true;
4311 unlink_block (bb);
4312 link_block (bb, after);
4314 return true;
4318 /* Return true if basic_block can be duplicated. */
4320 static bool
4321 tree_can_duplicate_bb_p (basic_block bb ATTRIBUTE_UNUSED)
4323 return true;
4327 /* Create a duplicate of the basic block BB. NOTE: This does not
4328 preserve SSA form. */
4330 static basic_block
4331 tree_duplicate_bb (basic_block bb)
4333 basic_block new_bb;
4334 block_stmt_iterator bsi, bsi_tgt;
4335 tree phi;
4337 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
4339 /* Copy the PHI nodes. We ignore PHI node arguments here because
4340 the incoming edges have not been set up yet. */
4341 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4343 tree copy = create_phi_node (PHI_RESULT (phi), new_bb);
4344 create_new_def_for (PHI_RESULT (copy), copy, PHI_RESULT_PTR (copy));
4347 /* Keep the chain of PHI nodes in the same order so that they can be
4348 updated by ssa_redirect_edge. */
4349 set_phi_nodes (new_bb, phi_reverse (phi_nodes (new_bb)));
4351 bsi_tgt = bsi_start (new_bb);
4352 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4354 def_operand_p def_p;
4355 ssa_op_iter op_iter;
4356 tree stmt, copy;
4357 int region;
4359 stmt = bsi_stmt (bsi);
4360 if (TREE_CODE (stmt) == LABEL_EXPR)
4361 continue;
4363 /* Create a new copy of STMT and duplicate STMT's virtual
4364 operands. */
4365 copy = unshare_expr (stmt);
4366 bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
4367 copy_virtual_operands (copy, stmt);
4368 region = lookup_stmt_eh_region (stmt);
4369 if (region >= 0)
4370 add_stmt_to_eh_region (copy, region);
4371 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
4373 /* Create new names for all the definitions created by COPY and
4374 add replacement mappings for each new name. */
4375 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
4376 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
4379 return new_bb;
4383 /* Basic block BB_COPY was created by code duplication. Add phi node
4384 arguments for edges going out of BB_COPY. The blocks that were
4385 duplicated have BB_DUPLICATED set. */
4387 void
4388 add_phi_args_after_copy_bb (basic_block bb_copy)
4390 basic_block bb, dest;
4391 edge e, e_copy;
4392 edge_iterator ei;
4393 tree phi, phi_copy, phi_next, def;
4395 bb = get_bb_original (bb_copy);
4397 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
4399 if (!phi_nodes (e_copy->dest))
4400 continue;
4402 if (e_copy->dest->flags & BB_DUPLICATED)
4403 dest = get_bb_original (e_copy->dest);
4404 else
4405 dest = e_copy->dest;
4407 e = find_edge (bb, dest);
4408 if (!e)
4410 /* During loop unrolling the target of the latch edge is copied.
4411 In this case we are not looking for the edge to dest, but for the
4412 edge to the duplicated block whose original was dest. */
4413 FOR_EACH_EDGE (e, ei, bb->succs)
4414 if ((e->dest->flags & BB_DUPLICATED)
4415 && get_bb_original (e->dest) == dest)
4416 break;
4418 gcc_assert (e != NULL);
4421 for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
4422 phi;
4423 phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
4425 phi_next = PHI_CHAIN (phi);
4426 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4427 add_phi_arg (phi_copy, def, e_copy);
4432 /* Blocks in REGION_COPY array of length N_REGION were created by
4433 duplication of basic blocks. Add phi node arguments for edges
4434 going from these blocks. */
4436 void
4437 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region)
4439 unsigned i;
4441 for (i = 0; i < n_region; i++)
4442 region_copy[i]->flags |= BB_DUPLICATED;
4444 for (i = 0; i < n_region; i++)
4445 add_phi_args_after_copy_bb (region_copy[i]);
4447 for (i = 0; i < n_region; i++)
4448 region_copy[i]->flags &= ~BB_DUPLICATED;
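/* A minimal usage sketch for the two helpers above, mirroring what
   tree_duplicate_sese_region does below: the blocks are copied first
   (which records the original/copy pairs consulted by get_bb_original),
   and only then are the PHI arguments on the edges leaving the copies
   filled in.  REGION, REGION_COPY, N_REGION, ENTRY, EXIT and LOOP are
   assumed to be set up as in tree_duplicate_sese_region.

     initialize_original_copy_tables ();
     copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy,
	       loop, split_edge_bb_loc (entry));
     add_phi_args_after_copy (region_copy, n_region);
     ...
     free_original_copy_tables ();  */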
4451 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
4452 important exit edge EXIT. By important we mean that no SSA name defined
4453 inside the region is live over the other exit edges of the region. All entry
4454 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
4455 to the duplicate of the region. SSA form, dominance and loop information
4456 is updated. The new basic blocks are stored to REGION_COPY in the same
4457 order as they had in REGION, provided that REGION_COPY is not NULL.
4458 The function returns false if it is unable to copy the region,
4459 true otherwise. */
4461 bool
4462 tree_duplicate_sese_region (edge entry, edge exit,
4463 basic_block *region, unsigned n_region,
4464 basic_block *region_copy)
4466 unsigned i, n_doms;
4467 bool free_region_copy = false, copying_header = false;
4468 struct loop *loop = entry->dest->loop_father;
4469 edge exit_copy;
4470 basic_block *doms;
4471 edge redirected;
4472 int total_freq = 0, entry_freq = 0;
4473 gcov_type total_count = 0, entry_count = 0;
4475 if (!can_copy_bbs_p (region, n_region))
4476 return false;
4478 /* Some sanity checking. Note that we do not check for all possible
4479 misuses of the functions. That is, if you ask to copy something weird,
4480 it will work, but the state of structures probably will not be
4481 correct. */
4482 for (i = 0; i < n_region; i++)
4484 /* We do not handle subloops, i.e. all the blocks must belong to the
4485 same loop. */
4486 if (region[i]->loop_father != loop)
4487 return false;
4489 if (region[i] != entry->dest
4490 && region[i] == loop->header)
4491 return false;
4494 loop->copy = loop;
4496 /* In case the function is used for loop header copying (which is the primary
4497 use), ensure that EXIT and its copy will be new latch and entry edges. */
4498 if (loop->header == entry->dest)
4500 copying_header = true;
4501 loop->copy = loop->outer;
4503 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
4504 return false;
4506 for (i = 0; i < n_region; i++)
4507 if (region[i] != exit->src
4508 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
4509 return false;
4512 if (!region_copy)
4514 region_copy = XNEWVEC (basic_block, n_region);
4515 free_region_copy = true;
4518 gcc_assert (!need_ssa_update_p ());
4520 /* Record blocks outside the region that are dominated by something
4521 inside. */
4522 doms = XNEWVEC (basic_block, n_basic_blocks);
4523 initialize_original_copy_tables ();
4525 n_doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region, doms);
4527 if (entry->dest->count)
4529 total_count = entry->dest->count;
4530 entry_count = entry->count;
4531 /* Fix up corner cases, to avoid division by zero or creation of negative
4532 frequencies. */
4533 if (entry_count > total_count)
4534 entry_count = total_count;
4536 else
4538 total_freq = entry->dest->frequency;
4539 entry_freq = EDGE_FREQUENCY (entry);
4540 /* Fix up corner cases, to avoid division by zero or creation of negative
4541 frequencies. */
4542 if (total_freq == 0)
4543 total_freq = 1;
4544 else if (entry_freq > total_freq)
4545 entry_freq = total_freq;
4548 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
4549 split_edge_bb_loc (entry));
4550 if (total_count)
4552 scale_bbs_frequencies_gcov_type (region, n_region,
4553 total_count - entry_count,
4554 total_count);
4555 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
4556 total_count);
4558 else
4560 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
4561 total_freq);
4562 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
4565 if (copying_header)
4567 loop->header = exit->dest;
4568 loop->latch = exit->src;
4571 /* Redirect the entry and add the phi node arguments. */
4572 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
4573 gcc_assert (redirected != NULL);
4574 flush_pending_stmts (entry);
4576 /* Concerning updating of dominators: We must recount dominators
4577 for entry block and its copy. Anything that is outside of the
4578 region, but was dominated by something inside needs recounting as
4579 well. */
4580 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
4581 doms[n_doms++] = get_bb_original (entry->dest);
4582 iterate_fix_dominators (CDI_DOMINATORS, doms, n_doms);
4583 free (doms);
4585 /* Add the other PHI node arguments. */
4586 add_phi_args_after_copy (region_copy, n_region);
4588 /* Update the SSA web. */
4589 update_ssa (TODO_update_ssa);
4591 if (free_region_copy)
4592 free (region_copy);
4594 free_original_copy_tables ();
4595 return true;
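/* A hedged sketch of the primary use mentioned above, loop header
   copying: the caller gathers the blocks that form the header region
   into BBS/N_BBS, picks ENTRY as the edge entering the header (for
   instance the preheader edge) and EXIT as the single important exit
   edge of the region, and then calls

     copied_p = tree_duplicate_sese_region (entry, exit, bbs, n_bbs,
					    copied_bbs);

   COPIED_BBS may be NULL, or an array of N_BBS blocks that receives the
   new copies; COPIED_P says whether the duplication was performed at
   all.  */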
4599 DEF_VEC_P(basic_block);
4600 DEF_VEC_ALLOC_P(basic_block,heap);
4603 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
4604 adding blocks when the dominator traversal reaches EXIT. This
4605 function silently assumes that ENTRY strictly dominates EXIT. */
4607 static void
4608 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
4609 VEC(basic_block,heap) **bbs_p)
4611 basic_block son;
4613 for (son = first_dom_son (CDI_DOMINATORS, entry);
4614 son;
4615 son = next_dom_son (CDI_DOMINATORS, son))
4617 VEC_safe_push (basic_block, heap, *bbs_p, son);
4618 if (son != exit)
4619 gather_blocks_in_sese_region (son, exit, bbs_p);
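/* A minimal sketch of how the walker above is driven (this mirrors
   move_sese_region_to_fn below); ENTRY_BB has to be pushed by hand
   because the dominator walk only visits its children:

     VEC(basic_block,heap) *bbs = NULL;

     calculate_dominance_info (CDI_DOMINATORS);
     VEC_safe_push (basic_block, heap, bbs, entry_bb);
     gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
     ...
     VEC_free (basic_block, heap, bbs);  */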
4624 struct move_stmt_d
4626 tree block;
4627 tree from_context;
4628 tree to_context;
4629 bitmap vars_to_remove;
4630 htab_t new_label_map;
4631 bool remap_decls_p;
4634 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
4635 contained in *TP and change the DECL_CONTEXT of every local
4636 variable referenced in *TP. */
4638 static tree
4639 move_stmt_r (tree *tp, int *walk_subtrees, void *data)
4641 struct move_stmt_d *p = (struct move_stmt_d *) data;
4642 tree t = *tp;
4644 if (p->block
4645 && (EXPR_P (t) || GIMPLE_STMT_P (t)))
4646 TREE_BLOCK (t) = p->block;
4648 if (OMP_DIRECTIVE_P (t)
4649 && TREE_CODE (t) != OMP_RETURN
4650 && TREE_CODE (t) != OMP_CONTINUE)
4652 /* Do not remap variables inside OMP directives. Variables
4653 referenced in clauses and directive header belong to the
4654 parent function and should not be moved into the child
4655 function. */
4656 bool save_remap_decls_p = p->remap_decls_p;
4657 p->remap_decls_p = false;
4658 *walk_subtrees = 0;
4660 walk_tree (&OMP_BODY (t), move_stmt_r, p, NULL);
4662 p->remap_decls_p = save_remap_decls_p;
4664 else if (DECL_P (t) && DECL_CONTEXT (t) == p->from_context)
4666 if (TREE_CODE (t) == LABEL_DECL)
4668 if (p->new_label_map)
4670 struct tree_map in, *out;
4671 in.from = t;
4672 out = htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
4673 if (out)
4674 *tp = t = out->to;
4677 DECL_CONTEXT (t) = p->to_context;
4679 else if (p->remap_decls_p)
4681 DECL_CONTEXT (t) = p->to_context;
4683 if (TREE_CODE (t) == VAR_DECL)
4685 struct function *f = DECL_STRUCT_FUNCTION (p->to_context);
4686 f->unexpanded_var_list
4687 = tree_cons (0, t, f->unexpanded_var_list);
4689 /* Mark T to be removed from the original function,
4690 otherwise it will be given a DECL_RTL when the
4691 original function is expanded. */
4692 bitmap_set_bit (p->vars_to_remove, DECL_UID (t));
4696 else if (TYPE_P (t))
4697 *walk_subtrees = 0;
4699 return NULL_TREE;
4703 /* Move basic block BB from function CFUN to function DEST_FN. The
4704 block is moved out of the original linked list and placed after
4705 block AFTER in the new list. Also, the block is removed from the
4706 original array of blocks and placed in DEST_FN's array of blocks.
4707 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
4708 updated to reflect the moved edges.
4710 On exit, local variables that need to be removed from
4711 CFUN->UNEXPANDED_VAR_LIST will have been added to VARS_TO_REMOVE. */
4713 static void
4714 move_block_to_fn (struct function *dest_cfun, basic_block bb,
4715 basic_block after, bool update_edge_count_p,
4716 bitmap vars_to_remove, htab_t new_label_map, int eh_offset)
4718 struct control_flow_graph *cfg;
4719 edge_iterator ei;
4720 edge e;
4721 block_stmt_iterator si;
4722 struct move_stmt_d d;
4723 unsigned old_len, new_len;
4725 /* Link BB to the new linked list. */
4726 move_block_after (bb, after);
4728 /* Update the edge count in the corresponding flowgraphs. */
4729 if (update_edge_count_p)
4730 FOR_EACH_EDGE (e, ei, bb->succs)
4732 cfun->cfg->x_n_edges--;
4733 dest_cfun->cfg->x_n_edges++;
4736 /* Remove BB from the original basic block array. */
4737 VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
4738 cfun->cfg->x_n_basic_blocks--;
4740 /* Grow DEST_CFUN's basic block array if needed. */
4741 cfg = dest_cfun->cfg;
4742 cfg->x_n_basic_blocks++;
4743 if (bb->index > cfg->x_last_basic_block)
4744 cfg->x_last_basic_block = bb->index;
4746 old_len = VEC_length (basic_block, cfg->x_basic_block_info);
4747 if ((unsigned) cfg->x_last_basic_block >= old_len)
4749 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
4750 VEC_safe_grow_cleared (basic_block, gc, cfg->x_basic_block_info,
4751 new_len);
4754 VEC_replace (basic_block, cfg->x_basic_block_info,
4755 cfg->x_last_basic_block, bb);
4757 /* The statements in BB need to be associated with a new TREE_BLOCK.
4758 Labels need to be associated with a new label-to-block map. */
4759 memset (&d, 0, sizeof (d));
4760 d.vars_to_remove = vars_to_remove;
4762 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
4764 tree stmt = bsi_stmt (si);
4765 int region;
4767 d.from_context = cfun->decl;
4768 d.to_context = dest_cfun->decl;
4769 d.remap_decls_p = true;
4770 d.new_label_map = new_label_map;
4771 if (TREE_BLOCK (stmt))
4772 d.block = DECL_INITIAL (dest_cfun->decl);
4774 walk_tree (&stmt, move_stmt_r, &d, NULL);
4776 if (TREE_CODE (stmt) == LABEL_EXPR)
4778 tree label = LABEL_EXPR_LABEL (stmt);
4779 int uid = LABEL_DECL_UID (label);
4781 gcc_assert (uid > -1);
4783 old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
4784 if (old_len <= (unsigned) uid)
4786 new_len = 3 * uid / 2;
4787 VEC_safe_grow_cleared (basic_block, gc,
4788 cfg->x_label_to_block_map, new_len);
4791 VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
4792 VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
4794 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
4796 if (uid >= dest_cfun->last_label_uid)
4797 dest_cfun->last_label_uid = uid + 1;
4799 else if (TREE_CODE (stmt) == RESX_EXPR && eh_offset != 0)
4800 TREE_OPERAND (stmt, 0) =
4801 build_int_cst (NULL_TREE,
4802 TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0))
4803 + eh_offset);
4805 region = lookup_stmt_eh_region (stmt);
4806 if (region >= 0)
4808 add_stmt_to_eh_region_fn (dest_cfun, stmt, region + eh_offset);
4809 remove_stmt_from_eh_region (stmt);
4810 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
4811 gimple_remove_stmt_histograms (cfun, stmt);
4816 /* Examine the statements in BB (which is in SRC_CFUN); find and return
4817 the outermost EH region. Use REGION as the incoming base EH region. */
4819 static int
4820 find_outermost_region_in_block (struct function *src_cfun,
4821 basic_block bb, int region)
4823 block_stmt_iterator si;
4825 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
4827 tree stmt = bsi_stmt (si);
4828 int stmt_region;
4830 if (TREE_CODE (stmt) == RESX_EXPR)
4831 stmt_region = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
4832 else
4833 stmt_region = lookup_stmt_eh_region_fn (src_cfun, stmt);
4834 if (stmt_region > 0)
4836 if (region < 0)
4837 region = stmt_region;
4838 else if (stmt_region != region)
4840 region = eh_region_outermost (src_cfun, stmt_region, region);
4841 gcc_assert (region != -1);
4846 return region;
4849 static tree
4850 new_label_mapper (tree decl, void *data)
4852 htab_t hash = (htab_t) data;
4853 struct tree_map *m;
4854 void **slot;
4856 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
4858 m = xmalloc (sizeof (struct tree_map));
4859 m->hash = DECL_UID (decl);
4860 m->from = decl;
4861 m->to = create_artificial_label ();
4862 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
4864 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
4865 gcc_assert (*slot == NULL);
4867 *slot = m;
4869 return m->to;
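/* A sketch of how the mapper above is hooked up, mirroring
   move_sese_region_to_fn below: the hash table owns the struct tree_map
   entries (hence the `free' destructor) and maps each old LABEL_DECL to
   its freshly created replacement.

     htab_t map = htab_create (17, tree_map_hash, tree_map_eq, free);
     eh_offset = duplicate_eh_regions (saved_cfun, new_label_mapper,
				       map, region, 0);
     ...
     htab_delete (map);  */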
4872 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
4873 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
4874 single basic block in the original CFG and the new basic block is
4875 returned. DEST_CFUN must not have a CFG yet.
4877 Note that the region need not be a pure SESE region. Blocks inside
4878 the region may contain calls to abort/exit. The only restriction
4879 is that ENTRY_BB should be the only entry point and it must
4880 dominate EXIT_BB.
4882 All local variables referenced in the region are assumed to be in
4883 the corresponding BLOCK_VARS and unexpanded variable lists
4884 associated with DEST_CFUN. */
4886 basic_block
4887 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
4888 basic_block exit_bb)
4890 VEC(basic_block,heap) *bbs;
4891 basic_block after, bb, *entry_pred, *exit_succ;
4892 struct function *saved_cfun;
4893 int *entry_flag, *exit_flag, eh_offset;
4894 unsigned i, num_entry_edges, num_exit_edges;
4895 edge e;
4896 edge_iterator ei;
4897 bitmap vars_to_remove;
4898 htab_t new_label_map;
4900 saved_cfun = cfun;
4902 /* Collect all the blocks in the region. Manually add ENTRY_BB
4903 because it won't be added by dfs_enumerate_from. */
4904 calculate_dominance_info (CDI_DOMINATORS);
4906 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
4907 region. */
4908 gcc_assert (entry_bb != exit_bb
4909 && (!exit_bb
4910 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
4912 bbs = NULL;
4913 VEC_safe_push (basic_block, heap, bbs, entry_bb);
4914 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
4916 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
4917 the predecessor edges to ENTRY_BB and the successor edges to
4918 EXIT_BB so that we can re-attach them to the new basic block that
4919 will replace the region. */
4920 num_entry_edges = EDGE_COUNT (entry_bb->preds);
4921 entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
4922 entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
4923 i = 0;
4924 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
4926 entry_flag[i] = e->flags;
4927 entry_pred[i++] = e->src;
4928 remove_edge (e);
4931 if (exit_bb)
4933 num_exit_edges = EDGE_COUNT (exit_bb->succs);
4934 exit_succ = (basic_block *) xcalloc (num_exit_edges,
4935 sizeof (basic_block));
4936 exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
4937 i = 0;
4938 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
4940 exit_flag[i] = e->flags;
4941 exit_succ[i++] = e->dest;
4942 remove_edge (e);
4945 else
4947 num_exit_edges = 0;
4948 exit_succ = NULL;
4949 exit_flag = NULL;
4952 /* Switch context to the child function to initialize DEST_FN's CFG. */
4953 gcc_assert (dest_cfun->cfg == NULL);
4954 cfun = dest_cfun;
4956 init_empty_tree_cfg ();
4958 /* Initialize EH information for the new function. */
4959 eh_offset = 0;
4960 new_label_map = NULL;
4961 if (saved_cfun->eh)
4963 int region = -1;
4965 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
4966 region = find_outermost_region_in_block (saved_cfun, bb, region);
4968 init_eh_for_function ();
4969 if (region != -1)
4971 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
4972 eh_offset = duplicate_eh_regions (saved_cfun, new_label_mapper,
4973 new_label_map, region, 0);
4977 cfun = saved_cfun;
4979 /* Move blocks from BBS into DEST_CFUN. */
4980 gcc_assert (VEC_length (basic_block, bbs) >= 2);
4981 after = dest_cfun->cfg->x_entry_block_ptr;
4982 vars_to_remove = BITMAP_ALLOC (NULL);
4983 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
4985 /* No need to update edge counts on the last block. They have
4986 already been updated earlier when we detached the region from
4987 the original CFG. */
4988 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, vars_to_remove,
4989 new_label_map, eh_offset);
4990 after = bb;
4993 if (new_label_map)
4994 htab_delete (new_label_map);
4996 /* Remove the variables marked in VARS_TO_REMOVE from
4997 CFUN->UNEXPANDED_VAR_LIST. Otherwise, they will be given a
4998 DECL_RTL in the context of CFUN. */
4999 if (!bitmap_empty_p (vars_to_remove))
5001 tree *p;
5003 for (p = &cfun->unexpanded_var_list; *p; )
5005 tree var = TREE_VALUE (*p);
5006 if (bitmap_bit_p (vars_to_remove, DECL_UID (var)))
5008 *p = TREE_CHAIN (*p);
5009 continue;
5012 p = &TREE_CHAIN (*p);
5016 BITMAP_FREE (vars_to_remove);
5018 /* Rewire the entry and exit blocks. The successor to the entry
5019 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
5020 the child function. Similarly, the predecessor of DEST_FN's
5021 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
5022 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
5023 various CFG manipulation functions get to the right CFG.
5025 FIXME, this is silly. The CFG ought to become a parameter to
5026 these helpers. */
5027 cfun = dest_cfun;
5028 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
5029 if (exit_bb)
5030 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
5031 cfun = saved_cfun;
5033 /* Back in the original function, the SESE region has disappeared,
5034 create a new basic block in its place. */
5035 bb = create_empty_bb (entry_pred[0]);
5036 for (i = 0; i < num_entry_edges; i++)
5037 make_edge (entry_pred[i], bb, entry_flag[i]);
5039 for (i = 0; i < num_exit_edges; i++)
5040 make_edge (bb, exit_succ[i], exit_flag[i]);
5042 if (exit_bb)
5044 free (exit_flag);
5045 free (exit_succ);
5047 free (entry_flag);
5048 free (entry_pred);
5049 free_dominance_info (CDI_DOMINATORS);
5050 free_dominance_info (CDI_POST_DOMINATORS);
5051 VEC_free (basic_block, heap, bbs);
5053 return bb;
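/* A sketch of the intended use, outlining a region into a freshly
   created function (for example during OpenMP expansion); CHILD_FN is
   assumed to be the FUNCTION_DECL that receives the blocks between
   ENTRY_BB and EXIT_BB:

     struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);
     basic_block merge_bb;

     merge_bb = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb);

   On return MERGE_BB is the single block that replaces the region in
   the original CFG, while CHILD_CFUN owns a fresh CFG holding the moved
   blocks.  */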
5057 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h) */
5059 void
5060 dump_function_to_file (tree fn, FILE *file, int flags)
5062 tree arg, vars, var;
5063 bool ignore_topmost_bind = false, any_var = false;
5064 basic_block bb;
5065 tree chain;
5066 struct function *saved_cfun;
5068 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
5070 arg = DECL_ARGUMENTS (fn);
5071 while (arg)
5073 print_generic_expr (file, arg, dump_flags);
5074 if (TREE_CHAIN (arg))
5075 fprintf (file, ", ");
5076 arg = TREE_CHAIN (arg);
5078 fprintf (file, ")\n");
5080 if (flags & TDF_DETAILS)
5081 dump_eh_tree (file, DECL_STRUCT_FUNCTION (fn));
5082 if (flags & TDF_RAW)
5084 dump_node (fn, TDF_SLIM | flags, file);
5085 return;
5088 /* Switch CFUN to point to FN. */
5089 saved_cfun = cfun;
5090 cfun = DECL_STRUCT_FUNCTION (fn);
5092 /* When GIMPLE is lowered, the variables are no longer available in
5093 BIND_EXPRs, so display them separately. */
5094 if (cfun && cfun->decl == fn && cfun->unexpanded_var_list)
5096 ignore_topmost_bind = true;
5098 fprintf (file, "{\n");
5099 for (vars = cfun->unexpanded_var_list; vars; vars = TREE_CHAIN (vars))
5101 var = TREE_VALUE (vars);
5103 print_generic_decl (file, var, flags);
5104 fprintf (file, "\n");
5106 any_var = true;
5110 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
5112 /* Make a CFG based dump. */
5113 check_bb_profile (ENTRY_BLOCK_PTR, file);
5114 if (!ignore_topmost_bind)
5115 fprintf (file, "{\n");
5117 if (any_var && n_basic_blocks)
5118 fprintf (file, "\n");
5120 FOR_EACH_BB (bb)
5121 dump_generic_bb (file, bb, 2, flags);
5123 fprintf (file, "}\n");
5124 check_bb_profile (EXIT_BLOCK_PTR, file);
5126 else
5128 int indent;
5130 /* Make a tree based dump. */
5131 chain = DECL_SAVED_TREE (fn);
5133 if (chain && TREE_CODE (chain) == BIND_EXPR)
5135 if (ignore_topmost_bind)
5137 chain = BIND_EXPR_BODY (chain);
5138 indent = 2;
5140 else
5141 indent = 0;
5143 else
5145 if (!ignore_topmost_bind)
5146 fprintf (file, "{\n");
5147 indent = 2;
5150 if (any_var)
5151 fprintf (file, "\n");
5153 print_generic_stmt_indented (file, chain, flags, indent);
5154 if (ignore_topmost_bind)
5155 fprintf (file, "}\n");
5158 fprintf (file, "\n\n");
5160 /* Restore CFUN. */
5161 cfun = saved_cfun;
5165 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
5167 void
5168 debug_function (tree fn, int flags)
5170 dump_function_to_file (fn, stderr, flags);
5174 /* Pretty print of the loops intermediate representation. */
5175 static void print_loop (FILE *, struct loop *, int);
5176 static void print_pred_bbs (FILE *, basic_block bb);
5177 static void print_succ_bbs (FILE *, basic_block bb);
5180 /* Print on FILE the indexes for the predecessors of basic_block BB. */
5182 static void
5183 print_pred_bbs (FILE *file, basic_block bb)
5185 edge e;
5186 edge_iterator ei;
5188 FOR_EACH_EDGE (e, ei, bb->preds)
5189 fprintf (file, "bb_%d ", e->src->index);
5193 /* Print on FILE the indexes for the successors of basic_block BB. */
5195 static void
5196 print_succ_bbs (FILE *file, basic_block bb)
5198 edge e;
5199 edge_iterator ei;
5201 FOR_EACH_EDGE (e, ei, bb->succs)
5202 fprintf (file, "bb_%d ", e->dest->index);
5206 /* Pretty print LOOP on FILE, indented INDENT spaces. */
5208 static void
5209 print_loop (FILE *file, struct loop *loop, int indent)
5211 char *s_indent;
5212 basic_block bb;
5214 if (loop == NULL)
5215 return;
5217 s_indent = (char *) alloca ((size_t) indent + 1);
5218 memset ((void *) s_indent, ' ', (size_t) indent);
5219 s_indent[indent] = '\0';
5221 /* Print the loop's header. */
5222 fprintf (file, "%sloop_%d\n", s_indent, loop->num);
5224 /* Print the loop's body. */
5225 fprintf (file, "%s{\n", s_indent);
5226 FOR_EACH_BB (bb)
5227 if (bb->loop_father == loop)
5229 /* Print the basic_block's header. */
5230 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
5231 print_pred_bbs (file, bb);
5232 fprintf (file, "}, succs = {");
5233 print_succ_bbs (file, bb);
5234 fprintf (file, "})\n");
5236 /* Print the basic_block's body. */
5237 fprintf (file, "%s {\n", s_indent);
5238 tree_dump_bb (bb, file, indent + 4);
5239 fprintf (file, "%s }\n", s_indent);
5242 print_loop (file, loop->inner, indent + 2);
5243 fprintf (file, "%s}\n", s_indent);
5244 print_loop (file, loop->next, indent);
5248 /* Follow a CFG edge from the entry point of the program, and on entry
5249 of a loop, pretty print the loop structure on FILE. */
5251 void
5252 print_loop_ir (FILE *file)
5254 basic_block bb;
5256 bb = BASIC_BLOCK (NUM_FIXED_BLOCKS);
5257 if (bb && bb->loop_father)
5258 print_loop (file, bb->loop_father, 0);
5262 /* Debugging loops structure at tree level. */
5264 void
5265 debug_loop_ir (void)
5267 print_loop_ir (stderr);
5271 /* Return true if BB ends with a call, possibly followed by some
5272 instructions that must stay with the call. Return false
5273 otherwise. */
5275 static bool
5276 tree_block_ends_with_call_p (basic_block bb)
5278 block_stmt_iterator bsi = bsi_last (bb);
5279 return get_call_expr_in (bsi_stmt (bsi)) != NULL;
5283 /* Return true if BB ends with a conditional branch. Return false
5284 otherwise. */
5286 static bool
5287 tree_block_ends_with_condjump_p (basic_block bb)
5289 tree stmt = last_stmt (bb);
5290 return (stmt && TREE_CODE (stmt) == COND_EXPR);
5294 /* Return true if we need to add fake edge to exit at statement T.
5295 Helper function for tree_flow_call_edges_add. */
5297 static bool
5298 need_fake_edge_p (tree t)
5300 tree call;
5302 /* NORETURN and LONGJMP calls already have an edge to exit.
5303 CONST and PURE calls do not need one.
5304 We don't currently check for CONST and PURE here, although
5305 it would be a good idea, because those attributes are
5306 figured out from the RTL in mark_constant_function, and
5307 the counter incrementation code from -fprofile-arcs
5308 leads to different results from -fbranch-probabilities. */
5309 call = get_call_expr_in (t);
5310 if (call
5311 && !(call_expr_flags (call) & ECF_NORETURN))
5312 return true;
5314 if (TREE_CODE (t) == ASM_EXPR
5315 && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
5316 return true;
5318 return false;
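/* To illustrate the predicate above on a few GIMPLE statements, under
   the usual meaning of the ECF_NORETURN and ASM_VOLATILE_P flags:

     foo ();                        returns true, unless foo is
				    declared noreturn;
     __asm__ __volatile__ ("nop");  returns true;
     a_1 = b_2 + c_3;               returns false.  */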
5322 /* Add fake edges to the function exit for any non-constant and
5323 non-noreturn calls, or volatile inline assembly, in the bitmap of blocks
5324 specified by BLOCKS, or to the whole CFG if BLOCKS is zero. Return
5325 the number of blocks that were split.
5327 The goal is to expose cases in which entering a basic block does
5328 not imply that all subsequent instructions must be executed. */
5330 static int
5331 tree_flow_call_edges_add (sbitmap blocks)
5333 int i;
5334 int blocks_split = 0;
5335 int last_bb = last_basic_block;
5336 bool check_last_block = false;
5338 if (n_basic_blocks == NUM_FIXED_BLOCKS)
5339 return 0;
5341 if (! blocks)
5342 check_last_block = true;
5343 else
5344 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
5346 /* In the last basic block, before epilogue generation, there will be
5347 a fallthru edge to EXIT. Special care is required if the last insn
5348 of the last basic block is a call because make_edge folds duplicate
5349 edges, which would result in the fallthru edge also being marked
5350 fake, which would result in the fallthru edge being removed by
5351 remove_fake_edges, which would result in an invalid CFG.
5353 Moreover, we can't elide the outgoing fake edge, since the block
5354 profiler needs to take this into account in order to solve the minimal
5355 spanning tree in the case that the call doesn't return.
5357 Handle this by adding a dummy instruction in a new last basic block. */
5358 if (check_last_block)
5360 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
5361 block_stmt_iterator bsi = bsi_last (bb);
5362 tree t = NULL_TREE;
5363 if (!bsi_end_p (bsi))
5364 t = bsi_stmt (bsi);
5366 if (t && need_fake_edge_p (t))
5368 edge e;
5370 e = find_edge (bb, EXIT_BLOCK_PTR);
5371 if (e)
5373 bsi_insert_on_edge (e, build_empty_stmt ());
5374 bsi_commit_edge_inserts ();
5379 /* Now add fake edges to the function exit for any non-constant
5380 calls since there is no way that we can determine if they will
5381 return or not... */
5382 for (i = 0; i < last_bb; i++)
5384 basic_block bb = BASIC_BLOCK (i);
5385 block_stmt_iterator bsi;
5386 tree stmt, last_stmt;
5388 if (!bb)
5389 continue;
5391 if (blocks && !TEST_BIT (blocks, i))
5392 continue;
5394 bsi = bsi_last (bb);
5395 if (!bsi_end_p (bsi))
5397 last_stmt = bsi_stmt (bsi);
5400 stmt = bsi_stmt (bsi);
5401 if (need_fake_edge_p (stmt))
5403 edge e;
5404 /* The handling above of the final block before the
5405 epilogue should be enough to verify that there is
5406 no edge to the exit block in CFG already.
5407 Calling make_edge in such case would cause us to
5408 mark that edge as fake and remove it later. */
5409 #ifdef ENABLE_CHECKING
5410 if (stmt == last_stmt)
5412 e = find_edge (bb, EXIT_BLOCK_PTR);
5413 gcc_assert (e == NULL);
5415 #endif
5417 /* Note that the following may create a new basic block
5418 and renumber the existing basic blocks. */
5419 if (stmt != last_stmt)
5421 e = split_block (bb, stmt);
5422 if (e)
5423 blocks_split++;
5425 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
5427 bsi_prev (&bsi);
5429 while (!bsi_end_p (bsi));
5433 if (blocks_split)
5434 verify_flow_info ();
5436 return blocks_split;
5439 /* Purge dead abnormal call edges from basic block BB. */
5441 bool
5442 tree_purge_dead_abnormal_call_edges (basic_block bb)
5444 bool changed = tree_purge_dead_eh_edges (bb);
5446 if (current_function_has_nonlocal_label)
5448 tree stmt = last_stmt (bb);
5449 edge_iterator ei;
5450 edge e;
5452 if (!(stmt && tree_can_make_abnormal_goto (stmt)))
5453 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5455 if (e->flags & EDGE_ABNORMAL)
5457 remove_edge (e);
5458 changed = true;
5460 else
5461 ei_next (&ei);
5464 /* See tree_purge_dead_eh_edges below. */
5465 if (changed)
5466 free_dominance_info (CDI_DOMINATORS);
5469 return changed;
5472 /* Purge dead EH edges from basic block BB. */
5474 bool
5475 tree_purge_dead_eh_edges (basic_block bb)
5477 bool changed = false;
5478 edge e;
5479 edge_iterator ei;
5480 tree stmt = last_stmt (bb);
5482 if (stmt && tree_can_throw_internal (stmt))
5483 return false;
5485 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5487 if (e->flags & EDGE_EH)
5489 remove_edge (e);
5490 changed = true;
5492 else
5493 ei_next (&ei);
5496 /* Removal of dead EH edges might change dominators of not
5497 just immediate successors. E.g. when bb1 is changed so that
5498 it no longer can throw and bb1->bb3 and bb1->bb4 are dead
5499 eh edges purged by this function in:
5500              0
5501             / \
5502            v   v
5503            1-->2
5504           / \  |
5505          v   v |
5506          3-->4 |
5507               \ v
5508              --->5
5511 idom(bb5) must be recomputed. For now just free the dominance
5512 info. */
5513 if (changed)
5514 free_dominance_info (CDI_DOMINATORS);
5516 return changed;
5519 bool
5520 tree_purge_all_dead_eh_edges (bitmap blocks)
5522 bool changed = false;
5523 unsigned i;
5524 bitmap_iterator bi;
5526 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
5528 changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
5531 return changed;
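/* A minimal caller sketch, assuming TO_PURGE collects the indices of
   blocks whose last statement may have stopped throwing (for example
   after statements were folded or removed):

     bitmap to_purge = BITMAP_ALLOC (NULL);

     bitmap_set_bit (to_purge, bb->index);
     ...
     if (tree_purge_all_dead_eh_edges (to_purge))
       cleanup_tree_cfg ();
     BITMAP_FREE (to_purge);  */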
5534 /* This function is called whenever a new edge is created or
5535 redirected. */
5537 static void
5538 tree_execute_on_growing_pred (edge e)
5540 basic_block bb = e->dest;
5542 if (phi_nodes (bb))
5543 reserve_phi_args_for_new_edge (bb);
5546 /* This function is called immediately before edge E is removed from
5547 the edge vector E->dest->preds. */
5549 static void
5550 tree_execute_on_shrinking_pred (edge e)
5552 if (phi_nodes (e->dest))
5553 remove_phi_args (e);
5556 /*---------------------------------------------------------------------------
5557 Helper functions for Loop versioning
5558 ---------------------------------------------------------------------------*/
5560 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
5561 of 'first'. Both of them are dominated by 'new_head' basic block. When
5562 'new_head' was created by splitting 'second's incoming edge, it received
5563 phi arguments on that edge from split_edge(). Later, an additional edge 'e'
5564 was created to connect 'new_head' and 'first'. Now this routine adds to the
5565 additional edge 'e' the phi args that the edge from 'new_head' to 'second'
5566 received as part of the edge splitting. */
5569 static void
5570 tree_lv_adjust_loop_header_phi (basic_block first, basic_block second,
5571 basic_block new_head, edge e)
5573 tree phi1, phi2;
5574 edge e2 = find_edge (new_head, second);
5576 /* Because NEW_HEAD has been created by splitting SECOND's incoming
5577 edge, we should always have an edge from NEW_HEAD to SECOND. */
5578 gcc_assert (e2 != NULL);
5580 /* Browse all 'second' basic block phi nodes and add phi args to
5581 edge 'e' for 'first' head. PHI args are always in correct order. */
5583 for (phi2 = phi_nodes (second), phi1 = phi_nodes (first);
5584 phi2 && phi1;
5585 phi2 = PHI_CHAIN (phi2), phi1 = PHI_CHAIN (phi1))
5587 tree def = PHI_ARG_DEF (phi2, e2->dest_idx);
5588 add_phi_arg (phi1, def, e);
5592 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
5593 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
5594 the destination of the ELSE part. */
5595 static void
5596 tree_lv_add_condition_to_bb (basic_block first_head, basic_block second_head,
5597 basic_block cond_bb, void *cond_e)
5599 block_stmt_iterator bsi;
5600 tree goto1 = NULL_TREE;
5601 tree goto2 = NULL_TREE;
5602 tree new_cond_expr = NULL_TREE;
5603 tree cond_expr = (tree) cond_e;
5604 edge e0;
5606 /* Build new conditional expr */
5607 goto1 = build1 (GOTO_EXPR, void_type_node, tree_block_label (first_head));
5608 goto2 = build1 (GOTO_EXPR, void_type_node, tree_block_label (second_head));
5609 new_cond_expr = build3 (COND_EXPR, void_type_node, cond_expr, goto1, goto2);
5611 /* Add new cond in cond_bb. */
5612 bsi = bsi_start (cond_bb);
5613 bsi_insert_after (&bsi, new_cond_expr, BSI_NEW_STMT);
5614 /* Adjust edges appropriately to connect new head with first head
5615 as well as second head. */
5616 e0 = single_succ_edge (cond_bb);
5617 e0->flags &= ~EDGE_FALLTHRU;
5618 e0->flags |= EDGE_FALSE_VALUE;
5621 struct cfg_hooks tree_cfg_hooks = {
5622 "tree",
5623 tree_verify_flow_info,
5624 tree_dump_bb, /* dump_bb */
5625 create_bb, /* create_basic_block */
5626 tree_redirect_edge_and_branch,/* redirect_edge_and_branch */
5627 tree_redirect_edge_and_branch_force,/* redirect_edge_and_branch_force */
5628 tree_can_remove_branch_p, /* can_remove_branch_p */
5629 remove_bb, /* delete_basic_block */
5630 tree_split_block, /* split_block */
5631 tree_move_block_after, /* move_block_after */
5632 tree_can_merge_blocks_p, /* can_merge_blocks_p */
5633 tree_merge_blocks, /* merge_blocks */
5634 tree_predict_edge, /* predict_edge */
5635 tree_predicted_by_p, /* predicted_by_p */
5636 tree_can_duplicate_bb_p, /* can_duplicate_block_p */
5637 tree_duplicate_bb, /* duplicate_block */
5638 tree_split_edge, /* split_edge */
5639 tree_make_forwarder_block, /* make_forwarder_block */
5640 NULL, /* tidy_fallthru_edge */
5641 tree_block_ends_with_call_p, /* block_ends_with_call_p */
5642 tree_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
5643 tree_flow_call_edges_add, /* flow_call_edges_add */
5644 tree_execute_on_growing_pred, /* execute_on_growing_pred */
5645 tree_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
5646 tree_duplicate_loop_to_header_edge, /* duplicate loop for trees */
5647 tree_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
5648 tree_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
5649 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
5650 flush_pending_stmts /* flush_pending_stmts */
5654 /* Split all critical edges. */
5656 static unsigned int
5657 split_critical_edges (void)
5659 basic_block bb;
5660 edge e;
5661 edge_iterator ei;
5663 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
5664 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
5665 mappings around the calls to split_edge. */
5666 start_recording_case_labels ();
5667 FOR_ALL_BB (bb)
5669 FOR_EACH_EDGE (e, ei, bb->succs)
5670 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
5672 split_edge (e);
5675 end_recording_case_labels ();
5676 return 0;
5679 struct tree_opt_pass pass_split_crit_edges =
5681 "crited", /* name */
5682 NULL, /* gate */
5683 split_critical_edges, /* execute */
5684 NULL, /* sub */
5685 NULL, /* next */
5686 0, /* static_pass_number */
5687 TV_TREE_SPLIT_EDGES, /* tv_id */
5688 PROP_cfg, /* properties required */
5689 PROP_no_crit_edges, /* properties_provided */
5690 0, /* properties_destroyed */
5691 0, /* todo_flags_start */
5692 TODO_dump_func, /* todo_flags_finish */
5693 0 /* letter */
5697 /* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
5698 a temporary, register it to be renamed if necessary, and finally
5699 return the temporary. Put the statements to compute
5700 EXP before the current statement in BSI. */
5702 tree
5703 gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
5705 tree t, new_stmt, orig_stmt;
5707 if (is_gimple_val (exp))
5708 return exp;
5710 t = make_rename_temp (type, NULL);
5711 new_stmt = build2_gimple (GIMPLE_MODIFY_STMT, t, exp);
5713 orig_stmt = bsi_stmt (*bsi);
5714 SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
5715 TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
5717 bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
5718 if (gimple_in_ssa_p (cfun))
5719 mark_symbols_for_renaming (new_stmt);
5721 return t;
5724 /* Build a ternary operation and gimplify it. Emit code before BSI.
5725 Return the gimple_val holding the result. */
5727 tree
5728 gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
5729 tree type, tree a, tree b, tree c)
5731 tree ret;
5733 ret = fold_build3 (code, type, a, b, c);
5734 STRIP_NOPS (ret);
5736 return gimplify_val (bsi, type, ret);
5739 /* Build a binary operation and gimplify it. Emit code before BSI.
5740 Return the gimple_val holding the result. */
5742 tree
5743 gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
5744 tree type, tree a, tree b)
5746 tree ret;
5748 ret = fold_build2 (code, type, a, b);
5749 STRIP_NOPS (ret);
5751 return gimplify_val (bsi, type, ret);
5754 /* Build a unary operation and gimplify it. Emit code before BSI.
5755 Return the gimple_val holding the result. */
5757 tree
5758 gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
5759 tree a)
5761 tree ret;
5763 ret = fold_build1 (code, type, a);
5764 STRIP_NOPS (ret);
5766 return gimplify_val (bsi, type, ret);
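/* A small usage sketch for the helpers above: to materialize A * B + C
   as GIMPLE values right before the statement at *BSI, a pass can write

     tree prod = gimplify_build2 (bsi, MULT_EXPR, type, a, b);
     tree sum = gimplify_build2 (bsi, PLUS_EXPR, type, prod, c);

   Each call folds the expression, forces the result into a temporary
   via gimplify_val, and inserts the computation before the current
   statement, so SUM is a valid GIMPLE operand afterwards.  */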
5771 /* Emit return warnings. */
5773 static unsigned int
5774 execute_warn_function_return (void)
5776 #ifdef USE_MAPPED_LOCATION
5777 source_location location;
5778 #else
5779 location_t *locus;
5780 #endif
5781 tree last;
5782 edge e;
5783 edge_iterator ei;
5785 /* If we have a path to EXIT, then we do return. */
5786 if (TREE_THIS_VOLATILE (cfun->decl)
5787 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
5789 #ifdef USE_MAPPED_LOCATION
5790 location = UNKNOWN_LOCATION;
5791 #else
5792 locus = NULL;
5793 #endif
5794 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5796 last = last_stmt (e->src);
5797 if (TREE_CODE (last) == RETURN_EXPR
5798 #ifdef USE_MAPPED_LOCATION
5799 && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
5800 #else
5801 && (locus = EXPR_LOCUS (last)) != NULL)
5802 #endif
5803 break;
5805 #ifdef USE_MAPPED_LOCATION
5806 if (location == UNKNOWN_LOCATION)
5807 location = cfun->function_end_locus;
5808 warning (0, "%H%<noreturn%> function does return", &location);
5809 #else
5810 if (!locus)
5811 locus = &cfun->function_end_locus;
5812 warning (0, "%H%<noreturn%> function does return", locus);
5813 #endif
5816 /* If we see "return;" in some basic block, then we do reach the end
5817 without returning a value. */
5818 else if (warn_return_type
5819 && !TREE_NO_WARNING (cfun->decl)
5820 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
5821 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
5823 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5825 tree last = last_stmt (e->src);
5826 if (TREE_CODE (last) == RETURN_EXPR
5827 && TREE_OPERAND (last, 0) == NULL
5828 && !TREE_NO_WARNING (last))
5830 #ifdef USE_MAPPED_LOCATION
5831 location = EXPR_LOCATION (last);
5832 if (location == UNKNOWN_LOCATION)
5833 location = cfun->function_end_locus;
5834 warning (0, "%Hcontrol reaches end of non-void function", &location);
5835 #else
5836 locus = EXPR_LOCUS (last);
5837 if (!locus)
5838 locus = &cfun->function_end_locus;
5839 warning (0, "%Hcontrol reaches end of non-void function", locus);
5840 #endif
5841 TREE_NO_WARNING (cfun->decl) = 1;
5842 break;
5846 return 0;
5850 /* Given a basic block B which ends with a conditional and has
5851 precisely two successors, determine which of the edges is taken if
5852 the conditional is true and which is taken if the conditional is
5853 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
5855 void
5856 extract_true_false_edges_from_block (basic_block b,
5857 edge *true_edge,
5858 edge *false_edge)
5860 edge e = EDGE_SUCC (b, 0);
5862 if (e->flags & EDGE_TRUE_VALUE)
5864 *true_edge = e;
5865 *false_edge = EDGE_SUCC (b, 1);
5867 else
5869 *false_edge = e;
5870 *true_edge = EDGE_SUCC (b, 1);
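/* A minimal usage sketch, assuming BB ends in a COND_EXPR and therefore
   has exactly two outgoing edges:

     edge true_edge, false_edge;

     extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

   TRUE_EDGE->dest is then the block reached when the condition is true
   and FALSE_EDGE->dest the block reached when it is false.  */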
5874 struct tree_opt_pass pass_warn_function_return =
5876 NULL, /* name */
5877 NULL, /* gate */
5878 execute_warn_function_return, /* execute */
5879 NULL, /* sub */
5880 NULL, /* next */
5881 0, /* static_pass_number */
5882 0, /* tv_id */
5883 PROP_cfg, /* properties_required */
5884 0, /* properties_provided */
5885 0, /* properties_destroyed */
5886 0, /* todo_flags_start */
5887 0, /* todo_flags_finish */
5888 0 /* letter */
5891 /* Emit noreturn warnings. */
5893 static unsigned int
5894 execute_warn_function_noreturn (void)
5896 if (warn_missing_noreturn
5897 && !TREE_THIS_VOLATILE (cfun->decl)
5898 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
5899 && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl))
5900 warning (OPT_Wmissing_noreturn, "%Jfunction might be possible candidate "
5901 "for attribute %<noreturn%>",
5902 cfun->decl);
5903 return 0;
5906 struct tree_opt_pass pass_warn_function_noreturn =
5908 NULL, /* name */
5909 NULL, /* gate */
5910 execute_warn_function_noreturn, /* execute */
5911 NULL, /* sub */
5912 NULL, /* next */
5913 0, /* static_pass_number */
5914 0, /* tv_id */
5915 PROP_cfg, /* properties_required */
5916 0, /* properties_provided */
5917 0, /* properties_destroyed */
5918 0, /* todo_flags_start */
5919 0, /* todo_flags_finish */
5920 0 /* letter */