2006-03-25 Thomas Koenig <Thomas.Koenig@online.de>
[official-gcc.git] / gcc / tree-cfg.c
blob6f0e947f9432d5e1957bd0aae934c2c0cbb7908d
1 /* Control flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "output.h"
32 #include "flags.h"
33 #include "function.h"
34 #include "expr.h"
35 #include "ggc.h"
36 #include "langhooks.h"
37 #include "diagnostic.h"
38 #include "tree-flow.h"
39 #include "timevar.h"
40 #include "tree-dump.h"
41 #include "tree-pass.h"
42 #include "toplev.h"
43 #include "except.h"
44 #include "cfgloop.h"
45 #include "cfglayout.h"
46 #include "hashtab.h"
47 #include "tree-ssa-propagate.h"
49 /* This file contains functions for building the Control Flow Graph (CFG)
50 for a function tree. */
52 /* Local declarations. */
54 /* Initial capacity for the basic block array. */
55 static const int initial_cfg_capacity = 20;
57 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
58 which use a particular edge. The CASE_LABEL_EXPRs are chained together
59 via their TREE_CHAIN field, which we clear after we're done with the
60 hash table to prevent problems with duplication of SWITCH_EXPRs.
62 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
63 update the case vector in response to edge redirections.
65 Right now this table is set up and torn down at key points in the
66 compilation process. It would be nice if we could make the table
67 more persistent. The key is getting notification of changes to
68 the CFG (particularly edge removal, creation and redirection). */
70 struct edge_to_cases_elt
72 /* The edge itself. Necessary for hashing and equality tests. */
73 edge e;
75 /* The case labels associated with this edge. We link these up via
76 their TREE_CHAIN field, then we wipe out the TREE_CHAIN fields
77 when we destroy the hash table. This prevents problems when copying
78 SWITCH_EXPRs. */
79 tree case_labels;
82 static htab_t edge_to_cases;
84 /* CFG statistics. */
85 struct cfg_stats_d
87 long num_merged_labels;
90 static struct cfg_stats_d cfg_stats;
92 /* Nonzero if we found a computed goto while building basic blocks. */
93 static bool found_computed_goto;
95 /* Basic blocks and flowgraphs. */
96 static basic_block create_bb (void *, void *, basic_block);
97 static void make_blocks (tree);
98 static void factor_computed_gotos (void);
100 /* Edges. */
101 static void make_edges (void);
102 static void make_ctrl_stmt_edges (basic_block);
103 static void make_exit_edges (basic_block);
104 static void make_cond_expr_edges (basic_block);
105 static void make_switch_expr_edges (basic_block);
106 static void make_goto_expr_edges (basic_block);
107 static edge tree_redirect_edge_and_branch (edge, basic_block);
108 static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
109 static unsigned int split_critical_edges (void);
111 /* Various helpers. */
112 static inline bool stmt_starts_bb_p (tree, tree);
113 static int tree_verify_flow_info (void);
114 static void tree_make_forwarder_block (edge);
115 static void tree_cfg2vcg (FILE *);
117 /* Flowgraph optimization and cleanup. */
118 static void tree_merge_blocks (basic_block, basic_block);
119 static bool tree_can_merge_blocks_p (basic_block, basic_block);
120 static void remove_bb (basic_block);
121 static edge find_taken_edge_computed_goto (basic_block, tree);
122 static edge find_taken_edge_cond_expr (basic_block, tree);
123 static edge find_taken_edge_switch_expr (basic_block, tree);
124 static tree find_case_label_for_value (tree, tree);
126 void
127 init_empty_tree_cfg (void)
129 /* Initialize the basic block array. */
130 init_flow ();
131 profile_status = PROFILE_ABSENT;
132 n_basic_blocks = NUM_FIXED_BLOCKS;
133 last_basic_block = NUM_FIXED_BLOCKS;
134 basic_block_info = VEC_alloc (basic_block, gc, initial_cfg_capacity);
135 VEC_safe_grow (basic_block, gc, basic_block_info, initial_cfg_capacity);
136 memset (VEC_address (basic_block, basic_block_info), 0,
137 sizeof (basic_block) * initial_cfg_capacity);
139 /* Build a mapping of labels to their associated blocks. */
140 label_to_block_map = VEC_alloc (basic_block, gc, initial_cfg_capacity);
141 VEC_safe_grow (basic_block, gc, label_to_block_map, initial_cfg_capacity);
142 memset (VEC_address (basic_block, label_to_block_map),
143 0, sizeof (basic_block) * initial_cfg_capacity);
145 SET_BASIC_BLOCK (ENTRY_BLOCK, ENTRY_BLOCK_PTR);
146 SET_BASIC_BLOCK (EXIT_BLOCK, EXIT_BLOCK_PTR);
147 ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
148 EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
151 /*---------------------------------------------------------------------------
152 Create basic blocks
153 ---------------------------------------------------------------------------*/
155 /* Entry point to the CFG builder for trees. TP points to the list of
156 statements to be added to the flowgraph. */
158 static void
159 build_tree_cfg (tree *tp)
161 /* Register specific tree functions. */
162 tree_register_cfg_hooks ();
164 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
166 init_empty_tree_cfg ();
168 found_computed_goto = 0;
169 make_blocks (*tp);
171 /* Computed gotos are hell to deal with, especially if there are
172 lots of them with a large number of destinations. So we factor
173 them to a common computed goto location before we build the
174 edge list. After we convert back to normal form, we will un-factor
175 the computed gotos since factoring introduces an unwanted jump. */
176 if (found_computed_goto)
177 factor_computed_gotos ();
179 /* Make sure there is always at least one block, even if it's empty. */
180 if (n_basic_blocks == NUM_FIXED_BLOCKS)
181 create_empty_bb (ENTRY_BLOCK_PTR);
183 /* Adjust the size of the array. */
184 if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
186 size_t old_size = VEC_length (basic_block, basic_block_info);
187 basic_block *p;
188 VEC_safe_grow (basic_block, gc, basic_block_info, n_basic_blocks);
189 p = VEC_address (basic_block, basic_block_info);
190 memset (&p[old_size], 0,
191 sizeof (basic_block) * (n_basic_blocks - old_size));
194 /* To speed up statement iterator walks, we first purge dead labels. */
195 cleanup_dead_labels ();
197 /* Group case nodes to reduce the number of edges.
198 We do this after cleaning up dead labels because otherwise we miss
199 a lot of obvious case merging opportunities. */
200 group_case_labels ();
202 /* Create the edges of the flowgraph. */
203 make_edges ();
205 /* Debugging dumps. */
207 /* Write the flowgraph to a VCG file. */
209 int local_dump_flags;
210 FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
211 if (vcg_file)
213 tree_cfg2vcg (vcg_file);
214 dump_end (TDI_vcg, vcg_file);
218 #ifdef ENABLE_CHECKING
219 verify_stmts ();
220 #endif
222 /* Dump a textual representation of the flowgraph. */
223 if (dump_file)
224 dump_tree_cfg (dump_file, dump_flags);
227 static unsigned int
228 execute_build_cfg (void)
230 build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
231 return 0;
234 struct tree_opt_pass pass_build_cfg =
236 "cfg", /* name */
237 NULL, /* gate */
238 execute_build_cfg, /* execute */
239 NULL, /* sub */
240 NULL, /* next */
241 0, /* static_pass_number */
242 TV_TREE_CFG, /* tv_id */
243 PROP_gimple_leh, /* properties_required */
244 PROP_cfg, /* properties_provided */
245 0, /* properties_destroyed */
246 0, /* todo_flags_start */
247 TODO_verify_stmts, /* todo_flags_finish */
248 0 /* letter */
251 /* Search the CFG for any computed gotos. If found, factor them to a
252 common computed goto site. Also record the location of that site so
253 that we can un-factor the gotos after we have converted back to
254 normal form. */
256 static void
257 factor_computed_gotos (void)
259 basic_block bb;
260 tree factored_label_decl = NULL;
261 tree var = NULL;
262 tree factored_computed_goto_label = NULL;
263 tree factored_computed_goto = NULL;
265 /* We know there are one or more computed gotos in this function.
266 Examine the last statement in each basic block to see if the block
267 ends with a computed goto. */
269 FOR_EACH_BB (bb)
271 block_stmt_iterator bsi = bsi_last (bb);
272 tree last;
274 if (bsi_end_p (bsi))
275 continue;
276 last = bsi_stmt (bsi);
278 /* Ignore the computed goto we create when we factor the original
279 computed gotos. */
280 if (last == factored_computed_goto)
281 continue;
283 /* If the last statement is a computed goto, factor it. */
284 if (computed_goto_p (last))
286 tree assignment;
288 /* The first time we find a computed goto we need to create
289 the factored goto block and the variable each original
290 computed goto will use for their goto destination. */
291 if (! factored_computed_goto)
293 basic_block new_bb = create_empty_bb (bb);
294 block_stmt_iterator new_bsi = bsi_start (new_bb);
296 /* Create the destination of the factored goto. Each original
297 computed goto will put its desired destination into this
298 variable and jump to the label we create immediately
299 below. */
300 var = create_tmp_var (ptr_type_node, "gotovar");
302 /* Build a label for the new block which will contain the
303 factored computed goto. */
304 factored_label_decl = create_artificial_label ();
305 factored_computed_goto_label
306 = build1 (LABEL_EXPR, void_type_node, factored_label_decl);
307 bsi_insert_after (&new_bsi, factored_computed_goto_label,
308 BSI_NEW_STMT);
310 /* Build our new computed goto. */
311 factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
312 bsi_insert_after (&new_bsi, factored_computed_goto,
313 BSI_NEW_STMT);
316 /* Copy the original computed goto's destination into VAR. */
317 assignment = build2 (MODIFY_EXPR, ptr_type_node,
318 var, GOTO_DESTINATION (last));
319 bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);
321 /* And re-vector the computed goto to the new destination. */
322 GOTO_DESTINATION (last) = factored_label_decl;
328 /* Build a flowgraph for the statement_list STMT_LIST. */
330 static void
331 make_blocks (tree stmt_list)
333 tree_stmt_iterator i = tsi_start (stmt_list);
334 tree stmt = NULL;
335 bool start_new_block = true;
336 bool first_stmt_of_list = true;
337 basic_block bb = ENTRY_BLOCK_PTR;
339 while (!tsi_end_p (i))
341 tree prev_stmt;
343 prev_stmt = stmt;
344 stmt = tsi_stmt (i);
346 /* If the statement starts a new basic block or if we have determined
347 in a previous pass that we need to create a new block for STMT, do
348 so now. */
349 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
351 if (!first_stmt_of_list)
352 stmt_list = tsi_split_statement_list_before (&i);
353 bb = create_basic_block (stmt_list, NULL, bb);
354 start_new_block = false;
357 /* Now add STMT to BB and create the subgraphs for special statement
358 codes. */
359 set_bb_for_stmt (stmt, bb);
361 if (computed_goto_p (stmt))
362 found_computed_goto = true;
364 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
365 next iteration. */
366 if (stmt_ends_bb_p (stmt))
367 start_new_block = true;
369 tsi_next (&i);
370 first_stmt_of_list = false;
375 /* Create and return a new empty basic block after bb AFTER. */
377 static basic_block
378 create_bb (void *h, void *e, basic_block after)
380 basic_block bb;
382 gcc_assert (!e);
384 /* Create and initialize a new basic block. Since alloc_block uses
385 ggc_alloc_cleared to allocate a basic block, we do not have to
386 clear the newly allocated basic block here. */
387 bb = alloc_block ();
389 bb->index = last_basic_block;
390 bb->flags = BB_NEW;
391 bb->stmt_list = h ? (tree) h : alloc_stmt_list ();
393 /* Add the new block to the linked list of blocks. */
394 link_block (bb, after);
396 /* Grow the basic block array if needed. */
397 if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
399 size_t old_size = VEC_length (basic_block, basic_block_info);
400 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
401 basic_block *p;
402 VEC_safe_grow (basic_block, gc, basic_block_info, new_size);
403 p = VEC_address (basic_block, basic_block_info);
404 memset (&p[old_size], 0, sizeof (basic_block) * (new_size - old_size));
407 /* Add the newly created block to the array. */
408 SET_BASIC_BLOCK (last_basic_block, bb);
410 n_basic_blocks++;
411 last_basic_block++;
413 return bb;
417 /*---------------------------------------------------------------------------
418 Edge creation
419 ---------------------------------------------------------------------------*/
421 /* Fold COND_EXPR_COND of each COND_EXPR. */
423 void
424 fold_cond_expr_cond (void)
426 basic_block bb;
428 FOR_EACH_BB (bb)
430 tree stmt = last_stmt (bb);
432 if (stmt
433 && TREE_CODE (stmt) == COND_EXPR)
435 tree cond = fold (COND_EXPR_COND (stmt));
436 if (integer_zerop (cond))
437 COND_EXPR_COND (stmt) = boolean_false_node;
438 else if (integer_onep (cond))
439 COND_EXPR_COND (stmt) = boolean_true_node;
444 /* Join all the blocks in the flowgraph. */
446 static void
447 make_edges (void)
449 basic_block bb;
451 /* Create an edge from entry to the first block with executable
452 statements in it. */
453 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);
455 /* Traverse the basic block array placing edges. */
456 FOR_EACH_BB (bb)
458 tree first = first_stmt (bb);
459 tree last = last_stmt (bb);
461 if (first)
463 /* Edges for statements that always alter flow control. */
464 if (is_ctrl_stmt (last))
465 make_ctrl_stmt_edges (bb);
467 /* Edges for statements that sometimes alter flow control. */
468 if (is_ctrl_altering_stmt (last))
469 make_exit_edges (bb);
472 /* Finally, if no edges were created above, this is a regular
473 basic block that only needs a fallthru edge. */
474 if (EDGE_COUNT (bb->succs) == 0)
475 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
478 /* We do not care about fake edges, so remove any that the CFG
479 builder inserted for completeness. */
480 remove_fake_exit_edges ();
482 /* Fold COND_EXPR_COND of each COND_EXPR. */
483 fold_cond_expr_cond ();
485 /* Clean up the graph and warn for unreachable code. */
486 cleanup_tree_cfg ();
490 /* Link an OMP_SECTIONS block to all the OMP_SECTION blocks in its body. */
492 static void
493 make_omp_sections_edges (basic_block bb)
495 basic_block exit_bb;
496 size_t i, n;
497 tree vec, stmt;
499 stmt = last_stmt (bb);
500 vec = OMP_SECTIONS_SECTIONS (stmt);
501 n = TREE_VEC_LENGTH (vec);
502 exit_bb = bb_for_stmt (TREE_VEC_ELT (vec, n - 1));
504 for (i = 0; i < n - 1; i += 2)
506 basic_block start_bb = bb_for_stmt (TREE_VEC_ELT (vec, i));
507 basic_block end_bb = bb_for_stmt (TREE_VEC_ELT (vec, i + 1));
508 make_edge (bb, start_bb, EDGE_ABNORMAL);
509 make_edge (end_bb, exit_bb, EDGE_FALLTHRU);
512 /* Once the CFG has been built, the vector of sections is no longer
513 useful. The region can be easily obtained with build_omp_regions.
514 Furthermore, this sharing of tree expressions is not allowed by the
515 statement verifier. */
516 OMP_SECTIONS_SECTIONS (stmt) = NULL_TREE;
521 /* Create edges for control statement at basic block BB. */
523 static void
524 make_ctrl_stmt_edges (basic_block bb)
526 tree last = last_stmt (bb);
528 gcc_assert (last);
529 switch (TREE_CODE (last))
531 case GOTO_EXPR:
532 make_goto_expr_edges (bb);
533 break;
535 case RETURN_EXPR:
536 make_edge (bb, EXIT_BLOCK_PTR, 0);
537 break;
539 case COND_EXPR:
540 make_cond_expr_edges (bb);
541 break;
543 case SWITCH_EXPR:
544 make_switch_expr_edges (bb);
545 break;
547 case RESX_EXPR:
548 make_eh_edges (last);
549 /* Yet another NORETURN hack. */
550 if (EDGE_COUNT (bb->succs) == 0)
551 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
552 break;
554 default:
555 gcc_unreachable ();
560 /* Create exit edges for statements in block BB that alter the flow of
561 control. Statements that alter the control flow are 'goto', 'return'
562 and calls to non-returning functions. */
564 static void
565 make_exit_edges (basic_block bb)
567 tree last = last_stmt (bb), op;
569 gcc_assert (last);
570 switch (TREE_CODE (last))
572 case RESX_EXPR:
573 break;
574 case CALL_EXPR:
575 /* If this function receives a nonlocal goto, then we need to
576 make edges from this call site to all the nonlocal goto
577 handlers. */
578 if (TREE_SIDE_EFFECTS (last)
579 && current_function_has_nonlocal_label)
580 make_goto_expr_edges (bb);
582 /* If this statement has reachable exception handlers, then
583 create abnormal edges to them. */
584 make_eh_edges (last);
586 /* Some calls are known not to return. For such calls we create
587 a fake edge.
589 We really need to revamp how we build edges so that it's not
590 such a bloody pain to avoid creating edges for this case since
591 all we do is remove these edges when we're done building the
592 CFG. */
593 if (call_expr_flags (last) & ECF_NORETURN)
595 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
596 return;
599 /* Don't forget the fall-thru edge. */
600 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
601 break;
603 case MODIFY_EXPR:
604 /* A MODIFY_EXPR may have a CALL_EXPR on its RHS and the CALL_EXPR
605 may have an abnormal edge. Search the RHS for this case and
606 create any required edges. */
607 op = get_call_expr_in (last);
608 if (op && TREE_SIDE_EFFECTS (op)
609 && current_function_has_nonlocal_label)
610 make_goto_expr_edges (bb);
612 make_eh_edges (last);
613 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
614 break;
616 case OMP_PARALLEL:
617 case OMP_FOR:
618 case OMP_SINGLE:
619 case OMP_MASTER:
620 case OMP_ORDERED:
621 case OMP_CRITICAL:
622 make_edge (bb, bb->next_bb, EDGE_ABNORMAL);
624 case OMP_RETURN_EXPR:
625 if (EDGE_COUNT (bb->succs) == 0)
626 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
627 break;
629 case OMP_SECTIONS:
630 make_omp_sections_edges (bb);
631 break;
633 case OMP_SECTION:
634 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
635 break;
637 default:
638 gcc_unreachable ();
643 /* Create the edges for a COND_EXPR starting at block BB.
644 At this point, both clauses must contain only simple gotos. */
646 static void
647 make_cond_expr_edges (basic_block bb)
649 tree entry = last_stmt (bb);
650 basic_block then_bb, else_bb;
651 tree then_label, else_label;
652 edge e;
654 gcc_assert (entry);
655 gcc_assert (TREE_CODE (entry) == COND_EXPR);
657 /* Entry basic blocks for each component. */
658 then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
659 else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
660 then_bb = label_to_block (then_label);
661 else_bb = label_to_block (else_label);
663 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
664 #ifdef USE_MAPPED_LOCATION
665 e->goto_locus = EXPR_LOCATION (COND_EXPR_THEN (entry));
666 #else
667 e->goto_locus = EXPR_LOCUS (COND_EXPR_THEN (entry));
668 #endif
669 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
670 if (e)
672 #ifdef USE_MAPPED_LOCATION
673 e->goto_locus = EXPR_LOCATION (COND_EXPR_ELSE (entry));
674 #else
675 e->goto_locus = EXPR_LOCUS (COND_EXPR_ELSE (entry));
676 #endif
680 /* Hashing routine for EDGE_TO_CASES. */
682 static hashval_t
683 edge_to_cases_hash (const void *p)
685 edge e = ((struct edge_to_cases_elt *)p)->e;
687 /* Hash on the edge itself (which is a pointer). */
688 return htab_hash_pointer (e);
691 /* Equality routine for EDGE_TO_CASES, edges are unique, so testing
692 for equality is just a pointer comparison. */
694 static int
695 edge_to_cases_eq (const void *p1, const void *p2)
697 edge e1 = ((struct edge_to_cases_elt *)p1)->e;
698 edge e2 = ((struct edge_to_cases_elt *)p2)->e;
700 return e1 == e2;
703 /* Called for each element in the hash table (P) as we delete the
704 edge to cases hash table.
706 Clear all the TREE_CHAINs to prevent problems with copying of
707 SWITCH_EXPRs and structure sharing rules, then free the hash table
708 element. */
710 static void
711 edge_to_cases_cleanup (void *p)
713 struct edge_to_cases_elt *elt = (struct edge_to_cases_elt *) p;
714 tree t, next;
716 for (t = elt->case_labels; t; t = next)
718 next = TREE_CHAIN (t);
719 TREE_CHAIN (t) = NULL;
721 free (p);
724 /* Start recording information mapping edges to case labels. */
726 void
727 start_recording_case_labels (void)
729 gcc_assert (edge_to_cases == NULL);
731 edge_to_cases = htab_create (37,
732 edge_to_cases_hash,
733 edge_to_cases_eq,
734 edge_to_cases_cleanup);
737 /* Return nonzero if we are recording information for case labels. */
739 static bool
740 recording_case_labels_p (void)
742 return (edge_to_cases != NULL);
745 /* Stop recording information mapping edges to case labels and
746 remove any information we have recorded. */
747 void
748 end_recording_case_labels (void)
750 htab_delete (edge_to_cases);
751 edge_to_cases = NULL;
754 /* Record that CASE_LABEL (a CASE_LABEL_EXPR) references edge E. */
756 static void
757 record_switch_edge (edge e, tree case_label)
759 struct edge_to_cases_elt *elt;
760 void **slot;
762 /* Build a hash table element so we can see if E is already
763 in the table. */
764 elt = XNEW (struct edge_to_cases_elt);
765 elt->e = e;
766 elt->case_labels = case_label;
768 slot = htab_find_slot (edge_to_cases, elt, INSERT);
770 if (*slot == NULL)
772 /* E was not in the hash table. Install E into the hash table. */
773 *slot = (void *)elt;
775 else
777 /* E was already in the hash table. Free ELT as we do not need it
778 anymore. */
779 free (elt);
781 /* Get the entry stored in the hash table. */
782 elt = (struct edge_to_cases_elt *) *slot;
784 /* Add it to the chain of CASE_LABEL_EXPRs referencing E. */
785 TREE_CHAIN (case_label) = elt->case_labels;
786 elt->case_labels = case_label;
790 /* If we are inside a {start,end}_recording_cases block, then return
791 a chain of CASE_LABEL_EXPRs from T which reference E.
793 Otherwise return NULL. */
795 static tree
796 get_cases_for_edge (edge e, tree t)
798 struct edge_to_cases_elt elt, *elt_p;
799 void **slot;
800 size_t i, n;
801 tree vec;
803 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
804 chains available. Return NULL so the caller can detect this case. */
805 if (!recording_case_labels_p ())
806 return NULL;
808 restart:
809 elt.e = e;
810 elt.case_labels = NULL;
811 slot = htab_find_slot (edge_to_cases, &elt, NO_INSERT);
813 if (slot)
815 elt_p = (struct edge_to_cases_elt *)*slot;
816 return elt_p->case_labels;
819 /* If we did not find E in the hash table, then this must be the first
820 time we have been queried for information about E & T. Add all the
821 elements from T to the hash table then perform the query again. */
823 vec = SWITCH_LABELS (t);
824 n = TREE_VEC_LENGTH (vec);
825 for (i = 0; i < n; i++)
827 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
828 basic_block label_bb = label_to_block (lab);
829 record_switch_edge (find_edge (e->src, label_bb), TREE_VEC_ELT (vec, i));
831 goto restart;
834 /* Create the edges for a SWITCH_EXPR starting at block BB.
835 At this point, the switch body has been lowered and the
836 SWITCH_LABELS filled in, so this is in effect a multi-way branch. */
838 static void
839 make_switch_expr_edges (basic_block bb)
841 tree entry = last_stmt (bb);
842 size_t i, n;
843 tree vec;
845 vec = SWITCH_LABELS (entry);
846 n = TREE_VEC_LENGTH (vec);
848 for (i = 0; i < n; ++i)
850 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
851 basic_block label_bb = label_to_block (lab);
852 make_edge (bb, label_bb, 0);
857 /* Return the basic block holding label DEST. */
859 basic_block
860 label_to_block_fn (struct function *ifun, tree dest)
862 int uid = LABEL_DECL_UID (dest);
864 /* We would die hard when faced by an undefined label. Emit a label to
865 the very first basic block. This will hopefully make even the dataflow
866 and undefined variable warnings quite right. */
867 if ((errorcount || sorrycount) && uid < 0)
869 block_stmt_iterator bsi =
870 bsi_start (BASIC_BLOCK (NUM_FIXED_BLOCKS));
871 tree stmt;
873 stmt = build1 (LABEL_EXPR, void_type_node, dest);
874 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
875 uid = LABEL_DECL_UID (dest);
877 if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
878 <= (unsigned int) uid)
879 return NULL;
880 return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
883 /* Create edges for a goto statement at block BB. */
885 static void
886 make_goto_expr_edges (basic_block bb)
888 tree goto_t;
889 basic_block target_bb;
890 int for_call;
891 block_stmt_iterator last = bsi_last (bb);
893 goto_t = bsi_stmt (last);
895 /* If the last statement is not a GOTO (i.e., it is a RETURN_EXPR,
896 CALL_EXPR or MODIFY_EXPR), then the edge is an abnormal edge resulting
897 from a nonlocal goto. */
898 if (TREE_CODE (goto_t) != GOTO_EXPR)
899 for_call = 1;
900 else
902 tree dest = GOTO_DESTINATION (goto_t);
903 for_call = 0;
905 /* A GOTO to a local label creates normal edges. */
906 if (simple_goto_p (goto_t))
908 edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
909 #ifdef USE_MAPPED_LOCATION
910 e->goto_locus = EXPR_LOCATION (goto_t);
911 #else
912 e->goto_locus = EXPR_LOCUS (goto_t);
913 #endif
914 bsi_remove (&last, true);
915 return;
918 /* Nothing more to do for nonlocal gotos. */
919 if (TREE_CODE (dest) == LABEL_DECL)
920 return;
922 /* Computed gotos remain. */
925 /* Look for the block starting with the destination label. In the
926 case of a computed goto, make an edge to any label block we find
927 in the CFG. */
928 FOR_EACH_BB (target_bb)
930 block_stmt_iterator bsi;
932 for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
934 tree target = bsi_stmt (bsi);
936 if (TREE_CODE (target) != LABEL_EXPR)
937 break;
939 if (
940 /* Computed GOTOs. Make an edge to every label block that has
941 been marked as a potential target for a computed goto. */
942 (FORCED_LABEL (LABEL_EXPR_LABEL (target)) && for_call == 0)
943 /* Nonlocal GOTO target. Make an edge to every label block
944 that has been marked as a potential target for a nonlocal
945 goto. */
946 || (DECL_NONLOCAL (LABEL_EXPR_LABEL (target)) && for_call == 1))
948 make_edge (bb, target_bb, EDGE_ABNORMAL);
949 break;
954 /* Degenerate case of computed goto with no labels. */
955 if (!for_call && EDGE_COUNT (bb->succs) == 0)
956 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
960 /*---------------------------------------------------------------------------
961 Flowgraph analysis
962 ---------------------------------------------------------------------------*/
964 /* Cleanup useless labels in basic blocks. This is something we wish
965 to do early because it allows us to group case labels before creating
966 the edges for the CFG, and it speeds up block statement iterators in
967 all passes later on.
968 We only run this pass once, running it more than once is probably not
969 profitable. */
971 /* A map from basic block index to the leading label of that block. */
972 static tree *label_for_bb;
974 /* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
975 static void
976 update_eh_label (struct eh_region *region)
978 tree old_label = get_eh_region_tree_label (region);
979 if (old_label)
981 tree new_label;
982 basic_block bb = label_to_block (old_label);
984 /* ??? After optimizing, there may be EH regions with labels
985 that have already been removed from the function body, so
986 there is no basic block for them. */
987 if (! bb)
988 return;
990 new_label = label_for_bb[bb->index];
991 set_eh_region_tree_label (region, new_label);
995 /* Given LABEL return the first label in the same basic block. */
996 static tree
997 main_block_label (tree label)
999 basic_block bb = label_to_block (label);
1001 /* label_to_block possibly inserted undefined label into the chain. */
1002 if (!label_for_bb[bb->index])
1003 label_for_bb[bb->index] = label;
1004 return label_for_bb[bb->index];
1007 /* Cleanup redundant labels. This is a three-step process:
1008 1) Find the leading label for each block.
1009 2) Redirect all references to labels to the leading labels.
1010 3) Cleanup all useless labels. */
1012 void
1013 cleanup_dead_labels (void)
1015 basic_block bb;
1016 label_for_bb = XCNEWVEC (tree, last_basic_block);
1018 /* Find a suitable label for each block. We use the first user-defined
1019 label if there is one, or otherwise just the first label we see. */
1020 FOR_EACH_BB (bb)
1022 block_stmt_iterator i;
1024 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
1026 tree label, stmt = bsi_stmt (i);
1028 if (TREE_CODE (stmt) != LABEL_EXPR)
1029 break;
1031 label = LABEL_EXPR_LABEL (stmt);
1033 /* If we have not yet seen a label for the current block,
1034 remember this one and see if there are more labels. */
1035 if (! label_for_bb[bb->index])
1037 label_for_bb[bb->index] = label;
1038 continue;
1041 /* If we did see a label for the current block already, but it
1042 is an artificially created label, replace it if the current
1043 label is a user defined label. */
1044 if (! DECL_ARTIFICIAL (label)
1045 && DECL_ARTIFICIAL (label_for_bb[bb->index]))
1047 label_for_bb[bb->index] = label;
1048 break;
1053 /* Now redirect all jumps/branches to the selected label.
1054 First do so for each block ending in a control statement. */
1055 FOR_EACH_BB (bb)
1057 tree stmt = last_stmt (bb);
1058 if (!stmt)
1059 continue;
1061 switch (TREE_CODE (stmt))
1063 case COND_EXPR:
1065 tree true_branch, false_branch;
1067 true_branch = COND_EXPR_THEN (stmt);
1068 false_branch = COND_EXPR_ELSE (stmt);
1070 GOTO_DESTINATION (true_branch)
1071 = main_block_label (GOTO_DESTINATION (true_branch));
1072 GOTO_DESTINATION (false_branch)
1073 = main_block_label (GOTO_DESTINATION (false_branch));
1075 break;
1078 case SWITCH_EXPR:
1080 size_t i;
1081 tree vec = SWITCH_LABELS (stmt);
1082 size_t n = TREE_VEC_LENGTH (vec);
1084 /* Replace all destination labels. */
1085 for (i = 0; i < n; ++i)
1087 tree elt = TREE_VEC_ELT (vec, i);
1088 tree label = main_block_label (CASE_LABEL (elt));
1089 CASE_LABEL (elt) = label;
1091 break;
1094 /* We have to handle GOTO_EXPRs until they're removed, and we don't
1095 remove them until after we've created the CFG edges. */
1096 case GOTO_EXPR:
1097 if (! computed_goto_p (stmt))
1099 GOTO_DESTINATION (stmt)
1100 = main_block_label (GOTO_DESTINATION (stmt));
1101 break;
1104 default:
1105 break;
1109 for_each_eh_region (update_eh_label);
1111 /* Finally, purge dead labels. All user-defined labels and labels that
1112 can be the target of non-local gotos and labels which have their
1113 address taken are preserved. */
1114 FOR_EACH_BB (bb)
1116 block_stmt_iterator i;
1117 tree label_for_this_bb = label_for_bb[bb->index];
1119 if (! label_for_this_bb)
1120 continue;
1122 for (i = bsi_start (bb); !bsi_end_p (i); )
1124 tree label, stmt = bsi_stmt (i);
1126 if (TREE_CODE (stmt) != LABEL_EXPR)
1127 break;
1129 label = LABEL_EXPR_LABEL (stmt);
1131 if (label == label_for_this_bb
1132 || ! DECL_ARTIFICIAL (label)
1133 || DECL_NONLOCAL (label)
1134 || FORCED_LABEL (label))
1135 bsi_next (&i);
1136 else
1137 bsi_remove (&i, true);
1141 free (label_for_bb);
1144 /* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1145 and scan the sorted vector of cases. Combine the ones jumping to the
1146 same label.
1147 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1149 void
1150 group_case_labels (void)
1152 basic_block bb;
1154 FOR_EACH_BB (bb)
1156 tree stmt = last_stmt (bb);
1157 if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
1159 tree labels = SWITCH_LABELS (stmt);
1160 int old_size = TREE_VEC_LENGTH (labels);
1161 int i, j, new_size = old_size;
1162 tree default_case = TREE_VEC_ELT (labels, old_size - 1);
1163 tree default_label;
1165 /* The default label is always the last case in a switch
1166 statement after gimplification. */
1167 default_label = CASE_LABEL (default_case);
1169 /* Look for possible opportunities to merge cases.
1170 Ignore the last element of the label vector because it
1171 must be the default case. */
1172 i = 0;
1173 while (i < old_size - 1)
1175 tree base_case, base_label, base_high;
1176 base_case = TREE_VEC_ELT (labels, i);
1178 gcc_assert (base_case);
1179 base_label = CASE_LABEL (base_case);
1181 /* Discard cases that have the same destination as the
1182 default case. */
1183 if (base_label == default_label)
1185 TREE_VEC_ELT (labels, i) = NULL_TREE;
1186 i++;
1187 new_size--;
1188 continue;
1191 base_high = CASE_HIGH (base_case) ?
1192 CASE_HIGH (base_case) : CASE_LOW (base_case);
1193 i++;
1194 /* Try to merge case labels. Break out when we reach the end
1195 of the label vector or when we cannot merge the next case
1196 label with the current one. */
1197 while (i < old_size - 1)
1199 tree merge_case = TREE_VEC_ELT (labels, i);
1200 tree merge_label = CASE_LABEL (merge_case);
1201 tree t = int_const_binop (PLUS_EXPR, base_high,
1202 integer_one_node, 1);
1204 /* Merge the cases if they jump to the same place,
1205 and their ranges are consecutive. */
1206 if (merge_label == base_label
1207 && tree_int_cst_equal (CASE_LOW (merge_case), t))
1209 base_high = CASE_HIGH (merge_case) ?
1210 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1211 CASE_HIGH (base_case) = base_high;
1212 TREE_VEC_ELT (labels, i) = NULL_TREE;
1213 new_size--;
1214 i++;
1216 else
1217 break;
1221 /* Compress the case labels in the label vector, and adjust the
1222 length of the vector. */
1223 for (i = 0, j = 0; i < new_size; i++)
1225 while (! TREE_VEC_ELT (labels, j))
1226 j++;
1227 TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
1229 TREE_VEC_LENGTH (labels) = new_size;
1234 /* Checks whether we can merge block B into block A. */
1236 static bool
1237 tree_can_merge_blocks_p (basic_block a, basic_block b)
1239 tree stmt;
1240 block_stmt_iterator bsi;
1241 tree phi;
1243 if (!single_succ_p (a))
1244 return false;
1246 if (single_succ_edge (a)->flags & EDGE_ABNORMAL)
1247 return false;
1249 if (single_succ (a) != b)
1250 return false;
1252 if (!single_pred_p (b))
1253 return false;
1255 if (b == EXIT_BLOCK_PTR)
1256 return false;
1258 /* If A ends by a statement causing exceptions or something similar, we
1259 cannot merge the blocks. */
1260 stmt = last_stmt (a);
1261 if (stmt && stmt_ends_bb_p (stmt))
1262 return false;
1264 /* Do not allow a block with only a non-local label to be merged. */
1265 if (stmt && TREE_CODE (stmt) == LABEL_EXPR
1266 && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
1267 return false;
1269 /* It must be possible to eliminate all phi nodes in B. If ssa form
1270 is not up-to-date, we cannot eliminate any phis. */
1271 phi = phi_nodes (b);
1272 if (phi)
1274 if (need_ssa_update_p ())
1275 return false;
1277 for (; phi; phi = PHI_CHAIN (phi))
1278 if (!is_gimple_reg (PHI_RESULT (phi))
1279 && !may_propagate_copy (PHI_RESULT (phi), PHI_ARG_DEF (phi, 0)))
1280 return false;
1283 /* Do not remove user labels. */
1284 for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
1286 stmt = bsi_stmt (bsi);
1287 if (TREE_CODE (stmt) != LABEL_EXPR)
1288 break;
1289 if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
1290 return false;
1293 /* Protect the loop latches. */
1294 if (current_loops
1295 && b->loop_father->latch == b)
1296 return false;
1298 return true;
1301 /* Replaces all uses of NAME by VAL. */
1303 void
1304 replace_uses_by (tree name, tree val)
1306 imm_use_iterator imm_iter;
1307 use_operand_p use;
1308 tree stmt;
1309 edge e;
1310 unsigned i;
1311 VEC(tree,heap) *stmts = VEC_alloc (tree, heap, 20);
1313 FOR_EACH_IMM_USE_SAFE (use, imm_iter, name)
1315 stmt = USE_STMT (use);
1316 replace_exp (use, val);
1318 if (TREE_CODE (stmt) == PHI_NODE)
1320 e = PHI_ARG_EDGE (stmt, PHI_ARG_INDEX_FROM_USE (use));
1321 if (e->flags & EDGE_ABNORMAL)
1323 /* This can only occur for virtual operands, since
1324 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1325 would prevent replacement. */
1326 gcc_assert (!is_gimple_reg (name));
1327 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1330 else
1331 VEC_safe_push (tree, heap, stmts, stmt);
1334 /* We do not update the statements in the loop above. Consider
1335 x = w * w;
1337 If we performed the update in the first loop, the statement
1338 would be rescanned after first occurrence of w is replaced,
1339 the new uses would be placed to the beginning of the list,
1340 and we would never process them. */
1341 for (i = 0; VEC_iterate (tree, stmts, i, stmt); i++)
1343 tree rhs;
1345 fold_stmt_inplace (stmt);
1347 rhs = get_rhs (stmt);
1348 if (TREE_CODE (rhs) == ADDR_EXPR)
1349 recompute_tree_invariant_for_addr_expr (rhs);
1351 maybe_clean_or_replace_eh_stmt (stmt, stmt);
1352 mark_new_vars_to_rename (stmt);
1355 VEC_free (tree, heap, stmts);
1357 /* Also update the trees stored in loop structures. */
1358 if (current_loops)
1360 struct loop *loop;
1362 for (i = 0; i < current_loops->num; i++)
1364 loop = current_loops->parray[i];
1365 if (loop)
1366 substitute_in_loop_info (loop, name, val);
1371 /* Merge block B into block A. */
1373 static void
1374 tree_merge_blocks (basic_block a, basic_block b)
1376 block_stmt_iterator bsi;
1377 tree_stmt_iterator last;
1378 tree phi;
1380 if (dump_file)
1381 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1383 /* Remove all single-valued PHI nodes from block B of the form
1384 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1385 bsi = bsi_last (a);
1386 for (phi = phi_nodes (b); phi; phi = phi_nodes (b))
1388 tree def = PHI_RESULT (phi), use = PHI_ARG_DEF (phi, 0);
1389 tree copy;
1390 bool may_replace_uses = may_propagate_copy (def, use);
1392 /* In case we have loops to care about, do not propagate arguments of
1393 loop closed ssa phi nodes. */
1394 if (current_loops
1395 && is_gimple_reg (def)
1396 && TREE_CODE (use) == SSA_NAME
1397 && a->loop_father != b->loop_father)
1398 may_replace_uses = false;
1400 if (!may_replace_uses)
1402 gcc_assert (is_gimple_reg (def));
1404 /* Note that just emitting the copies is fine -- there is no problem
1405 with ordering of phi nodes. This is because A is the single
1406 predecessor of B, therefore results of the phi nodes cannot
1407 appear as arguments of the phi nodes. */
1408 copy = build2 (MODIFY_EXPR, void_type_node, def, use);
1409 bsi_insert_after (&bsi, copy, BSI_NEW_STMT);
1410 SET_PHI_RESULT (phi, NULL_TREE);
1411 SSA_NAME_DEF_STMT (def) = copy;
1413 else
1414 replace_uses_by (def, use);
1416 remove_phi_node (phi, NULL);
1419 /* Ensure that B follows A. */
1420 move_block_after (b, a);
1422 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1423 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1425 /* Remove labels from B and set bb_for_stmt to A for other statements. */
1426 for (bsi = bsi_start (b); !bsi_end_p (bsi);)
1428 if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
1430 tree label = bsi_stmt (bsi);
1432 bsi_remove (&bsi, false);
1433 /* Now that we can thread computed gotos, we might have
1434 a situation where we have a forced label in block B
1435 However, the label at the start of block B might still be
1436 used in other ways (think about the runtime checking for
1437 Fortran assigned gotos). So we can not just delete the
1438 label. Instead we move the label to the start of block A. */
1439 if (FORCED_LABEL (LABEL_EXPR_LABEL (label)))
1441 block_stmt_iterator dest_bsi = bsi_start (a);
1442 bsi_insert_before (&dest_bsi, label, BSI_NEW_STMT);
1445 else
1447 set_bb_for_stmt (bsi_stmt (bsi), a);
1448 bsi_next (&bsi);
1452 /* Merge the chains. */
1453 last = tsi_last (a->stmt_list);
1454 tsi_link_after (&last, b->stmt_list, TSI_NEW_STMT);
1455 b->stmt_list = NULL;
1459 /* Return the one of two successors of BB that is not reachable by a
1460 reached by a complex edge, if there is one. Else, return BB. We use
1461 this in optimizations that use post-dominators for their heuristics,
1462 to catch the cases in C++ where function calls are involved. */
1464 basic_block
1465 single_noncomplex_succ (basic_block bb)
1467 edge e0, e1;
1468 if (EDGE_COUNT (bb->succs) != 2)
1469 return bb;
1471 e0 = EDGE_SUCC (bb, 0);
1472 e1 = EDGE_SUCC (bb, 1);
1473 if (e0->flags & EDGE_COMPLEX)
1474 return e1->dest;
1475 if (e1->flags & EDGE_COMPLEX)
1476 return e0->dest;
1478 return bb;
1483 /* Walk the function tree removing unnecessary statements.
1485 * Empty statement nodes are removed
1487 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1489 * Unnecessary COND_EXPRs are removed
1491 * Some unnecessary BIND_EXPRs are removed
1493 Clearly more work could be done. The trick is doing the analysis
1494 and removal fast enough to be a net improvement in compile times.
1496 Note that when we remove a control structure such as a COND_EXPR
1497 BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1498 to ensure we eliminate all the useless code. */
1500 struct rus_data
1502 tree *last_goto;
1503 bool repeat;
1504 bool may_throw;
1505 bool may_branch;
1506 bool has_label;
1509 static void remove_useless_stmts_1 (tree *, struct rus_data *);
1511 static bool
1512 remove_useless_stmts_warn_notreached (tree stmt)
1514 if (EXPR_HAS_LOCATION (stmt))
1516 location_t loc = EXPR_LOCATION (stmt);
1517 if (LOCATION_LINE (loc) > 0)
1519 warning (0, "%Hwill never be executed", &loc);
1520 return true;
1524 switch (TREE_CODE (stmt))
1526 case STATEMENT_LIST:
1528 tree_stmt_iterator i;
1529 for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
1530 if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
1531 return true;
1533 break;
1535 case COND_EXPR:
1536 if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
1537 return true;
1538 if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
1539 return true;
1540 if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
1541 return true;
1542 break;
1544 case TRY_FINALLY_EXPR:
1545 case TRY_CATCH_EXPR:
1546 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
1547 return true;
1548 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
1549 return true;
1550 break;
1552 case CATCH_EXPR:
1553 return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
1554 case EH_FILTER_EXPR:
1555 return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
1556 case BIND_EXPR:
1557 return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));
1559 default:
1560 /* Not a live container. */
1561 break;
1564 return false;
1567 static void
1568 remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
1570 tree then_clause, else_clause, cond;
1571 bool save_has_label, then_has_label, else_has_label;
1573 save_has_label = data->has_label;
1574 data->has_label = false;
1575 data->last_goto = NULL;
1577 remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);
1579 then_has_label = data->has_label;
1580 data->has_label = false;
1581 data->last_goto = NULL;
1583 remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);
1585 else_has_label = data->has_label;
1586 data->has_label = save_has_label | then_has_label | else_has_label;
1588 then_clause = COND_EXPR_THEN (*stmt_p);
1589 else_clause = COND_EXPR_ELSE (*stmt_p);
1590 cond = fold (COND_EXPR_COND (*stmt_p));
1592 /* If neither arm does anything at all, we can remove the whole IF. */
1593 if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
1595 *stmt_p = build_empty_stmt ();
1596 data->repeat = true;
1599 /* If there are no reachable statements in an arm, then we can
1600 zap the entire conditional. */
1601 else if (integer_nonzerop (cond) && !else_has_label)
1603 if (warn_notreached)
1604 remove_useless_stmts_warn_notreached (else_clause);
1605 *stmt_p = then_clause;
1606 data->repeat = true;
1608 else if (integer_zerop (cond) && !then_has_label)
1610 if (warn_notreached)
1611 remove_useless_stmts_warn_notreached (then_clause);
1612 *stmt_p = else_clause;
1613 data->repeat = true;
1616 /* Check a couple of simple things on then/else with single stmts. */
1617 else
1619 tree then_stmt = expr_only (then_clause);
1620 tree else_stmt = expr_only (else_clause);
1622 /* Notice branches to a common destination. */
1623 if (then_stmt && else_stmt
1624 && TREE_CODE (then_stmt) == GOTO_EXPR
1625 && TREE_CODE (else_stmt) == GOTO_EXPR
1626 && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
1628 *stmt_p = then_stmt;
1629 data->repeat = true;
1632 /* If the THEN/ELSE clause merely assigns a value to a variable or
1633 parameter which is already known to contain that value, then
1634 remove the useless THEN/ELSE clause. */
1635 else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1637 if (else_stmt
1638 && TREE_CODE (else_stmt) == MODIFY_EXPR
1639 && TREE_OPERAND (else_stmt, 0) == cond
1640 && integer_zerop (TREE_OPERAND (else_stmt, 1)))
1641 COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
1643 else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
1644 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1645 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1646 && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
1648 tree stmt = (TREE_CODE (cond) == EQ_EXPR
1649 ? then_stmt : else_stmt);
1650 tree *location = (TREE_CODE (cond) == EQ_EXPR
1651 ? &COND_EXPR_THEN (*stmt_p)
1652 : &COND_EXPR_ELSE (*stmt_p));
1654 if (stmt
1655 && TREE_CODE (stmt) == MODIFY_EXPR
1656 && TREE_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
1657 && TREE_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
1658 *location = alloc_stmt_list ();
1662 /* Protect GOTOs in the arm of COND_EXPRs from being removed. They
1663 would be re-introduced during lowering. */
1664 data->last_goto = NULL;
1668 static void
1669 remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
1671 bool save_may_branch, save_may_throw;
1672 bool this_may_branch, this_may_throw;
1674 /* Collect may_branch and may_throw information for the body only. */
1675 save_may_branch = data->may_branch;
1676 save_may_throw = data->may_throw;
1677 data->may_branch = false;
1678 data->may_throw = false;
1679 data->last_goto = NULL;
1681 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1683 this_may_branch = data->may_branch;
1684 this_may_throw = data->may_throw;
1685 data->may_branch |= save_may_branch;
1686 data->may_throw |= save_may_throw;
1687 data->last_goto = NULL;
1689 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1691 /* If the body is empty, then we can emit the FINALLY block without
1692 the enclosing TRY_FINALLY_EXPR. */
1693 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
1695 *stmt_p = TREE_OPERAND (*stmt_p, 1);
1696 data->repeat = true;
1699 /* If the handler is empty, then we can emit the TRY block without
1700 the enclosing TRY_FINALLY_EXPR. */
1701 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1703 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1704 data->repeat = true;
1707 /* If the body neither throws, nor branches, then we can safely
1708 string the TRY and FINALLY blocks together. */
1709 else if (!this_may_branch && !this_may_throw)
1711 tree stmt = *stmt_p;
1712 *stmt_p = TREE_OPERAND (stmt, 0);
1713 append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
1714 data->repeat = true;
1719 static void
1720 remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
1722 bool save_may_throw, this_may_throw;
1723 tree_stmt_iterator i;
1724 tree stmt;
1726 /* Collect may_throw information for the body only. */
1727 save_may_throw = data->may_throw;
1728 data->may_throw = false;
1729 data->last_goto = NULL;
1731 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1733 this_may_throw = data->may_throw;
1734 data->may_throw = save_may_throw;
1736 /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
1737 if (!this_may_throw)
1739 if (warn_notreached)
1740 remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
1741 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1742 data->repeat = true;
1743 return;
1746 /* Process the catch clause specially. We may be able to tell that
1747 no exceptions propagate past this point. */
1749 this_may_throw = true;
1750 i = tsi_start (TREE_OPERAND (*stmt_p, 1));
1751 stmt = tsi_stmt (i);
1752 data->last_goto = NULL;
1754 switch (TREE_CODE (stmt))
1756 case CATCH_EXPR:
1757 for (; !tsi_end_p (i); tsi_next (&i))
1759 stmt = tsi_stmt (i);
1760 /* If we catch all exceptions, then the body does not
1761 propagate exceptions past this point. */
1762 if (CATCH_TYPES (stmt) == NULL)
1763 this_may_throw = false;
1764 data->last_goto = NULL;
1765 remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
1767 break;
1769 case EH_FILTER_EXPR:
1770 if (EH_FILTER_MUST_NOT_THROW (stmt))
1771 this_may_throw = false;
1772 else if (EH_FILTER_TYPES (stmt) == NULL)
1773 this_may_throw = false;
1774 remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
1775 break;
1777 default:
1778 /* Otherwise this is a cleanup. */
1779 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1781 /* If the cleanup is empty, then we can emit the TRY block without
1782 the enclosing TRY_CATCH_EXPR. */
1783 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1785 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1786 data->repeat = true;
1788 break;
1790 data->may_throw |= this_may_throw;
1794 static void
1795 remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
1797 tree block;
1799 /* First remove anything underneath the BIND_EXPR. */
1800 remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);
1802 /* If the BIND_EXPR has no variables, then we can pull everything
1803 up one level and remove the BIND_EXPR, unless this is the toplevel
1804 BIND_EXPR for the current function or an inlined function.
1806 When this situation occurs we will want to apply this
1807 optimization again. */
1808 block = BIND_EXPR_BLOCK (*stmt_p);
1809 if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
1810 && *stmt_p != DECL_SAVED_TREE (current_function_decl)
1811 && (! block
1812 || ! BLOCK_ABSTRACT_ORIGIN (block)
1813 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1814 != FUNCTION_DECL)))
1816 *stmt_p = BIND_EXPR_BODY (*stmt_p);
1817 data->repeat = true;
1822 static void
1823 remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
1825 tree dest = GOTO_DESTINATION (*stmt_p);
1827 data->may_branch = true;
1828 data->last_goto = NULL;
1830 /* Record the last goto expr, so that we can delete it if unnecessary. */
1831 if (TREE_CODE (dest) == LABEL_DECL)
1832 data->last_goto = stmt_p;
1836 static void
1837 remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
1839 tree label = LABEL_EXPR_LABEL (*stmt_p);
1841 data->has_label = true;
1843 /* We do want to jump across non-local label receiver code. */
1844 if (DECL_NONLOCAL (label))
1845 data->last_goto = NULL;
1847 else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
1849 *data->last_goto = build_empty_stmt ();
1850 data->repeat = true;
1853 /* ??? Add something here to delete unused labels. */
1857 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
1858 decl. This allows us to eliminate redundant or useless
1859 calls to "const" functions.
1861 Gimplifier already does the same operation, but we may notice functions
1862 being const and pure once their calls has been gimplified, so we need
1863 to update the flag. */
1865 static void
1866 update_call_expr_flags (tree call)
1868 tree decl = get_callee_fndecl (call);
1869 if (!decl)
1870 return;
1871 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1872 TREE_SIDE_EFFECTS (call) = 0;
1873 if (TREE_NOTHROW (decl))
1874 TREE_NOTHROW (call) = 1;
1878 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1880 void
1881 notice_special_calls (tree t)
1883 int flags = call_expr_flags (t);
1885 if (flags & ECF_MAY_BE_ALLOCA)
1886 current_function_calls_alloca = true;
1887 if (flags & ECF_RETURNS_TWICE)
1888 current_function_calls_setjmp = true;
1892 /* Clear flags set by notice_special_calls. Used by dead code removal
1893 to update the flags. */
1895 void
1896 clear_special_calls (void)
1898 current_function_calls_alloca = false;
1899 current_function_calls_setjmp = false;
1903 static void
1904 remove_useless_stmts_1 (tree *tp, struct rus_data *data)
1906 tree t = *tp, op;
1908 switch (TREE_CODE (t))
1910 case COND_EXPR:
1911 remove_useless_stmts_cond (tp, data);
1912 break;
1914 case TRY_FINALLY_EXPR:
1915 remove_useless_stmts_tf (tp, data);
1916 break;
1918 case TRY_CATCH_EXPR:
1919 remove_useless_stmts_tc (tp, data);
1920 break;
1922 case BIND_EXPR:
1923 remove_useless_stmts_bind (tp, data);
1924 break;
1926 case GOTO_EXPR:
1927 remove_useless_stmts_goto (tp, data);
1928 break;
1930 case LABEL_EXPR:
1931 remove_useless_stmts_label (tp, data);
1932 break;
1934 case RETURN_EXPR:
1935 fold_stmt (tp);
1936 data->last_goto = NULL;
1937 data->may_branch = true;
1938 break;
1940 case CALL_EXPR:
1941 fold_stmt (tp);
1942 data->last_goto = NULL;
1943 notice_special_calls (t);
1944 update_call_expr_flags (t);
1945 if (tree_could_throw_p (t))
1946 data->may_throw = true;
1947 break;
1949 case MODIFY_EXPR:
1950 data->last_goto = NULL;
1951 fold_stmt (tp);
1952 op = get_call_expr_in (t);
1953 if (op)
1955 update_call_expr_flags (op);
1956 notice_special_calls (op);
1958 if (tree_could_throw_p (t))
1959 data->may_throw = true;
1960 break;
1962 case STATEMENT_LIST:
1964 tree_stmt_iterator i = tsi_start (t);
1965 while (!tsi_end_p (i))
1967 t = tsi_stmt (i);
1968 if (IS_EMPTY_STMT (t))
1970 tsi_delink (&i);
1971 continue;
1974 remove_useless_stmts_1 (tsi_stmt_ptr (i), data);
1976 t = tsi_stmt (i);
1977 if (TREE_CODE (t) == STATEMENT_LIST)
1979 tsi_link_before (&i, t, TSI_SAME_STMT);
1980 tsi_delink (&i);
1982 else
1983 tsi_next (&i);
1986 break;
1987 case ASM_EXPR:
1988 fold_stmt (tp);
1989 data->last_goto = NULL;
1990 break;
1992 default:
1993 data->last_goto = NULL;
1994 break;
1998 static unsigned int
1999 remove_useless_stmts (void)
2001 struct rus_data data;
2003 clear_special_calls ();
2007 memset (&data, 0, sizeof (data));
2008 remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
2010 while (data.repeat);
2011 return 0;
2015 struct tree_opt_pass pass_remove_useless_stmts =
2017 "useless", /* name */
2018 NULL, /* gate */
2019 remove_useless_stmts, /* execute */
2020 NULL, /* sub */
2021 NULL, /* next */
2022 0, /* static_pass_number */
2023 0, /* tv_id */
2024 PROP_gimple_any, /* properties_required */
2025 0, /* properties_provided */
2026 0, /* properties_destroyed */
2027 0, /* todo_flags_start */
2028 TODO_dump_func, /* todo_flags_finish */
2029 0 /* letter */
2032 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2034 static void
2035 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2037 tree phi;
2039 /* Since this block is no longer reachable, we can just delete all
2040 of its PHI nodes. */
2041 phi = phi_nodes (bb);
2042 while (phi)
2044 tree next = PHI_CHAIN (phi);
2045 remove_phi_node (phi, NULL_TREE);
2046 phi = next;
2049 /* Remove edges to BB's successors. */
2050 while (EDGE_COUNT (bb->succs) > 0)
2051 remove_edge (EDGE_SUCC (bb, 0));
2055 /* Remove statements of basic block BB. */
2057 static void
2058 remove_bb (basic_block bb)
2060 block_stmt_iterator i;
2061 #ifdef USE_MAPPED_LOCATION
2062 source_location loc = UNKNOWN_LOCATION;
2063 #else
2064 source_locus loc = 0;
2065 #endif
2067 if (dump_file)
2069 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2070 if (dump_flags & TDF_DETAILS)
2072 dump_bb (bb, dump_file, 0);
2073 fprintf (dump_file, "\n");
2077 /* If we remove the header or the latch of a loop, mark the loop for
2078 removal by setting its header and latch to NULL. */
2079 if (current_loops)
2081 struct loop *loop = bb->loop_father;
2083 if (loop->latch == bb
2084 || loop->header == bb)
2086 loop->latch = NULL;
2087 loop->header = NULL;
2089 /* Also clean up the information associated with the loop. Updating
2090 it would waste time. More importantly, it may refer to ssa
2091 names that were defined in other removed basic block -- these
2092 ssa names are now removed and invalid. */
2093 free_numbers_of_iterations_estimates_loop (loop);
2097 /* Remove all the instructions in the block. */
2098 for (i = bsi_start (bb); !bsi_end_p (i);)
2100 tree stmt = bsi_stmt (i);
2101 if (TREE_CODE (stmt) == LABEL_EXPR
2102 && (FORCED_LABEL (LABEL_EXPR_LABEL (stmt))
2103 || DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt))))
2105 basic_block new_bb;
2106 block_stmt_iterator new_bsi;
2108 /* A non-reachable non-local label may still be referenced.
2109 But it no longer needs to carry the extra semantics of
2110 non-locality. */
2111 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
2113 DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)) = 0;
2114 FORCED_LABEL (LABEL_EXPR_LABEL (stmt)) = 1;
2117 new_bb = bb->prev_bb;
2118 new_bsi = bsi_start (new_bb);
2119 bsi_remove (&i, false);
2120 bsi_insert_before (&new_bsi, stmt, BSI_NEW_STMT);
2122 else
2124 /* Release SSA definitions if we are in SSA. Note that we
2125 may be called when not in SSA. For example,
2126 final_cleanup calls this function via
2127 cleanup_tree_cfg. */
2128 if (in_ssa_p)
2129 release_defs (stmt);
2131 bsi_remove (&i, true);
2134 /* Don't warn for removed gotos. Gotos are often removed due to
2135 jump threading, thus resulting in bogus warnings. Not great,
2136 since this way we lose warnings for gotos in the original
2137 program that are indeed unreachable. */
2138 if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
2140 #ifdef USE_MAPPED_LOCATION
2141 if (EXPR_HAS_LOCATION (stmt))
2142 loc = EXPR_LOCATION (stmt);
2143 #else
2144 source_locus t;
2145 t = EXPR_LOCUS (stmt);
2146 if (t && LOCATION_LINE (*t) > 0)
2147 loc = t;
2148 #endif
2152 /* If requested, give a warning that the first statement in the
2153 block is unreachable. We walk statements backwards in the
2154 loop above, so the last statement we process is the first statement
2155 in the block. */
2156 #ifdef USE_MAPPED_LOCATION
2157 if (loc > BUILTINS_LOCATION)
2158 warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
2159 #else
2160 if (loc)
2161 warning (OPT_Wunreachable_code, "%Hwill never be executed", loc);
2162 #endif
2164 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2168 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2169 predicate VAL, return the edge that will be taken out of the block.
2170 If VAL does not match a unique edge, NULL is returned. */
2172 edge
2173 find_taken_edge (basic_block bb, tree val)
2175 tree stmt;
2177 stmt = last_stmt (bb);
2179 gcc_assert (stmt);
2180 gcc_assert (is_ctrl_stmt (stmt));
2181 gcc_assert (val);
2183 if (! is_gimple_min_invariant (val))
2184 return NULL;
2186 if (TREE_CODE (stmt) == COND_EXPR)
2187 return find_taken_edge_cond_expr (bb, val);
2189 if (TREE_CODE (stmt) == SWITCH_EXPR)
2190 return find_taken_edge_switch_expr (bb, val);
2192 if (computed_goto_p (stmt))
2193 return find_taken_edge_computed_goto (bb, TREE_OPERAND( val, 0));
2195 gcc_unreachable ();
2198 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2199 statement, determine which of the outgoing edges will be taken out of the
2200 block. Return NULL if either edge may be taken. */
2202 static edge
2203 find_taken_edge_computed_goto (basic_block bb, tree val)
2205 basic_block dest;
2206 edge e = NULL;
2208 dest = label_to_block (val);
2209 if (dest)
2211 e = find_edge (bb, dest);
2212 gcc_assert (e != NULL);
2215 return e;
2218 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2219 statement, determine which of the two edges will be taken out of the
2220 block. Return NULL if either edge may be taken. */
2222 static edge
2223 find_taken_edge_cond_expr (basic_block bb, tree val)
2225 edge true_edge, false_edge;
2227 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2229 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2230 return (zero_p (val) ? false_edge : true_edge);
2233 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2234 statement, determine which edge will be taken out of the block. Return
2235 NULL if any edge may be taken. */
2237 static edge
2238 find_taken_edge_switch_expr (basic_block bb, tree val)
2240 tree switch_expr, taken_case;
2241 basic_block dest_bb;
2242 edge e;
2244 switch_expr = last_stmt (bb);
2245 taken_case = find_case_label_for_value (switch_expr, val);
2246 dest_bb = label_to_block (CASE_LABEL (taken_case));
2248 e = find_edge (bb, dest_bb);
2249 gcc_assert (e);
2250 return e;
2254 /* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
2255 We can make optimal use here of the fact that the case labels are
2256 sorted: We can do a binary search for a case matching VAL. */
2258 static tree
2259 find_case_label_for_value (tree switch_expr, tree val)
2261 tree vec = SWITCH_LABELS (switch_expr);
2262 size_t low, high, n = TREE_VEC_LENGTH (vec);
2263 tree default_case = TREE_VEC_ELT (vec, n - 1);
2265 for (low = -1, high = n - 1; high - low > 1; )
2267 size_t i = (high + low) / 2;
2268 tree t = TREE_VEC_ELT (vec, i);
2269 int cmp;
2271 /* Cache the result of comparing CASE_LOW and val. */
2272 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2274 if (cmp > 0)
2275 high = i;
2276 else
2277 low = i;
2279 if (CASE_HIGH (t) == NULL)
2281 /* A singe-valued case label. */
2282 if (cmp == 0)
2283 return t;
2285 else
2287 /* A case range. We can only handle integer ranges. */
2288 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2289 return t;
2293 return default_case;
2299 /*---------------------------------------------------------------------------
2300 Debugging functions
2301 ---------------------------------------------------------------------------*/
2303 /* Dump tree-specific information of block BB to file OUTF. */
2305 void
2306 tree_dump_bb (basic_block bb, FILE *outf, int indent)
2308 dump_generic_bb (outf, bb, indent, TDF_VOPS);
2312 /* Dump a basic block on stderr. */
2314 void
2315 debug_tree_bb (basic_block bb)
2317 dump_bb (bb, stderr, 0);
2321 /* Dump basic block with index N on stderr. */
2323 basic_block
2324 debug_tree_bb_n (int n)
2326 debug_tree_bb (BASIC_BLOCK (n));
2327 return BASIC_BLOCK (n);
2331 /* Dump the CFG on stderr.
2333 FLAGS are the same used by the tree dumping functions
2334 (see TDF_* in tree.h). */
2336 void
2337 debug_tree_cfg (int flags)
2339 dump_tree_cfg (stderr, flags);
2343 /* Dump the program showing basic block boundaries on the given FILE.
2345 FLAGS are the same used by the tree dumping functions (see TDF_* in
2346 tree.h). */
2348 void
2349 dump_tree_cfg (FILE *file, int flags)
2351 if (flags & TDF_DETAILS)
2353 const char *funcname
2354 = lang_hooks.decl_printable_name (current_function_decl, 2);
2356 fputc ('\n', file);
2357 fprintf (file, ";; Function %s\n\n", funcname);
2358 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2359 n_basic_blocks, n_edges, last_basic_block);
2361 brief_dump_cfg (file);
2362 fprintf (file, "\n");
2365 if (flags & TDF_STATS)
2366 dump_cfg_stats (file);
2368 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2372 /* Dump CFG statistics on FILE. */
2374 void
2375 dump_cfg_stats (FILE *file)
2377 static long max_num_merged_labels = 0;
2378 unsigned long size, total = 0;
2379 long num_edges;
2380 basic_block bb;
2381 const char * const fmt_str = "%-30s%-13s%12s\n";
2382 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2383 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2384 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2385 const char *funcname
2386 = lang_hooks.decl_printable_name (current_function_decl, 2);
2389 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2391 fprintf (file, "---------------------------------------------------------\n");
2392 fprintf (file, fmt_str, "", " Number of ", "Memory");
2393 fprintf (file, fmt_str, "", " instances ", "used ");
2394 fprintf (file, "---------------------------------------------------------\n");
2396 size = n_basic_blocks * sizeof (struct basic_block_def);
2397 total += size;
2398 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2399 SCALE (size), LABEL (size));
2401 num_edges = 0;
2402 FOR_EACH_BB (bb)
2403 num_edges += EDGE_COUNT (bb->succs);
2404 size = num_edges * sizeof (struct edge_def);
2405 total += size;
2406 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2408 fprintf (file, "---------------------------------------------------------\n");
2409 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2410 LABEL (total));
2411 fprintf (file, "---------------------------------------------------------\n");
2412 fprintf (file, "\n");
2414 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2415 max_num_merged_labels = cfg_stats.num_merged_labels;
2417 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2418 cfg_stats.num_merged_labels, max_num_merged_labels);
2420 fprintf (file, "\n");
2424 /* Dump CFG statistics on stderr. Keep extern so that it's always
2425 linked in the final executable. */
2427 void
2428 debug_cfg_stats (void)
2430 dump_cfg_stats (stderr);
2434 /* Dump the flowgraph to a .vcg FILE. */
2436 static void
2437 tree_cfg2vcg (FILE *file)
2439 edge e;
2440 edge_iterator ei;
2441 basic_block bb;
2442 const char *funcname
2443 = lang_hooks.decl_printable_name (current_function_decl, 2);
2445 /* Write the file header. */
2446 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2447 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2448 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2450 /* Write blocks and edges. */
2451 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2453 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2454 e->dest->index);
2456 if (e->flags & EDGE_FAKE)
2457 fprintf (file, " linestyle: dotted priority: 10");
2458 else
2459 fprintf (file, " linestyle: solid priority: 100");
2461 fprintf (file, " }\n");
2463 fputc ('\n', file);
2465 FOR_EACH_BB (bb)
2467 enum tree_code head_code, end_code;
2468 const char *head_name, *end_name;
2469 int head_line = 0;
2470 int end_line = 0;
2471 tree first = first_stmt (bb);
2472 tree last = last_stmt (bb);
2474 if (first)
2476 head_code = TREE_CODE (first);
2477 head_name = tree_code_name[head_code];
2478 head_line = get_lineno (first);
2480 else
2481 head_name = "no-statement";
2483 if (last)
2485 end_code = TREE_CODE (last);
2486 end_name = tree_code_name[end_code];
2487 end_line = get_lineno (last);
2489 else
2490 end_name = "no-statement";
2492 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2493 bb->index, bb->index, head_name, head_line, end_name,
2494 end_line);
2496 FOR_EACH_EDGE (e, ei, bb->succs)
2498 if (e->dest == EXIT_BLOCK_PTR)
2499 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2500 else
2501 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2503 if (e->flags & EDGE_FAKE)
2504 fprintf (file, " priority: 10 linestyle: dotted");
2505 else
2506 fprintf (file, " priority: 100 linestyle: solid");
2508 fprintf (file, " }\n");
2511 if (bb->next_bb != EXIT_BLOCK_PTR)
2512 fputc ('\n', file);
2515 fputs ("}\n\n", file);
2520 /*---------------------------------------------------------------------------
2521 Miscellaneous helpers
2522 ---------------------------------------------------------------------------*/
2524 /* Return true if T represents a stmt that always transfers control. */
2526 bool
2527 is_ctrl_stmt (tree t)
2529 return (TREE_CODE (t) == COND_EXPR
2530 || TREE_CODE (t) == SWITCH_EXPR
2531 || TREE_CODE (t) == GOTO_EXPR
2532 || TREE_CODE (t) == RETURN_EXPR
2533 || TREE_CODE (t) == RESX_EXPR);
2537 /* Return true if T is a statement that may alter the flow of control
2538 (e.g., a call to a non-returning function). */
2540 bool
2541 is_ctrl_altering_stmt (tree t)
2543 tree call;
2545 gcc_assert (t);
2546 call = get_call_expr_in (t);
2547 if (call)
2549 /* A non-pure/const CALL_EXPR alters flow control if the current
2550 function has nonlocal labels. */
2551 if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
2552 return true;
2554 /* A CALL_EXPR also alters control flow if it does not return. */
2555 if (call_expr_flags (call) & ECF_NORETURN)
2556 return true;
2559 /* OpenMP directives alter control flow. */
2560 if (flag_openmp && OMP_DIRECTIVE_P (t))
2561 return true;
2563 /* If a statement can throw, it alters control flow. */
2564 return tree_can_throw_internal (t);
2568 /* Return true if T is a computed goto. */
2570 bool
2571 computed_goto_p (tree t)
2573 return (TREE_CODE (t) == GOTO_EXPR
2574 && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
2578 /* Checks whether EXPR is a simple local goto. */
2580 bool
2581 simple_goto_p (tree expr)
2583 return (TREE_CODE (expr) == GOTO_EXPR
2584 && TREE_CODE (GOTO_DESTINATION (expr)) == LABEL_DECL);
2588 /* Return true if T should start a new basic block. PREV_T is the
2589 statement preceding T. It is used when T is a label or a case label.
2590 Labels should only start a new basic block if their previous statement
2591 wasn't a label. Otherwise, sequence of labels would generate
2592 unnecessary basic blocks that only contain a single label. */
2594 static inline bool
2595 stmt_starts_bb_p (tree t, tree prev_t)
2597 if (t == NULL_TREE)
2598 return false;
2600 /* LABEL_EXPRs start a new basic block only if the preceding
2601 statement wasn't a label of the same type. This prevents the
2602 creation of consecutive blocks that have nothing but a single
2603 label. */
2604 if (TREE_CODE (t) == LABEL_EXPR)
2606 /* Nonlocal and computed GOTO targets always start a new block. */
2607 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
2608 || FORCED_LABEL (LABEL_EXPR_LABEL (t)))
2609 return true;
2611 if (prev_t && TREE_CODE (prev_t) == LABEL_EXPR)
2613 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
2614 return true;
2616 cfg_stats.num_merged_labels++;
2617 return false;
2619 else
2620 return true;
2623 return false;
2627 /* Return true if T should end a basic block. */
2629 bool
2630 stmt_ends_bb_p (tree t)
2632 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2636 /* Add gotos that used to be represented implicitly in the CFG. */
2638 void
2639 disband_implicit_edges (void)
2641 basic_block bb;
2642 block_stmt_iterator last;
2643 edge e;
2644 edge_iterator ei;
2645 tree stmt, label;
2647 FOR_EACH_BB (bb)
2649 last = bsi_last (bb);
2650 stmt = last_stmt (bb);
2652 if (stmt && TREE_CODE (stmt) == COND_EXPR)
2654 /* Remove superfluous gotos from COND_EXPR branches. Moved
2655 from cfg_remove_useless_stmts here since it violates the
2656 invariants for tree--cfg correspondence and thus fits better
2657 here where we do it anyway. */
2658 e = find_edge (bb, bb->next_bb);
2659 if (e)
2661 if (e->flags & EDGE_TRUE_VALUE)
2662 COND_EXPR_THEN (stmt) = build_empty_stmt ();
2663 else if (e->flags & EDGE_FALSE_VALUE)
2664 COND_EXPR_ELSE (stmt) = build_empty_stmt ();
2665 else
2666 gcc_unreachable ();
2667 e->flags |= EDGE_FALLTHRU;
2670 continue;
2673 if (stmt && TREE_CODE (stmt) == RETURN_EXPR)
2675 /* Remove the RETURN_EXPR if we may fall though to the exit
2676 instead. */
2677 gcc_assert (single_succ_p (bb));
2678 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
2680 if (bb->next_bb == EXIT_BLOCK_PTR
2681 && !TREE_OPERAND (stmt, 0))
2683 bsi_remove (&last, true);
2684 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
2686 continue;
2689 /* There can be no fallthru edge if the last statement is a control
2690 one. */
2691 if (stmt && is_ctrl_stmt (stmt))
2692 continue;
2694 /* Find a fallthru edge and emit the goto if necessary. */
2695 FOR_EACH_EDGE (e, ei, bb->succs)
2696 if (e->flags & EDGE_FALLTHRU)
2697 break;
2699 if (!e || e->dest == bb->next_bb)
2700 continue;
2702 gcc_assert (e->dest != EXIT_BLOCK_PTR);
2703 label = tree_block_label (e->dest);
2705 stmt = build1 (GOTO_EXPR, void_type_node, label);
2706 #ifdef USE_MAPPED_LOCATION
2707 SET_EXPR_LOCATION (stmt, e->goto_locus);
2708 #else
2709 SET_EXPR_LOCUS (stmt, e->goto_locus);
2710 #endif
2711 bsi_insert_after (&last, stmt, BSI_NEW_STMT);
2712 e->flags &= ~EDGE_FALLTHRU;
2716 /* Remove block annotations and other datastructures. */
2718 void
2719 delete_tree_cfg_annotations (void)
2721 label_to_block_map = NULL;
2725 /* Return the first statement in basic block BB. */
2727 tree
2728 first_stmt (basic_block bb)
2730 block_stmt_iterator i = bsi_start (bb);
2731 return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
2735 /* Return the last statement in basic block BB. */
2737 tree
2738 last_stmt (basic_block bb)
2740 block_stmt_iterator b = bsi_last (bb);
2741 return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
2745 /* Return a pointer to the last statement in block BB. */
2747 tree *
2748 last_stmt_ptr (basic_block bb)
2750 block_stmt_iterator last = bsi_last (bb);
2751 return !bsi_end_p (last) ? bsi_stmt_ptr (last) : NULL;
2755 /* Return the last statement of an otherwise empty block. Return NULL
2756 if the block is totally empty, or if it contains more than one
2757 statement. */
2759 tree
2760 last_and_only_stmt (basic_block bb)
2762 block_stmt_iterator i = bsi_last (bb);
2763 tree last, prev;
2765 if (bsi_end_p (i))
2766 return NULL_TREE;
2768 last = bsi_stmt (i);
2769 bsi_prev (&i);
2770 if (bsi_end_p (i))
2771 return last;
2773 /* Empty statements should no longer appear in the instruction stream.
2774 Everything that might have appeared before should be deleted by
2775 remove_useless_stmts, and the optimizers should just bsi_remove
2776 instead of smashing with build_empty_stmt.
2778 Thus the only thing that should appear here in a block containing
2779 one executable statement is a label. */
2780 prev = bsi_stmt (i);
2781 if (TREE_CODE (prev) == LABEL_EXPR)
2782 return last;
2783 else
2784 return NULL_TREE;
2788 /* Mark BB as the basic block holding statement T. */
2790 void
2791 set_bb_for_stmt (tree t, basic_block bb)
2793 if (TREE_CODE (t) == PHI_NODE)
2794 PHI_BB (t) = bb;
2795 else if (TREE_CODE (t) == STATEMENT_LIST)
2797 tree_stmt_iterator i;
2798 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2799 set_bb_for_stmt (tsi_stmt (i), bb);
2801 else
2803 stmt_ann_t ann = get_stmt_ann (t);
2804 ann->bb = bb;
2806 /* If the statement is a label, add the label to block-to-labels map
2807 so that we can speed up edge creation for GOTO_EXPRs. */
2808 if (TREE_CODE (t) == LABEL_EXPR)
2810 int uid;
2812 t = LABEL_EXPR_LABEL (t);
2813 uid = LABEL_DECL_UID (t);
2814 if (uid == -1)
2816 unsigned old_len = VEC_length (basic_block, label_to_block_map);
2817 LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
2818 if (old_len <= (unsigned) uid)
2820 basic_block *addr;
2821 unsigned new_len = 3 * uid / 2;
2823 VEC_safe_grow (basic_block, gc, label_to_block_map,
2824 new_len);
2825 addr = VEC_address (basic_block, label_to_block_map);
2826 memset (&addr[old_len],
2827 0, sizeof (basic_block) * (new_len - old_len));
2830 else
2831 /* We're moving an existing label. Make sure that we've
2832 removed it from the old block. */
2833 gcc_assert (!bb
2834 || !VEC_index (basic_block, label_to_block_map, uid));
2835 VEC_replace (basic_block, label_to_block_map, uid, bb);
2840 /* Finds iterator for STMT. */
2842 extern block_stmt_iterator
2843 bsi_for_stmt (tree stmt)
2845 block_stmt_iterator bsi;
2847 for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
2848 if (bsi_stmt (bsi) == stmt)
2849 return bsi;
2851 gcc_unreachable ();
2854 /* Mark statement T as modified, and update it. */
2855 static inline void
2856 update_modified_stmts (tree t)
2858 if (TREE_CODE (t) == STATEMENT_LIST)
2860 tree_stmt_iterator i;
2861 tree stmt;
2862 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2864 stmt = tsi_stmt (i);
2865 update_stmt_if_modified (stmt);
2868 else
2869 update_stmt_if_modified (t);
2872 /* Insert statement (or statement list) T before the statement
2873 pointed-to by iterator I. M specifies how to update iterator I
2874 after insertion (see enum bsi_iterator_update). */
2876 void
2877 bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2879 set_bb_for_stmt (t, i->bb);
2880 update_modified_stmts (t);
2881 tsi_link_before (&i->tsi, t, m);
2885 /* Insert statement (or statement list) T after the statement
2886 pointed-to by iterator I. M specifies how to update iterator I
2887 after insertion (see enum bsi_iterator_update). */
2889 void
2890 bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2892 set_bb_for_stmt (t, i->bb);
2893 update_modified_stmts (t);
2894 tsi_link_after (&i->tsi, t, m);
2898 /* Remove the statement pointed to by iterator I. The iterator is updated
2899 to the next statement.
2901 When REMOVE_EH_INFO is true we remove the statement pointed to by
2902 iterator I from the EH tables. Otherwise we do not modify the EH
2903 tables.
2905 Generally, REMOVE_EH_INFO should be true when the statement is going to
2906 be removed from the IL and not reinserted elsewhere. */
2908 void
2909 bsi_remove (block_stmt_iterator *i, bool remove_eh_info)
2911 tree t = bsi_stmt (*i);
2912 set_bb_for_stmt (t, NULL);
2913 delink_stmt_imm_use (t);
2914 tsi_delink (&i->tsi);
2915 mark_stmt_modified (t);
2916 if (remove_eh_info)
2917 remove_stmt_from_eh_region (t);
2921 /* Move the statement at FROM so it comes right after the statement at TO. */
2923 void
2924 bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
2926 tree stmt = bsi_stmt (*from);
2927 bsi_remove (from, false);
2928 bsi_insert_after (to, stmt, BSI_SAME_STMT);
2932 /* Move the statement at FROM so it comes right before the statement at TO. */
2934 void
2935 bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
2937 tree stmt = bsi_stmt (*from);
2938 bsi_remove (from, false);
2939 bsi_insert_before (to, stmt, BSI_SAME_STMT);
2943 /* Move the statement at FROM to the end of basic block BB. */
2945 void
2946 bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
2948 block_stmt_iterator last = bsi_last (bb);
2950 /* Have to check bsi_end_p because it could be an empty block. */
2951 if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
2952 bsi_move_before (from, &last);
2953 else
2954 bsi_move_after (from, &last);
2958 /* Replace the contents of the statement pointed to by iterator BSI
2959 with STMT. If UPDATE_EH_INFO is true, the exception handling
2960 information of the original statement is moved to the new statement. */
2963 void
2964 bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool update_eh_info)
2966 int eh_region;
2967 tree orig_stmt = bsi_stmt (*bsi);
2969 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
2970 set_bb_for_stmt (stmt, bsi->bb);
2972 /* Preserve EH region information from the original statement, if
2973 requested by the caller. */
2974 if (update_eh_info)
2976 eh_region = lookup_stmt_eh_region (orig_stmt);
2977 if (eh_region >= 0)
2979 remove_stmt_from_eh_region (orig_stmt);
2980 add_stmt_to_eh_region (stmt, eh_region);
2984 delink_stmt_imm_use (orig_stmt);
2985 *bsi_stmt_ptr (*bsi) = stmt;
2986 mark_stmt_modified (stmt);
2987 update_modified_stmts (stmt);
2991 /* Insert the statement pointed-to by BSI into edge E. Every attempt
2992 is made to place the statement in an existing basic block, but
2993 sometimes that isn't possible. When it isn't possible, the edge is
2994 split and the statement is added to the new block.
2996 In all cases, the returned *BSI points to the correct location. The
2997 return value is true if insertion should be done after the location,
2998 or false if it should be done before the location. If new basic block
2999 has to be created, it is stored in *NEW_BB. */
3001 static bool
3002 tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
3003 basic_block *new_bb)
3005 basic_block dest, src;
3006 tree tmp;
3008 dest = e->dest;
3009 restart:
3011 /* If the destination has one predecessor which has no PHI nodes,
3012 insert there. Except for the exit block.
3014 The requirement for no PHI nodes could be relaxed. Basically we
3015 would have to examine the PHIs to prove that none of them used
3016 the value set by the statement we want to insert on E. That
3017 hardly seems worth the effort. */
3018 if (single_pred_p (dest)
3019 && ! phi_nodes (dest)
3020 && dest != EXIT_BLOCK_PTR)
3022 *bsi = bsi_start (dest);
3023 if (bsi_end_p (*bsi))
3024 return true;
3026 /* Make sure we insert after any leading labels. */
3027 tmp = bsi_stmt (*bsi);
3028 while (TREE_CODE (tmp) == LABEL_EXPR)
3030 bsi_next (bsi);
3031 if (bsi_end_p (*bsi))
3032 break;
3033 tmp = bsi_stmt (*bsi);
3036 if (bsi_end_p (*bsi))
3038 *bsi = bsi_last (dest);
3039 return true;
3041 else
3042 return false;
3045 /* If the source has one successor, the edge is not abnormal and
3046 the last statement does not end a basic block, insert there.
3047 Except for the entry block. */
3048 src = e->src;
3049 if ((e->flags & EDGE_ABNORMAL) == 0
3050 && single_succ_p (src)
3051 && src != ENTRY_BLOCK_PTR)
3053 *bsi = bsi_last (src);
3054 if (bsi_end_p (*bsi))
3055 return true;
3057 tmp = bsi_stmt (*bsi);
3058 if (!stmt_ends_bb_p (tmp))
3059 return true;
3061 /* Insert code just before returning the value. We may need to decompose
3062 the return in the case it contains non-trivial operand. */
3063 if (TREE_CODE (tmp) == RETURN_EXPR)
3065 tree op = TREE_OPERAND (tmp, 0);
3066 if (op && !is_gimple_val (op))
3068 gcc_assert (TREE_CODE (op) == MODIFY_EXPR);
3069 bsi_insert_before (bsi, op, BSI_NEW_STMT);
3070 TREE_OPERAND (tmp, 0) = TREE_OPERAND (op, 0);
3072 bsi_prev (bsi);
3073 return true;
3077 /* Otherwise, create a new basic block, and split this edge. */
3078 dest = split_edge (e);
3079 if (new_bb)
3080 *new_bb = dest;
3081 e = single_pred_edge (dest);
3082 goto restart;
3086 /* This routine will commit all pending edge insertions, creating any new
3087 basic blocks which are necessary. */
3089 void
3090 bsi_commit_edge_inserts (void)
3092 basic_block bb;
3093 edge e;
3094 edge_iterator ei;
3096 bsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR), NULL);
3098 FOR_EACH_BB (bb)
3099 FOR_EACH_EDGE (e, ei, bb->succs)
3100 bsi_commit_one_edge_insert (e, NULL);
3104 /* Commit insertions pending at edge E. If a new block is created, set NEW_BB
3105 to this block, otherwise set it to NULL. */
3107 void
3108 bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
3110 if (new_bb)
3111 *new_bb = NULL;
3112 if (PENDING_STMT (e))
3114 block_stmt_iterator bsi;
3115 tree stmt = PENDING_STMT (e);
3117 PENDING_STMT (e) = NULL_TREE;
3119 if (tree_find_edge_insert_loc (e, &bsi, new_bb))
3120 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3121 else
3122 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3127 /* Add STMT to the pending list of edge E. No actual insertion is
3128 made until a call to bsi_commit_edge_inserts () is made. */
3130 void
3131 bsi_insert_on_edge (edge e, tree stmt)
3133 append_to_statement_list (stmt, &PENDING_STMT (e));
3136 /* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts. If a new
3137 block has to be created, it is returned. */
3139 basic_block
3140 bsi_insert_on_edge_immediate (edge e, tree stmt)
3142 block_stmt_iterator bsi;
3143 basic_block new_bb = NULL;
3145 gcc_assert (!PENDING_STMT (e));
3147 if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
3148 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3149 else
3150 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3152 return new_bb;
3155 /*---------------------------------------------------------------------------
3156 Tree specific functions for CFG manipulation
3157 ---------------------------------------------------------------------------*/
3159 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
3161 static void
3162 reinstall_phi_args (edge new_edge, edge old_edge)
3164 tree var, phi;
3166 if (!PENDING_STMT (old_edge))
3167 return;
3169 for (var = PENDING_STMT (old_edge), phi = phi_nodes (new_edge->dest);
3170 var && phi;
3171 var = TREE_CHAIN (var), phi = PHI_CHAIN (phi))
3173 tree result = TREE_PURPOSE (var);
3174 tree arg = TREE_VALUE (var);
3176 gcc_assert (result == PHI_RESULT (phi));
3178 add_phi_arg (phi, arg, new_edge);
3181 PENDING_STMT (old_edge) = NULL;
3184 /* Returns the basic block after that the new basic block created
3185 by splitting edge EDGE_IN should be placed. Tries to keep the new block
3186 near its "logical" location. This is of most help to humans looking
3187 at debugging dumps. */
3189 static basic_block
3190 split_edge_bb_loc (edge edge_in)
3192 basic_block dest = edge_in->dest;
3194 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
3195 return edge_in->src;
3196 else
3197 return dest->prev_bb;
3200 /* Split a (typically critical) edge EDGE_IN. Return the new block.
3201 Abort on abnormal edges. */
3203 static basic_block
3204 tree_split_edge (edge edge_in)
3206 basic_block new_bb, after_bb, dest, src;
3207 edge new_edge, e;
3209 /* Abnormal edges cannot be split. */
3210 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
3212 src = edge_in->src;
3213 dest = edge_in->dest;
3215 after_bb = split_edge_bb_loc (edge_in);
3217 new_bb = create_empty_bb (after_bb);
3218 new_bb->frequency = EDGE_FREQUENCY (edge_in);
3219 new_bb->count = edge_in->count;
3220 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
3221 new_edge->probability = REG_BR_PROB_BASE;
3222 new_edge->count = edge_in->count;
3224 e = redirect_edge_and_branch (edge_in, new_bb);
3225 gcc_assert (e);
3226 reinstall_phi_args (new_edge, e);
3228 return new_bb;
3232 /* Return true when BB has label LABEL in it. */
3234 static bool
3235 has_label_p (basic_block bb, tree label)
3237 block_stmt_iterator bsi;
3239 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3241 tree stmt = bsi_stmt (bsi);
3243 if (TREE_CODE (stmt) != LABEL_EXPR)
3244 return false;
3245 if (LABEL_EXPR_LABEL (stmt) == label)
3246 return true;
3248 return false;
3252 /* Callback for walk_tree, check that all elements with address taken are
3253 properly noticed as such. The DATA is an int* that is 1 if TP was seen
3254 inside a PHI node. */
3256 static tree
3257 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3259 tree t = *tp, x;
3260 bool in_phi = (data != NULL);
3262 if (TYPE_P (t))
3263 *walk_subtrees = 0;
3265 /* Check operand N for being valid GIMPLE and give error MSG if not. */
3266 #define CHECK_OP(N, MSG) \
3267 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
3268 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
3270 switch (TREE_CODE (t))
3272 case SSA_NAME:
3273 if (SSA_NAME_IN_FREE_LIST (t))
3275 error ("SSA name in freelist but still referenced");
3276 return *tp;
3278 break;
3280 case ASSERT_EXPR:
3281 x = fold (ASSERT_EXPR_COND (t));
3282 if (x == boolean_false_node)
3284 error ("ASSERT_EXPR with an always-false condition");
3285 return *tp;
3287 break;
3289 case MODIFY_EXPR:
3290 x = TREE_OPERAND (t, 0);
3291 if (TREE_CODE (x) == BIT_FIELD_REF
3292 && is_gimple_reg (TREE_OPERAND (x, 0)))
3294 error ("GIMPLE register modified with BIT_FIELD_REF");
3295 return t;
3297 break;
3299 case ADDR_EXPR:
3301 bool old_invariant;
3302 bool old_constant;
3303 bool old_side_effects;
3304 bool new_invariant;
3305 bool new_constant;
3306 bool new_side_effects;
3308 /* ??? tree-ssa-alias.c may have overlooked dead PHI nodes, missing
3309 dead PHIs that take the address of something. But if the PHI
3310 result is dead, the fact that it takes the address of anything
3311 is irrelevant. Because we can not tell from here if a PHI result
3312 is dead, we just skip this check for PHIs altogether. This means
3313 we may be missing "valid" checks, but what can you do?
3314 This was PR19217. */
3315 if (in_phi)
3316 break;
3318 old_invariant = TREE_INVARIANT (t);
3319 old_constant = TREE_CONSTANT (t);
3320 old_side_effects = TREE_SIDE_EFFECTS (t);
3322 recompute_tree_invariant_for_addr_expr (t);
3323 new_invariant = TREE_INVARIANT (t);
3324 new_side_effects = TREE_SIDE_EFFECTS (t);
3325 new_constant = TREE_CONSTANT (t);
3327 if (old_invariant != new_invariant)
3329 error ("invariant not recomputed when ADDR_EXPR changed");
3330 return t;
3333 if (old_constant != new_constant)
3335 error ("constant not recomputed when ADDR_EXPR changed");
3336 return t;
3338 if (old_side_effects != new_side_effects)
3340 error ("side effects not recomputed when ADDR_EXPR changed");
3341 return t;
3344 /* Skip any references (they will be checked when we recurse down the
3345 tree) and ensure that any variable used as a prefix is marked
3346 addressable. */
3347 for (x = TREE_OPERAND (t, 0);
3348 handled_component_p (x);
3349 x = TREE_OPERAND (x, 0))
3352 if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
3353 return NULL;
3354 if (!TREE_ADDRESSABLE (x))
3356 error ("address taken, but ADDRESSABLE bit not set");
3357 return x;
3359 break;
3362 case COND_EXPR:
3363 x = COND_EXPR_COND (t);
3364 if (TREE_CODE (TREE_TYPE (x)) != BOOLEAN_TYPE)
3366 error ("non-boolean used in condition");
3367 return x;
3369 if (!is_gimple_condexpr (x))
3371 error ("invalid conditional operand");
3372 return x;
3374 break;
3376 case NOP_EXPR:
3377 case CONVERT_EXPR:
3378 case FIX_TRUNC_EXPR:
3379 case FIX_CEIL_EXPR:
3380 case FIX_FLOOR_EXPR:
3381 case FIX_ROUND_EXPR:
3382 case FLOAT_EXPR:
3383 case NEGATE_EXPR:
3384 case ABS_EXPR:
3385 case BIT_NOT_EXPR:
3386 case NON_LVALUE_EXPR:
3387 case TRUTH_NOT_EXPR:
3388 CHECK_OP (0, "invalid operand to unary operator");
3389 break;
3391 case REALPART_EXPR:
3392 case IMAGPART_EXPR:
3393 case COMPONENT_REF:
3394 case ARRAY_REF:
3395 case ARRAY_RANGE_REF:
3396 case BIT_FIELD_REF:
3397 case VIEW_CONVERT_EXPR:
3398 /* We have a nest of references. Verify that each of the operands
3399 that determine where to reference is either a constant or a variable,
3400 verify that the base is valid, and then show we've already checked
3401 the subtrees. */
3402 while (handled_component_p (t))
3404 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3405 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3406 else if (TREE_CODE (t) == ARRAY_REF
3407 || TREE_CODE (t) == ARRAY_RANGE_REF)
3409 CHECK_OP (1, "invalid array index");
3410 if (TREE_OPERAND (t, 2))
3411 CHECK_OP (2, "invalid array lower bound");
3412 if (TREE_OPERAND (t, 3))
3413 CHECK_OP (3, "invalid array stride");
3415 else if (TREE_CODE (t) == BIT_FIELD_REF)
3417 CHECK_OP (1, "invalid operand to BIT_FIELD_REF");
3418 CHECK_OP (2, "invalid operand to BIT_FIELD_REF");
3421 t = TREE_OPERAND (t, 0);
3424 if (!CONSTANT_CLASS_P (t) && !is_gimple_lvalue (t))
3426 error ("invalid reference prefix");
3427 return t;
3429 *walk_subtrees = 0;
3430 break;
3432 case LT_EXPR:
3433 case LE_EXPR:
3434 case GT_EXPR:
3435 case GE_EXPR:
3436 case EQ_EXPR:
3437 case NE_EXPR:
3438 case UNORDERED_EXPR:
3439 case ORDERED_EXPR:
3440 case UNLT_EXPR:
3441 case UNLE_EXPR:
3442 case UNGT_EXPR:
3443 case UNGE_EXPR:
3444 case UNEQ_EXPR:
3445 case LTGT_EXPR:
3446 case PLUS_EXPR:
3447 case MINUS_EXPR:
3448 case MULT_EXPR:
3449 case TRUNC_DIV_EXPR:
3450 case CEIL_DIV_EXPR:
3451 case FLOOR_DIV_EXPR:
3452 case ROUND_DIV_EXPR:
3453 case TRUNC_MOD_EXPR:
3454 case CEIL_MOD_EXPR:
3455 case FLOOR_MOD_EXPR:
3456 case ROUND_MOD_EXPR:
3457 case RDIV_EXPR:
3458 case EXACT_DIV_EXPR:
3459 case MIN_EXPR:
3460 case MAX_EXPR:
3461 case LSHIFT_EXPR:
3462 case RSHIFT_EXPR:
3463 case LROTATE_EXPR:
3464 case RROTATE_EXPR:
3465 case BIT_IOR_EXPR:
3466 case BIT_XOR_EXPR:
3467 case BIT_AND_EXPR:
3468 CHECK_OP (0, "invalid operand to binary operator");
3469 CHECK_OP (1, "invalid operand to binary operator");
3470 break;
3472 default:
3473 break;
3475 return NULL;
3477 #undef CHECK_OP
3481 /* Verify STMT, return true if STMT is not in GIMPLE form.
3482 TODO: Implement type checking. */
3484 static bool
3485 verify_stmt (tree stmt, bool last_in_block)
3487 tree addr;
3489 if (OMP_DIRECTIVE_P (stmt))
3491 /* OpenMP directives are validated by the FE and never operated
3492 on by the optimizers. Furthermore, OMP_FOR may contain
3493 non-gimple expressions when the main index variable has had
3494 its address taken. This does not affect the loop itself
3495 because the header of an OMP_FOR is merely used to determine
3496 how to setup the parallel iteration. */
3497 return false;
3500 if (!is_gimple_stmt (stmt))
3502 error ("is not a valid GIMPLE statement");
3503 goto fail;
3506 addr = walk_tree (&stmt, verify_expr, NULL, NULL);
3507 if (addr)
3509 debug_generic_stmt (addr);
3510 return true;
3513 /* If the statement is marked as part of an EH region, then it is
3514 expected that the statement could throw. Verify that when we
3515 have optimizations that simplify statements such that we prove
3516 that they cannot throw, that we update other data structures
3517 to match. */
3518 if (lookup_stmt_eh_region (stmt) >= 0)
3520 if (!tree_could_throw_p (stmt))
3522 error ("statement marked for throw, but doesn%'t");
3523 goto fail;
3525 if (!last_in_block && tree_can_throw_internal (stmt))
3527 error ("statement marked for throw in middle of block");
3528 goto fail;
3532 return false;
3534 fail:
3535 debug_generic_stmt (stmt);
3536 return true;
3540 /* Return true when the T can be shared. */
3542 static bool
3543 tree_node_can_be_shared (tree t)
3545 if (IS_TYPE_OR_DECL_P (t)
3546 || is_gimple_min_invariant (t)
3547 || TREE_CODE (t) == SSA_NAME
3548 || t == error_mark_node
3549 || TREE_CODE (t) == IDENTIFIER_NODE)
3550 return true;
3552 if (TREE_CODE (t) == CASE_LABEL_EXPR)
3553 return true;
3555 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3556 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
3557 || TREE_CODE (t) == COMPONENT_REF
3558 || TREE_CODE (t) == REALPART_EXPR
3559 || TREE_CODE (t) == IMAGPART_EXPR)
3560 t = TREE_OPERAND (t, 0);
3562 if (DECL_P (t))
3563 return true;
3565 return false;
3569 /* Called via walk_trees. Verify tree sharing. */
3571 static tree
3572 verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
3574 htab_t htab = (htab_t) data;
3575 void **slot;
3577 if (tree_node_can_be_shared (*tp))
3579 *walk_subtrees = false;
3580 return NULL;
3583 slot = htab_find_slot (htab, *tp, INSERT);
3584 if (*slot)
3585 return (tree) *slot;
3586 *slot = *tp;
3588 return NULL;
3592 /* Verify the GIMPLE statement chain. */
3594 void
3595 verify_stmts (void)
3597 basic_block bb;
3598 block_stmt_iterator bsi;
3599 bool err = false;
3600 htab_t htab;
3601 tree addr;
3603 timevar_push (TV_TREE_STMT_VERIFY);
3604 htab = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
3606 FOR_EACH_BB (bb)
3608 tree phi;
3609 int i;
3611 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
3613 int phi_num_args = PHI_NUM_ARGS (phi);
3615 if (bb_for_stmt (phi) != bb)
3617 error ("bb_for_stmt (phi) is set to a wrong basic block");
3618 err |= true;
3621 for (i = 0; i < phi_num_args; i++)
3623 tree t = PHI_ARG_DEF (phi, i);
3624 tree addr;
3626 /* Addressable variables do have SSA_NAMEs but they
3627 are not considered gimple values. */
3628 if (TREE_CODE (t) != SSA_NAME
3629 && TREE_CODE (t) != FUNCTION_DECL
3630 && !is_gimple_val (t))
3632 error ("PHI def is not a GIMPLE value");
3633 debug_generic_stmt (phi);
3634 debug_generic_stmt (t);
3635 err |= true;
3638 addr = walk_tree (&t, verify_expr, (void *) 1, NULL);
3639 if (addr)
3641 debug_generic_stmt (addr);
3642 err |= true;
3645 addr = walk_tree (&t, verify_node_sharing, htab, NULL);
3646 if (addr)
3648 error ("incorrect sharing of tree nodes");
3649 debug_generic_stmt (phi);
3650 debug_generic_stmt (addr);
3651 err |= true;
3656 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
3658 tree stmt = bsi_stmt (bsi);
3660 if (bb_for_stmt (stmt) != bb)
3662 error ("bb_for_stmt (stmt) is set to a wrong basic block");
3663 err |= true;
3666 bsi_next (&bsi);
3667 err |= verify_stmt (stmt, bsi_end_p (bsi));
3668 addr = walk_tree (&stmt, verify_node_sharing, htab, NULL);
3669 if (addr)
3671 error ("incorrect sharing of tree nodes");
3672 debug_generic_stmt (stmt);
3673 debug_generic_stmt (addr);
3674 err |= true;
3679 if (err)
3680 internal_error ("verify_stmts failed");
3682 htab_delete (htab);
3683 timevar_pop (TV_TREE_STMT_VERIFY);
3687 /* Verifies that the flow information is OK. */
3689 static int
3690 tree_verify_flow_info (void)
3692 int err = 0;
3693 basic_block bb;
3694 block_stmt_iterator bsi;
3695 tree stmt;
3696 edge e;
3697 edge_iterator ei;
3699 if (ENTRY_BLOCK_PTR->stmt_list)
3701 error ("ENTRY_BLOCK has a statement list associated with it");
3702 err = 1;
3705 if (EXIT_BLOCK_PTR->stmt_list)
3707 error ("EXIT_BLOCK has a statement list associated with it");
3708 err = 1;
3711 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
3712 if (e->flags & EDGE_FALLTHRU)
3714 error ("fallthru to exit from bb %d", e->src->index);
3715 err = 1;
3718 FOR_EACH_BB (bb)
3720 bool found_ctrl_stmt = false;
3722 stmt = NULL_TREE;
3724 /* Skip labels on the start of basic block. */
3725 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3727 tree prev_stmt = stmt;
3729 stmt = bsi_stmt (bsi);
3731 if (TREE_CODE (stmt) != LABEL_EXPR)
3732 break;
3734 if (prev_stmt && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
3736 error ("nonlocal label ");
3737 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3738 fprintf (stderr, " is not first in a sequence of labels in bb %d",
3739 bb->index);
3740 err = 1;
3743 if (label_to_block (LABEL_EXPR_LABEL (stmt)) != bb)
3745 error ("label ");
3746 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3747 fprintf (stderr, " to block does not match in bb %d",
3748 bb->index);
3749 err = 1;
3752 if (decl_function_context (LABEL_EXPR_LABEL (stmt))
3753 != current_function_decl)
3755 error ("label ");
3756 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3757 fprintf (stderr, " has incorrect context in bb %d",
3758 bb->index);
3759 err = 1;
3763 /* Verify that body of basic block BB is free of control flow. */
3764 for (; !bsi_end_p (bsi); bsi_next (&bsi))
3766 tree stmt = bsi_stmt (bsi);
3768 if (found_ctrl_stmt)
3770 error ("control flow in the middle of basic block %d",
3771 bb->index);
3772 err = 1;
3775 if (stmt_ends_bb_p (stmt))
3776 found_ctrl_stmt = true;
3778 if (TREE_CODE (stmt) == LABEL_EXPR)
3780 error ("label ");
3781 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3782 fprintf (stderr, " in the middle of basic block %d", bb->index);
3783 err = 1;
3787 bsi = bsi_last (bb);
3788 if (bsi_end_p (bsi))
3789 continue;
3791 stmt = bsi_stmt (bsi);
3793 err |= verify_eh_edges (stmt);
3795 if (is_ctrl_stmt (stmt))
3797 FOR_EACH_EDGE (e, ei, bb->succs)
3798 if (e->flags & EDGE_FALLTHRU)
3800 error ("fallthru edge after a control statement in bb %d",
3801 bb->index);
3802 err = 1;
3806 switch (TREE_CODE (stmt))
3808 case COND_EXPR:
3810 edge true_edge;
3811 edge false_edge;
3812 if (TREE_CODE (COND_EXPR_THEN (stmt)) != GOTO_EXPR
3813 || TREE_CODE (COND_EXPR_ELSE (stmt)) != GOTO_EXPR)
3815 error ("structured COND_EXPR at the end of bb %d", bb->index);
3816 err = 1;
3819 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
3821 if (!true_edge || !false_edge
3822 || !(true_edge->flags & EDGE_TRUE_VALUE)
3823 || !(false_edge->flags & EDGE_FALSE_VALUE)
3824 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3825 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3826 || EDGE_COUNT (bb->succs) >= 3)
3828 error ("wrong outgoing edge flags at end of bb %d",
3829 bb->index);
3830 err = 1;
3833 if (!has_label_p (true_edge->dest,
3834 GOTO_DESTINATION (COND_EXPR_THEN (stmt))))
3836 error ("%<then%> label does not match edge at end of bb %d",
3837 bb->index);
3838 err = 1;
3841 if (!has_label_p (false_edge->dest,
3842 GOTO_DESTINATION (COND_EXPR_ELSE (stmt))))
3844 error ("%<else%> label does not match edge at end of bb %d",
3845 bb->index);
3846 err = 1;
3849 break;
3851 case GOTO_EXPR:
3852 if (simple_goto_p (stmt))
3854 error ("explicit goto at end of bb %d", bb->index);
3855 err = 1;
3857 else
3859 /* FIXME. We should double check that the labels in the
3860 destination blocks have their address taken. */
3861 FOR_EACH_EDGE (e, ei, bb->succs)
3862 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
3863 | EDGE_FALSE_VALUE))
3864 || !(e->flags & EDGE_ABNORMAL))
3866 error ("wrong outgoing edge flags at end of bb %d",
3867 bb->index);
3868 err = 1;
3871 break;
3873 case RETURN_EXPR:
3874 if (!single_succ_p (bb)
3875 || (single_succ_edge (bb)->flags
3876 & (EDGE_FALLTHRU | EDGE_ABNORMAL
3877 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3879 error ("wrong outgoing edge flags at end of bb %d", bb->index);
3880 err = 1;
3882 if (single_succ (bb) != EXIT_BLOCK_PTR)
3884 error ("return edge does not point to exit in bb %d",
3885 bb->index);
3886 err = 1;
3888 break;
3890 case SWITCH_EXPR:
3892 tree prev;
3893 edge e;
3894 size_t i, n;
3895 tree vec;
3897 vec = SWITCH_LABELS (stmt);
3898 n = TREE_VEC_LENGTH (vec);
3900 /* Mark all the destination basic blocks. */
3901 for (i = 0; i < n; ++i)
3903 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3904 basic_block label_bb = label_to_block (lab);
3906 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
3907 label_bb->aux = (void *)1;
3910 /* Verify that the case labels are sorted. */
3911 prev = TREE_VEC_ELT (vec, 0);
3912 for (i = 1; i < n - 1; ++i)
3914 tree c = TREE_VEC_ELT (vec, i);
3915 if (! CASE_LOW (c))
3917 error ("found default case not at end of case vector");
3918 err = 1;
3919 continue;
3921 if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
3923 error ("case labels not sorted: ");
3924 print_generic_expr (stderr, prev, 0);
3925 fprintf (stderr," is greater than ");
3926 print_generic_expr (stderr, c, 0);
3927 fprintf (stderr," but comes before it.\n");
3928 err = 1;
3930 prev = c;
3932 if (CASE_LOW (TREE_VEC_ELT (vec, n - 1)))
3934 error ("no default case found at end of case vector");
3935 err = 1;
3938 FOR_EACH_EDGE (e, ei, bb->succs)
3940 if (!e->dest->aux)
3942 error ("extra outgoing edge %d->%d",
3943 bb->index, e->dest->index);
3944 err = 1;
3946 e->dest->aux = (void *)2;
3947 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
3948 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3950 error ("wrong outgoing edge flags at end of bb %d",
3951 bb->index);
3952 err = 1;
3956 /* Check that we have all of them. */
3957 for (i = 0; i < n; ++i)
3959 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3960 basic_block label_bb = label_to_block (lab);
3962 if (label_bb->aux != (void *)2)
3964 error ("missing edge %i->%i",
3965 bb->index, label_bb->index);
3966 err = 1;
3970 FOR_EACH_EDGE (e, ei, bb->succs)
3971 e->dest->aux = (void *)0;
3974 default: ;
3978 if (dom_computed[CDI_DOMINATORS] >= DOM_NO_FAST_QUERY)
3979 verify_dominators (CDI_DOMINATORS);
3981 return err;
3985 /* Updates phi nodes after creating a forwarder block joined
3986 by edge FALLTHRU. */
3988 static void
3989 tree_make_forwarder_block (edge fallthru)
3991 edge e;
3992 edge_iterator ei;
3993 basic_block dummy, bb;
3994 tree phi, new_phi, var;
3996 dummy = fallthru->src;
3997 bb = fallthru->dest;
3999 if (single_pred_p (bb))
4000 return;
4002 /* If we redirected a branch we must create new phi nodes at the
4003 start of BB. */
4004 for (phi = phi_nodes (dummy); phi; phi = PHI_CHAIN (phi))
4006 var = PHI_RESULT (phi);
4007 new_phi = create_phi_node (var, bb);
4008 SSA_NAME_DEF_STMT (var) = new_phi;
4009 SET_PHI_RESULT (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
4010 add_phi_arg (new_phi, PHI_RESULT (phi), fallthru);
4013 /* Ensure that the PHI node chain is in the same order. */
4014 set_phi_nodes (bb, phi_reverse (phi_nodes (bb)));
4016 /* Add the arguments we have stored on edges. */
4017 FOR_EACH_EDGE (e, ei, bb->preds)
4019 if (e == fallthru)
4020 continue;
4022 flush_pending_stmts (e);
4027 /* Return a non-special label in the head of basic block BLOCK.
4028 Create one if it doesn't exist. */
4030 tree
4031 tree_block_label (basic_block bb)
4033 block_stmt_iterator i, s = bsi_start (bb);
4034 bool first = true;
4035 tree label, stmt;
4037 for (i = s; !bsi_end_p (i); first = false, bsi_next (&i))
4039 stmt = bsi_stmt (i);
4040 if (TREE_CODE (stmt) != LABEL_EXPR)
4041 break;
4042 label = LABEL_EXPR_LABEL (stmt);
4043 if (!DECL_NONLOCAL (label))
4045 if (!first)
4046 bsi_move_before (&i, &s);
4047 return label;
4051 label = create_artificial_label ();
4052 stmt = build1 (LABEL_EXPR, void_type_node, label);
4053 bsi_insert_before (&s, stmt, BSI_NEW_STMT);
4054 return label;
4058 /* Attempt to perform edge redirection by replacing a possibly complex
4059 jump instruction by a goto or by removing the jump completely.
4060 This can apply only if all edges now point to the same block. The
4061 parameters and return values are equivalent to
4062 redirect_edge_and_branch. */
4064 static edge
4065 tree_try_redirect_by_replacing_jump (edge e, basic_block target)
4067 basic_block src = e->src;
4068 block_stmt_iterator b;
4069 tree stmt;
4071 /* We can replace or remove a complex jump only when we have exactly
4072 two edges. */
4073 if (EDGE_COUNT (src->succs) != 2
4074 /* Verify that all targets will be TARGET. Specifically, the
4075 edge that is not E must also go to TARGET. */
4076 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4077 return NULL;
4079 b = bsi_last (src);
4080 if (bsi_end_p (b))
4081 return NULL;
4082 stmt = bsi_stmt (b);
4084 if (TREE_CODE (stmt) == COND_EXPR
4085 || TREE_CODE (stmt) == SWITCH_EXPR)
4087 bsi_remove (&b, true);
4088 e = ssa_redirect_edge (e, target);
4089 e->flags = EDGE_FALLTHRU;
4090 return e;
4093 return NULL;
4097 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4098 edge representing the redirected branch. */
4100 static edge
4101 tree_redirect_edge_and_branch (edge e, basic_block dest)
4103 basic_block bb = e->src;
4104 block_stmt_iterator bsi;
4105 edge ret;
4106 tree label, stmt;
4108 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4109 return NULL;
4111 if (e->src != ENTRY_BLOCK_PTR
4112 && (ret = tree_try_redirect_by_replacing_jump (e, dest)))
4113 return ret;
4115 if (e->dest == dest)
4116 return NULL;
4118 label = tree_block_label (dest);
4120 bsi = bsi_last (bb);
4121 stmt = bsi_end_p (bsi) ? NULL : bsi_stmt (bsi);
4123 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
4125 case COND_EXPR:
4126 stmt = (e->flags & EDGE_TRUE_VALUE
4127 ? COND_EXPR_THEN (stmt)
4128 : COND_EXPR_ELSE (stmt));
4129 GOTO_DESTINATION (stmt) = label;
4130 break;
4132 case GOTO_EXPR:
4133 /* No non-abnormal edges should lead from a non-simple goto, and
4134 simple ones should be represented implicitly. */
4135 gcc_unreachable ();
4137 case SWITCH_EXPR:
4139 tree cases = get_cases_for_edge (e, stmt);
4141 /* If we have a list of cases associated with E, then use it
4142 as it's a lot faster than walking the entire case vector. */
4143 if (cases)
4145 edge e2 = find_edge (e->src, dest);
4146 tree last, first;
4148 first = cases;
4149 while (cases)
4151 last = cases;
4152 CASE_LABEL (cases) = label;
4153 cases = TREE_CHAIN (cases);
4156 /* If there was already an edge in the CFG, then we need
4157 to move all the cases associated with E to E2. */
4158 if (e2)
4160 tree cases2 = get_cases_for_edge (e2, stmt);
4162 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4163 TREE_CHAIN (cases2) = first;
4166 else
4168 tree vec = SWITCH_LABELS (stmt);
4169 size_t i, n = TREE_VEC_LENGTH (vec);
4171 for (i = 0; i < n; i++)
4173 tree elt = TREE_VEC_ELT (vec, i);
4175 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4176 CASE_LABEL (elt) = label;
4180 break;
4183 case RETURN_EXPR:
4184 bsi_remove (&bsi, true);
4185 e->flags |= EDGE_FALLTHRU;
4186 break;
4188 default:
4189 /* Otherwise it must be a fallthru edge, and we don't need to
4190 do anything besides redirecting it. */
4191 gcc_assert (e->flags & EDGE_FALLTHRU);
4192 break;
4195 /* Update/insert PHI nodes as necessary. */
4197 /* Now update the edges in the CFG. */
4198 e = ssa_redirect_edge (e, dest);
4200 return e;
4204 /* Simple wrapper, as we can always redirect fallthru edges. */
4206 static basic_block
4207 tree_redirect_edge_and_branch_force (edge e, basic_block dest)
4209 e = tree_redirect_edge_and_branch (e, dest);
4210 gcc_assert (e);
4212 return NULL;
4216 /* Splits basic block BB after statement STMT (but at least after the
4217 labels). If STMT is NULL, BB is split just after the labels. */
4219 static basic_block
4220 tree_split_block (basic_block bb, void *stmt)
4222 block_stmt_iterator bsi, bsi_tgt;
4223 tree act;
4224 basic_block new_bb;
4225 edge e;
4226 edge_iterator ei;
4228 new_bb = create_empty_bb (bb);
4230 /* Redirect the outgoing edges. */
4231 new_bb->succs = bb->succs;
4232 bb->succs = NULL;
4233 FOR_EACH_EDGE (e, ei, new_bb->succs)
4234 e->src = new_bb;
4236 if (stmt && TREE_CODE ((tree) stmt) == LABEL_EXPR)
4237 stmt = NULL;
4239 /* Move everything from BSI to the new basic block. */
4240 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4242 act = bsi_stmt (bsi);
4243 if (TREE_CODE (act) == LABEL_EXPR)
4244 continue;
4246 if (!stmt)
4247 break;
4249 if (stmt == act)
4251 bsi_next (&bsi);
4252 break;
4256 bsi_tgt = bsi_start (new_bb);
4257 while (!bsi_end_p (bsi))
4259 act = bsi_stmt (bsi);
4260 bsi_remove (&bsi, false);
4261 bsi_insert_after (&bsi_tgt, act, BSI_NEW_STMT);
4264 return new_bb;
4268 /* Moves basic block BB after block AFTER. */
4270 static bool
4271 tree_move_block_after (basic_block bb, basic_block after)
4273 if (bb->prev_bb == after)
4274 return true;
4276 unlink_block (bb);
4277 link_block (bb, after);
4279 return true;
4283 /* Return true if basic_block can be duplicated. */
4285 static bool
4286 tree_can_duplicate_bb_p (basic_block bb ATTRIBUTE_UNUSED)
4288 return true;
4292 /* Create a duplicate of the basic block BB. NOTE: This does not
4293 preserve SSA form. */
4295 static basic_block
4296 tree_duplicate_bb (basic_block bb)
4298 basic_block new_bb;
4299 block_stmt_iterator bsi, bsi_tgt;
4300 tree phi;
4302 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
4304 /* Copy the PHI nodes. We ignore PHI node arguments here because
4305 the incoming edges have not been setup yet. */
4306 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4308 tree copy = create_phi_node (PHI_RESULT (phi), new_bb);
4309 create_new_def_for (PHI_RESULT (copy), copy, PHI_RESULT_PTR (copy));
4312 /* Keep the chain of PHI nodes in the same order so that they can be
4313 updated by ssa_redirect_edge. */
4314 set_phi_nodes (new_bb, phi_reverse (phi_nodes (new_bb)));
4316 bsi_tgt = bsi_start (new_bb);
4317 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4319 def_operand_p def_p;
4320 ssa_op_iter op_iter;
4321 tree stmt, copy;
4322 int region;
4324 stmt = bsi_stmt (bsi);
4325 if (TREE_CODE (stmt) == LABEL_EXPR)
4326 continue;
4328 /* Create a new copy of STMT and duplicate STMT's virtual
4329 operands. */
4330 copy = unshare_expr (stmt);
4331 bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
4332 copy_virtual_operands (copy, stmt);
4333 region = lookup_stmt_eh_region (stmt);
4334 if (region >= 0)
4335 add_stmt_to_eh_region (copy, region);
4337 /* Create new names for all the definitions created by COPY and
4338 add replacement mappings for each new name. */
4339 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
4340 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
4343 return new_bb;
4347 /* Basic block BB_COPY was created by code duplication. Add phi node
4348 arguments for edges going out of BB_COPY. The blocks that were
4349 duplicated have BB_DUPLICATED set. */
4351 void
4352 add_phi_args_after_copy_bb (basic_block bb_copy)
4354 basic_block bb, dest;
4355 edge e, e_copy;
4356 edge_iterator ei;
4357 tree phi, phi_copy, phi_next, def;
4359 bb = get_bb_original (bb_copy);
4361 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
4363 if (!phi_nodes (e_copy->dest))
4364 continue;
4366 if (e_copy->dest->flags & BB_DUPLICATED)
4367 dest = get_bb_original (e_copy->dest);
4368 else
4369 dest = e_copy->dest;
4371 e = find_edge (bb, dest);
4372 if (!e)
4374 /* During loop unrolling the target of the latch edge is copied.
4375 In this case we are not looking for edge to dest, but to
4376 duplicated block whose original was dest. */
4377 FOR_EACH_EDGE (e, ei, bb->succs)
4378 if ((e->dest->flags & BB_DUPLICATED)
4379 && get_bb_original (e->dest) == dest)
4380 break;
4382 gcc_assert (e != NULL);
4385 for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
4386 phi;
4387 phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
4389 phi_next = PHI_CHAIN (phi);
4390 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4391 add_phi_arg (phi_copy, def, e_copy);
4396 /* Blocks in REGION_COPY array of length N_REGION were created by
4397 duplication of basic blocks. Add phi node arguments for edges
4398 going from these blocks. */
4400 void
4401 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region)
4403 unsigned i;
4405 for (i = 0; i < n_region; i++)
4406 region_copy[i]->flags |= BB_DUPLICATED;
4408 for (i = 0; i < n_region; i++)
4409 add_phi_args_after_copy_bb (region_copy[i]);
4411 for (i = 0; i < n_region; i++)
4412 region_copy[i]->flags &= ~BB_DUPLICATED;
4415 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
4416 important exit edge EXIT. By important we mean that no SSA name defined
4417 inside region is live over the other exit edges of the region. All entry
4418 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
4419 to the duplicate of the region. SSA form, dominance and loop information
4420 is updated. The new basic blocks are stored to REGION_COPY in the same
4421 order as they had in REGION, provided that REGION_COPY is not NULL.
4422 The function returns false if it is unable to copy the region,
4423 true otherwise. */
4425 bool
4426 tree_duplicate_sese_region (edge entry, edge exit,
4427 basic_block *region, unsigned n_region,
4428 basic_block *region_copy)
4430 unsigned i, n_doms;
4431 bool free_region_copy = false, copying_header = false;
4432 struct loop *loop = entry->dest->loop_father;
4433 edge exit_copy;
4434 basic_block *doms;
4435 edge redirected;
4436 int total_freq = 0, entry_freq = 0;
4437 gcov_type total_count = 0, entry_count = 0;
4439 if (!can_copy_bbs_p (region, n_region))
4440 return false;
4442 /* Some sanity checking. Note that we do not check for all possible
4443 missuses of the functions. I.e. if you ask to copy something weird,
4444 it will work, but the state of structures probably will not be
4445 correct. */
4446 for (i = 0; i < n_region; i++)
4448 /* We do not handle subloops, i.e. all the blocks must belong to the
4449 same loop. */
4450 if (region[i]->loop_father != loop)
4451 return false;
4453 if (region[i] != entry->dest
4454 && region[i] == loop->header)
4455 return false;
4458 loop->copy = loop;
4460 /* In case the function is used for loop header copying (which is the primary
4461 use), ensure that EXIT and its copy will be new latch and entry edges. */
4462 if (loop->header == entry->dest)
4464 copying_header = true;
4465 loop->copy = loop->outer;
4467 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
4468 return false;
4470 for (i = 0; i < n_region; i++)
4471 if (region[i] != exit->src
4472 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
4473 return false;
4476 if (!region_copy)
4478 region_copy = XNEWVEC (basic_block, n_region);
4479 free_region_copy = true;
4482 gcc_assert (!need_ssa_update_p ());
4484 /* Record blocks outside the region that are dominated by something
4485 inside. */
4486 doms = XNEWVEC (basic_block, n_basic_blocks);
4487 initialize_original_copy_tables ();
4489 n_doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region, doms);
4491 if (entry->dest->count)
4493 total_count = entry->dest->count;
4494 entry_count = entry->count;
4495 /* Fix up corner cases, to avoid division by zero or creation of negative
4496 frequencies. */
4497 if (entry_count > total_count)
4498 entry_count = total_count;
4500 else
4502 total_freq = entry->dest->frequency;
4503 entry_freq = EDGE_FREQUENCY (entry);
4504 /* Fix up corner cases, to avoid division by zero or creation of negative
4505 frequencies. */
4506 if (total_freq == 0)
4507 total_freq = 1;
4508 else if (entry_freq > total_freq)
4509 entry_freq = total_freq;
4512 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
4513 split_edge_bb_loc (entry));
4514 if (total_count)
4516 scale_bbs_frequencies_gcov_type (region, n_region,
4517 total_count - entry_count,
4518 total_count);
4519 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
4520 total_count);
4522 else
4524 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
4525 total_freq);
4526 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
4529 if (copying_header)
4531 loop->header = exit->dest;
4532 loop->latch = exit->src;
4535 /* Redirect the entry and add the phi node arguments. */
4536 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
4537 gcc_assert (redirected != NULL);
4538 flush_pending_stmts (entry);
4540 /* Concerning updating of dominators: We must recount dominators
4541 for entry block and its copy. Anything that is outside of the
4542 region, but was dominated by something inside needs recounting as
4543 well. */
4544 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
4545 doms[n_doms++] = get_bb_original (entry->dest);
4546 iterate_fix_dominators (CDI_DOMINATORS, doms, n_doms);
4547 free (doms);
4549 /* Add the other PHI node arguments. */
4550 add_phi_args_after_copy (region_copy, n_region);
4552 /* Update the SSA web. */
4553 update_ssa (TODO_update_ssa);
4555 if (free_region_copy)
4556 free (region_copy);
4558 free_original_copy_tables ();
4559 return true;
4563 DEF_VEC_P(basic_block);
4564 DEF_VEC_ALLOC_P(basic_block,heap);
4567 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
4568 adding blocks when the dominator traversal reaches EXIT. This
4569 function silently assumes that ENTRY strictly dominates EXIT. */
4571 static void
4572 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
4573 VEC(basic_block,heap) **bbs_p)
4575 basic_block son;
4577 for (son = first_dom_son (CDI_DOMINATORS, entry);
4578 son;
4579 son = next_dom_son (CDI_DOMINATORS, son))
4581 VEC_safe_push (basic_block, heap, *bbs_p, son);
4582 if (son != exit)
4583 gather_blocks_in_sese_region (son, exit, bbs_p);
4588 struct move_stmt_d
4590 tree block;
4591 tree from_context;
4592 tree to_context;
4593 bitmap vars_to_remove;
4594 htab_t new_label_map;
4595 bool remap_decls_p;
4598 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
4599 contained in *TP and change the DECL_CONTEXT of every local
4600 variable referenced in *TP. */
4602 static tree
4603 move_stmt_r (tree *tp, int *walk_subtrees, void *data)
4605 struct move_stmt_d *p = (struct move_stmt_d *) data;
4606 tree t = *tp;
4608 if (p->block && IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (TREE_CODE (t))))
4609 TREE_BLOCK (t) = p->block;
4611 if (OMP_DIRECTIVE_P (t) && TREE_CODE (t) != OMP_RETURN_EXPR)
4613 /* Do not remap variables inside OMP directives. Variables
4614 referenced in clauses and directive header belong to the
4615 parent function and should not be moved into the child
4616 function. */
4617 bool save_remap_decls_p = p->remap_decls_p;
4618 p->remap_decls_p = false;
4619 *walk_subtrees = 0;
4621 walk_tree (&OMP_BODY (t), move_stmt_r, p, NULL);
4623 p->remap_decls_p = save_remap_decls_p;
4625 else if (DECL_P (t) && DECL_CONTEXT (t) == p->from_context)
4627 if (TREE_CODE (t) == LABEL_DECL)
4629 if (p->new_label_map)
4631 struct tree_map in, *out;
4632 in.from = t;
4633 out = htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
4634 if (out)
4635 *tp = t = out->to;
4638 DECL_CONTEXT (t) = p->to_context;
4640 else if (p->remap_decls_p)
4642 DECL_CONTEXT (t) = p->to_context;
4644 if (TREE_CODE (t) == VAR_DECL)
4646 struct function *f = DECL_STRUCT_FUNCTION (p->to_context);
4647 f->unexpanded_var_list
4648 = tree_cons (0, t, f->unexpanded_var_list);
4650 /* Mark T to be removed from the original function,
4651 otherwise it will be given a DECL_RTL when the
4652 original function is expanded. */
4653 bitmap_set_bit (p->vars_to_remove, DECL_UID (t));
4657 else if (TYPE_P (t))
4658 *walk_subtrees = 0;
4660 return NULL_TREE;
4664 /* Move basic block BB from function CFUN to function DEST_FN. The
4665 block is moved out of the original linked list and placed after
4666 block AFTER in the new list. Also, the block is removed from the
4667 original array of blocks and placed in DEST_FN's array of blocks.
4668 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
4669 updated to reflect the moved edges.
4671 On exit, local variables that need to be removed from
4672 CFUN->UNEXPANDED_VAR_LIST will have been added to VARS_TO_REMOVE. */
4674 static void
4675 move_block_to_fn (struct function *dest_cfun, basic_block bb,
4676 basic_block after, bool update_edge_count_p,
4677 bitmap vars_to_remove, htab_t new_label_map, int eh_offset)
4679 struct control_flow_graph *cfg;
4680 edge_iterator ei;
4681 edge e;
4682 block_stmt_iterator si;
4683 struct move_stmt_d d;
4684 unsigned old_len, new_len;
4685 basic_block *addr;
4687 /* Link BB to the new linked list. */
4688 move_block_after (bb, after);
4690 /* Update the edge count in the corresponding flowgraphs. */
4691 if (update_edge_count_p)
4692 FOR_EACH_EDGE (e, ei, bb->succs)
4694 cfun->cfg->x_n_edges--;
4695 dest_cfun->cfg->x_n_edges++;
4698 /* Remove BB from the original basic block array. */
4699 VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
4700 cfun->cfg->x_n_basic_blocks--;
4702 /* Grow DEST_CFUN's basic block array if needed. */
4703 cfg = dest_cfun->cfg;
4704 cfg->x_n_basic_blocks++;
4705 if (bb->index > cfg->x_last_basic_block)
4706 cfg->x_last_basic_block = bb->index;
4708 old_len = VEC_length (basic_block, cfg->x_basic_block_info);
4709 if ((unsigned) cfg->x_last_basic_block >= old_len)
4711 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
4712 VEC_safe_grow (basic_block, gc, cfg->x_basic_block_info, new_len);
4713 addr = VEC_address (basic_block, cfg->x_basic_block_info);
4714 memset (&addr[old_len], 0, sizeof (basic_block) * (new_len - old_len));
4717 VEC_replace (basic_block, cfg->x_basic_block_info,
4718 cfg->x_last_basic_block, bb);
4720 /* The statements in BB need to be associated with a new TREE_BLOCK.
4721 Labels need to be associated with a new label-to-block map. */
4722 memset (&d, 0, sizeof (d));
4723 d.vars_to_remove = vars_to_remove;
4725 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
4727 tree stmt = bsi_stmt (si);
4728 int region;
4730 d.from_context = cfun->decl;
4731 d.to_context = dest_cfun->decl;
4732 d.remap_decls_p = true;
4733 d.new_label_map = new_label_map;
4734 if (TREE_BLOCK (stmt))
4735 d.block = DECL_INITIAL (dest_cfun->decl);
4737 walk_tree (&stmt, move_stmt_r, &d, NULL);
4739 if (TREE_CODE (stmt) == LABEL_EXPR)
4741 tree label = LABEL_EXPR_LABEL (stmt);
4742 int uid = LABEL_DECL_UID (label);
4744 gcc_assert (uid > -1);
4746 old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
4747 if (old_len <= (unsigned) uid)
4749 new_len = 3 * uid / 2;
4750 VEC_safe_grow (basic_block, gc, cfg->x_label_to_block_map,
4751 new_len);
4752 addr = VEC_address (basic_block, cfg->x_label_to_block_map);
4753 memset (&addr[old_len], 0,
4754 sizeof (basic_block) * (new_len - old_len));
4757 VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
4758 VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
4760 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
4762 if (uid >= dest_cfun->last_label_uid)
4763 dest_cfun->last_label_uid = uid + 1;
4765 else if (TREE_CODE (stmt) == RESX_EXPR && eh_offset != 0)
4766 TREE_OPERAND (stmt, 0) =
4767 build_int_cst (NULL_TREE,
4768 TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0))
4769 + eh_offset);
4771 region = lookup_stmt_eh_region (stmt);
4772 if (region >= 0)
4774 add_stmt_to_eh_region_fn (dest_cfun, stmt, region + eh_offset);
4775 remove_stmt_from_eh_region (stmt);
4780 /* Examine the statements in BB (which is in SRC_CFUN); find and return
4781 the outermost EH region. Use REGION as the incoming base EH region. */
4783 static int
4784 find_outermost_region_in_block (struct function *src_cfun,
4785 basic_block bb, int region)
4787 block_stmt_iterator si;
4789 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
4791 tree stmt = bsi_stmt (si);
4792 int stmt_region;
4794 stmt_region = lookup_stmt_eh_region_fn (src_cfun, stmt);
4795 if (stmt_region > 0
4796 && (region < 0 || eh_region_outer_p (src_cfun, stmt_region, region)))
4797 region = stmt_region;
4800 return region;
4803 static tree
4804 new_label_mapper (tree decl, void *data)
4806 htab_t hash = (htab_t) data;
4807 struct tree_map *m;
4808 void **slot;
4810 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
4812 m = xmalloc (sizeof (struct tree_map));
4813 m->hash = DECL_UID (decl);
4814 m->from = decl;
4815 m->to = create_artificial_label ();
4816 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
4818 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
4819 gcc_assert (*slot == NULL);
4821 *slot = m;
4823 return m->to;
4826 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
4827 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
4828 single basic block in the original CFG and the new basic block is
4829 returned. DEST_CFUN must not have a CFG yet.
4831 Note that the region need not be a pure SESE region. Blocks inside
4832 the region may contain calls to abort/exit. The only restriction
4833 is that ENTRY_BB should be the only entry point and it must
4834 dominate EXIT_BB.
4836 All local variables referenced in the region are assumed to be in
4837 the corresponding BLOCK_VARS and unexpanded variable lists
4838 associated with DEST_CFUN. */
4840 basic_block
4841 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
4842 basic_block exit_bb)
4844 VEC(basic_block,heap) *bbs;
4845 basic_block after, bb, *entry_pred, *exit_succ;
4846 struct function *saved_cfun;
4847 int *entry_flag, *exit_flag, eh_offset;
4848 unsigned i, num_entry_edges, num_exit_edges;
4849 edge e;
4850 edge_iterator ei;
4851 bitmap vars_to_remove;
4852 htab_t new_label_map;
4854 saved_cfun = cfun;
4856 /* Collect all the blocks in the region. Manually add ENTRY_BB
4857 because it won't be added by dfs_enumerate_from. */
4858 calculate_dominance_info (CDI_DOMINATORS);
4860 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
4861 region. */
4862 gcc_assert (entry_bb != exit_bb
4863 && dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb));
4865 bbs = NULL;
4866 VEC_safe_push (basic_block, heap, bbs, entry_bb);
4867 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
4869 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
4870 the predecessor edges to ENTRY_BB and the successor edges to
4871 EXIT_BB so that we can re-attach them to the new basic block that
4872 will replace the region. */
4873 num_entry_edges = EDGE_COUNT (entry_bb->preds);
4874 entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
4875 entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
4876 i = 0;
4877 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
4879 entry_flag[i] = e->flags;
4880 entry_pred[i++] = e->src;
4881 remove_edge (e);
4884 num_exit_edges = EDGE_COUNT (exit_bb->succs);
4885 exit_succ = (basic_block *) xcalloc (num_exit_edges, sizeof (basic_block));
4886 exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
4887 i = 0;
4888 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
4890 exit_flag[i] = e->flags;
4891 exit_succ[i++] = e->dest;
4892 remove_edge (e);
4895 /* Switch context to the child function to initialize DEST_FN's CFG. */
4896 gcc_assert (dest_cfun->cfg == NULL);
4897 cfun = dest_cfun;
4899 init_empty_tree_cfg ();
4901 /* Initialize EH information for the new function. */
4902 eh_offset = 0;
4903 new_label_map = NULL;
4904 if (saved_cfun->eh)
4906 int region = -1;
4908 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
4909 region = find_outermost_region_in_block (saved_cfun, bb, region);
4911 init_eh_for_function ();
4912 if (region != -1)
4914 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
4915 eh_offset = duplicate_eh_regions (saved_cfun, new_label_mapper,
4916 new_label_map, region, 0);
4920 cfun = saved_cfun;
4922 /* Move blocks from BBS into DEST_CFUN. */
4923 gcc_assert (VEC_length (basic_block, bbs) >= 2);
4924 after = dest_cfun->cfg->x_entry_block_ptr;
4925 vars_to_remove = BITMAP_ALLOC (NULL);
4926 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
4928 /* No need to update edge counts on the last block. It has
4929 already been updated earlier when we detached the region from
4930 the original CFG. */
4931 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, vars_to_remove,
4932 new_label_map, eh_offset);
4933 after = bb;
4936 if (new_label_map)
4937 htab_delete (new_label_map);
4939 /* Remove the variables marked in VARS_TO_REMOVE from
4940 CFUN->UNEXPANDED_VAR_LIST. Otherwise, they will be given a
4941 DECL_RTL in the context of CFUN. */
4942 if (!bitmap_empty_p (vars_to_remove))
4944 tree *p;
4946 for (p = &cfun->unexpanded_var_list; *p; )
4948 tree var = TREE_VALUE (*p);
4949 if (bitmap_bit_p (vars_to_remove, DECL_UID (var)))
4951 *p = TREE_CHAIN (*p);
4952 continue;
4955 p = &TREE_CHAIN (*p);
4959 BITMAP_FREE (vars_to_remove);
4961 /* Rewire the entry and exit blocks. The successor to the entry
4962 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
4963 the child function. Similarly, the predecessor of DEST_FN's
4964 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
4965 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
4966 various CFG manipulation function get to the right CFG.
4968 FIXME, this is silly. The CFG ought to become a parameter to
4969 these helpers. */
4970 cfun = dest_cfun;
4971 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
4972 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
4973 cfun = saved_cfun;
4975 /* Back in the original function, the SESE region has disappeared,
4976 create a new basic block in its place. */
4977 bb = create_empty_bb (entry_pred[0]);
4978 for (i = 0; i < num_entry_edges; i++)
4979 make_edge (entry_pred[i], bb, entry_flag[i]);
4981 for (i = 0; i < num_exit_edges; i++)
4982 make_edge (bb, exit_succ[i], exit_flag[i]);
4984 free (exit_flag);
4985 free (entry_flag);
4986 free (entry_pred);
4987 free (exit_succ);
4988 free_dominance_info (CDI_DOMINATORS);
4989 free_dominance_info (CDI_POST_DOMINATORS);
4990 VEC_free (basic_block, heap, bbs);
4992 return bb;
4996 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h) */
4998 void
4999 dump_function_to_file (tree fn, FILE *file, int flags)
5001 tree arg, vars, var;
5002 bool ignore_topmost_bind = false, any_var = false;
5003 basic_block bb;
5004 tree chain;
5005 struct function *saved_cfun;
5007 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
5009 arg = DECL_ARGUMENTS (fn);
5010 while (arg)
5012 print_generic_expr (file, arg, dump_flags);
5013 if (TREE_CHAIN (arg))
5014 fprintf (file, ", ");
5015 arg = TREE_CHAIN (arg);
5017 fprintf (file, ")\n");
5019 if (flags & TDF_DETAILS)
5020 dump_eh_tree (file, DECL_STRUCT_FUNCTION (fn));
5021 if (flags & TDF_RAW)
5023 dump_node (fn, TDF_SLIM | flags, file);
5024 return;
5027 /* Switch CFUN to point to FN. */
5028 saved_cfun = cfun;
5029 cfun = DECL_STRUCT_FUNCTION (fn);
5031 /* When GIMPLE is lowered, the variables are no longer available in
5032 BIND_EXPRs, so display them separately. */
5033 if (cfun && cfun->decl == fn && cfun->unexpanded_var_list)
5035 ignore_topmost_bind = true;
5037 fprintf (file, "{\n");
5038 for (vars = cfun->unexpanded_var_list; vars; vars = TREE_CHAIN (vars))
5040 var = TREE_VALUE (vars);
5042 print_generic_decl (file, var, flags);
5043 fprintf (file, "\n");
5045 any_var = true;
5049 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
5051 /* Make a CFG based dump. */
5052 check_bb_profile (ENTRY_BLOCK_PTR, file);
5053 if (!ignore_topmost_bind)
5054 fprintf (file, "{\n");
5056 if (any_var && n_basic_blocks)
5057 fprintf (file, "\n");
5059 FOR_EACH_BB (bb)
5060 dump_generic_bb (file, bb, 2, flags);
5062 fprintf (file, "}\n");
5063 check_bb_profile (EXIT_BLOCK_PTR, file);
5065 else
5067 int indent;
5069 /* Make a tree based dump. */
5070 chain = DECL_SAVED_TREE (fn);
5072 if (chain && TREE_CODE (chain) == BIND_EXPR)
5074 if (ignore_topmost_bind)
5076 chain = BIND_EXPR_BODY (chain);
5077 indent = 2;
5079 else
5080 indent = 0;
5082 else
5084 if (!ignore_topmost_bind)
5085 fprintf (file, "{\n");
5086 indent = 2;
5089 if (any_var)
5090 fprintf (file, "\n");
5092 print_generic_stmt_indented (file, chain, flags, indent);
5093 if (ignore_topmost_bind)
5094 fprintf (file, "}\n");
5097 fprintf (file, "\n\n");
5099 /* Restore CFUN. */
5100 cfun = saved_cfun;
5104 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
5106 void
5107 debug_function (tree fn, int flags)
5109 dump_function_to_file (fn, stderr, flags);
5113 /* Pretty print of the loops intermediate representation. */
5114 static void print_loop (FILE *, struct loop *, int);
5115 static void print_pred_bbs (FILE *, basic_block bb);
5116 static void print_succ_bbs (FILE *, basic_block bb);
5119 /* Print on FILE the indexes for the predecessors of basic_block BB. */
5121 static void
5122 print_pred_bbs (FILE *file, basic_block bb)
5124 edge e;
5125 edge_iterator ei;
5127 FOR_EACH_EDGE (e, ei, bb->preds)
5128 fprintf (file, "bb_%d ", e->src->index);
5132 /* Print on FILE the indexes for the successors of basic_block BB. */
5134 static void
5135 print_succ_bbs (FILE *file, basic_block bb)
5137 edge e;
5138 edge_iterator ei;
5140 FOR_EACH_EDGE (e, ei, bb->succs)
5141 fprintf (file, "bb_%d ", e->dest->index);
5145 /* Pretty print LOOP on FILE, indented INDENT spaces. */
5147 static void
5148 print_loop (FILE *file, struct loop *loop, int indent)
5150 char *s_indent;
5151 basic_block bb;
5153 if (loop == NULL)
5154 return;
5156 s_indent = (char *) alloca ((size_t) indent + 1);
5157 memset ((void *) s_indent, ' ', (size_t) indent);
5158 s_indent[indent] = '\0';
5160 /* Print the loop's header. */
5161 fprintf (file, "%sloop_%d\n", s_indent, loop->num);
5163 /* Print the loop's body. */
5164 fprintf (file, "%s{\n", s_indent);
5165 FOR_EACH_BB (bb)
5166 if (bb->loop_father == loop)
5168 /* Print the basic_block's header. */
5169 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
5170 print_pred_bbs (file, bb);
5171 fprintf (file, "}, succs = {");
5172 print_succ_bbs (file, bb);
5173 fprintf (file, "})\n");
5175 /* Print the basic_block's body. */
5176 fprintf (file, "%s {\n", s_indent);
5177 tree_dump_bb (bb, file, indent + 4);
5178 fprintf (file, "%s }\n", s_indent);
5181 print_loop (file, loop->inner, indent + 2);
5182 fprintf (file, "%s}\n", s_indent);
5183 print_loop (file, loop->next, indent);
5187 /* Follow a CFG edge from the entry point of the program, and on entry
5188 of a loop, pretty print the loop structure on FILE. */
5190 void
5191 print_loop_ir (FILE *file)
5193 basic_block bb;
5195 bb = BASIC_BLOCK (NUM_FIXED_BLOCKS);
5196 if (bb && bb->loop_father)
5197 print_loop (file, bb->loop_father, 0);
5201 /* Debugging loops structure at tree level. */
5203 void
5204 debug_loop_ir (void)
5206 print_loop_ir (stderr);
5210 /* Return true if BB ends with a call, possibly followed by some
5211 instructions that must stay with the call. Return false,
5212 otherwise. */
5214 static bool
5215 tree_block_ends_with_call_p (basic_block bb)
5217 block_stmt_iterator bsi = bsi_last (bb);
5218 return get_call_expr_in (bsi_stmt (bsi)) != NULL;
5222 /* Return true if BB ends with a conditional branch. Return false,
5223 otherwise. */
5225 static bool
5226 tree_block_ends_with_condjump_p (basic_block bb)
5228 tree stmt = last_stmt (bb);
5229 return (stmt && TREE_CODE (stmt) == COND_EXPR);
5233 /* Return true if we need to add fake edge to exit at statement T.
5234 Helper function for tree_flow_call_edges_add. */
5236 static bool
5237 need_fake_edge_p (tree t)
5239 tree call;
5241 /* NORETURN and LONGJMP calls already have an edge to exit.
5242 CONST and PURE calls do not need one.
5243 We don't currently check for CONST and PURE here, although
5244 it would be a good idea, because those attributes are
5245 figured out from the RTL in mark_constant_function, and
5246 the counter incrementation code from -fprofile-arcs
5247 leads to different results from -fbranch-probabilities. */
5248 call = get_call_expr_in (t);
5249 if (call
5250 && !(call_expr_flags (call) & ECF_NORETURN))
5251 return true;
5253 if (TREE_CODE (t) == ASM_EXPR
5254 && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
5255 return true;
5257 return false;
5261 /* Add fake edges to the function exit for any non constant and non
5262 noreturn calls, volatile inline assembly in the bitmap of blocks
5263 specified by BLOCKS or to the whole CFG if BLOCKS is zero. Return
5264 the number of blocks that were split.
5266 The goal is to expose cases in which entering a basic block does
5267 not imply that all subsequent instructions must be executed. */
5269 static int
5270 tree_flow_call_edges_add (sbitmap blocks)
5272 int i;
5273 int blocks_split = 0;
5274 int last_bb = last_basic_block;
5275 bool check_last_block = false;
5277 if (n_basic_blocks == NUM_FIXED_BLOCKS)
5278 return 0;
5280 if (! blocks)
5281 check_last_block = true;
5282 else
5283 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
5285 /* In the last basic block, before epilogue generation, there will be
5286 a fallthru edge to EXIT. Special care is required if the last insn
5287 of the last basic block is a call because make_edge folds duplicate
5288 edges, which would result in the fallthru edge also being marked
5289 fake, which would result in the fallthru edge being removed by
5290 remove_fake_edges, which would result in an invalid CFG.
5292 Moreover, we can't elide the outgoing fake edge, since the block
5293 profiler needs to take this into account in order to solve the minimal
5294 spanning tree in the case that the call doesn't return.
5296 Handle this by adding a dummy instruction in a new last basic block. */
5297 if (check_last_block)
5299 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
5300 block_stmt_iterator bsi = bsi_last (bb);
5301 tree t = NULL_TREE;
5302 if (!bsi_end_p (bsi))
5303 t = bsi_stmt (bsi);
5305 if (t && need_fake_edge_p (t))
5307 edge e;
5309 e = find_edge (bb, EXIT_BLOCK_PTR);
5310 if (e)
5312 bsi_insert_on_edge (e, build_empty_stmt ());
5313 bsi_commit_edge_inserts ();
5318 /* Now add fake edges to the function exit for any non constant
5319 calls since there is no way that we can determine if they will
5320 return or not... */
5321 for (i = 0; i < last_bb; i++)
5323 basic_block bb = BASIC_BLOCK (i);
5324 block_stmt_iterator bsi;
5325 tree stmt, last_stmt;
5327 if (!bb)
5328 continue;
5330 if (blocks && !TEST_BIT (blocks, i))
5331 continue;
5333 bsi = bsi_last (bb);
5334 if (!bsi_end_p (bsi))
5336 last_stmt = bsi_stmt (bsi);
5339 stmt = bsi_stmt (bsi);
5340 if (need_fake_edge_p (stmt))
5342 edge e;
5343 /* The handling above of the final block before the
5344 epilogue should be enough to verify that there is
5345 no edge to the exit block in CFG already.
5346 Calling make_edge in such case would cause us to
5347 mark that edge as fake and remove it later. */
5348 #ifdef ENABLE_CHECKING
5349 if (stmt == last_stmt)
5351 e = find_edge (bb, EXIT_BLOCK_PTR);
5352 gcc_assert (e == NULL);
5354 #endif
5356 /* Note that the following may create a new basic block
5357 and renumber the existing basic blocks. */
5358 if (stmt != last_stmt)
5360 e = split_block (bb, stmt);
5361 if (e)
5362 blocks_split++;
5364 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
5366 bsi_prev (&bsi);
5368 while (!bsi_end_p (bsi));
5372 if (blocks_split)
5373 verify_flow_info ();
5375 return blocks_split;
5378 bool
5379 tree_purge_dead_eh_edges (basic_block bb)
5381 bool changed = false;
5382 edge e;
5383 edge_iterator ei;
5384 tree stmt = last_stmt (bb);
5386 if (stmt && tree_can_throw_internal (stmt))
5387 return false;
5389 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5391 if (e->flags & EDGE_EH)
5393 remove_edge (e);
5394 changed = true;
5396 else
5397 ei_next (&ei);
5400 /* Removal of dead EH edges might change dominators of not
5401 just immediate successors. E.g. when bb1 is changed so that
5402 it no longer can throw and bb1->bb3 and bb1->bb4 are dead
5403 eh edges purged by this function in:
5407 1-->2
5408 / \ |
5409 v v |
5410 3-->4 |
5412 --->5
5415 idom(bb5) must be recomputed. For now just free the dominance
5416 info. */
5417 if (changed)
5418 free_dominance_info (CDI_DOMINATORS);
5420 return changed;
5423 bool
5424 tree_purge_all_dead_eh_edges (bitmap blocks)
5426 bool changed = false;
5427 unsigned i;
5428 bitmap_iterator bi;
5430 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
5432 changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
5435 return changed;
5438 /* This function is called whenever a new edge is created or
5439 redirected. */
5441 static void
5442 tree_execute_on_growing_pred (edge e)
5444 basic_block bb = e->dest;
5446 if (phi_nodes (bb))
5447 reserve_phi_args_for_new_edge (bb);
5450 /* This function is called immediately before edge E is removed from
5451 the edge vector E->dest->preds. */
5453 static void
5454 tree_execute_on_shrinking_pred (edge e)
5456 if (phi_nodes (e->dest))
5457 remove_phi_args (e);
5460 /*---------------------------------------------------------------------------
5461 Helper functions for Loop versioning
5462 ---------------------------------------------------------------------------*/
5464 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
5465 of 'first'. Both of them are dominated by 'new_head' basic block. When
5466 'new_head' was created by 'second's incoming edge it received phi arguments
5467 on the edge by split_edge(). Later, additional edge 'e' was created to
5468 connect 'new_head' and 'first'. Now this routine adds phi args on this
5469 additional edge 'e' that new_head to second edge received as part of edge
5470 splitting.
5473 static void
5474 tree_lv_adjust_loop_header_phi (basic_block first, basic_block second,
5475 basic_block new_head, edge e)
5477 tree phi1, phi2;
5478 edge e2 = find_edge (new_head, second);
5480 /* Because NEW_HEAD has been created by splitting SECOND's incoming
5481 edge, we should always have an edge from NEW_HEAD to SECOND. */
5482 gcc_assert (e2 != NULL);
5484 /* Browse all 'second' basic block phi nodes and add phi args to
5485 edge 'e' for 'first' head. PHI args are always in correct order. */
5487 for (phi2 = phi_nodes (second), phi1 = phi_nodes (first);
5488 phi2 && phi1;
5489 phi2 = PHI_CHAIN (phi2), phi1 = PHI_CHAIN (phi1))
5491 tree def = PHI_ARG_DEF (phi2, e2->dest_idx);
5492 add_phi_arg (phi1, def, e);
5496 /* Adds a if else statement to COND_BB with condition COND_EXPR.
5497 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
5498 the destination of the ELSE part. */
5499 static void
5500 tree_lv_add_condition_to_bb (basic_block first_head, basic_block second_head,
5501 basic_block cond_bb, void *cond_e)
5503 block_stmt_iterator bsi;
5504 tree goto1 = NULL_TREE;
5505 tree goto2 = NULL_TREE;
5506 tree new_cond_expr = NULL_TREE;
5507 tree cond_expr = (tree) cond_e;
5508 edge e0;
5510 /* Build new conditional expr */
5511 goto1 = build1 (GOTO_EXPR, void_type_node, tree_block_label (first_head));
5512 goto2 = build1 (GOTO_EXPR, void_type_node, tree_block_label (second_head));
5513 new_cond_expr = build3 (COND_EXPR, void_type_node, cond_expr, goto1, goto2);
5515 /* Add new cond in cond_bb. */
5516 bsi = bsi_start (cond_bb);
5517 bsi_insert_after (&bsi, new_cond_expr, BSI_NEW_STMT);
5518 /* Adjust edges appropriately to connect new head with first head
5519 as well as second head. */
5520 e0 = single_succ_edge (cond_bb);
5521 e0->flags &= ~EDGE_FALLTHRU;
5522 e0->flags |= EDGE_FALSE_VALUE;
5525 struct cfg_hooks tree_cfg_hooks = {
5526 "tree",
5527 tree_verify_flow_info,
5528 tree_dump_bb, /* dump_bb */
5529 create_bb, /* create_basic_block */
5530 tree_redirect_edge_and_branch,/* redirect_edge_and_branch */
5531 tree_redirect_edge_and_branch_force,/* redirect_edge_and_branch_force */
5532 remove_bb, /* delete_basic_block */
5533 tree_split_block, /* split_block */
5534 tree_move_block_after, /* move_block_after */
5535 tree_can_merge_blocks_p, /* can_merge_blocks_p */
5536 tree_merge_blocks, /* merge_blocks */
5537 tree_predict_edge, /* predict_edge */
5538 tree_predicted_by_p, /* predicted_by_p */
5539 tree_can_duplicate_bb_p, /* can_duplicate_block_p */
5540 tree_duplicate_bb, /* duplicate_block */
5541 tree_split_edge, /* split_edge */
5542 tree_make_forwarder_block, /* make_forward_block */
5543 NULL, /* tidy_fallthru_edge */
5544 tree_block_ends_with_call_p, /* block_ends_with_call_p */
5545 tree_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
5546 tree_flow_call_edges_add, /* flow_call_edges_add */
5547 tree_execute_on_growing_pred, /* execute_on_growing_pred */
5548 tree_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
5549 tree_duplicate_loop_to_header_edge, /* duplicate loop for trees */
5550 tree_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
5551 tree_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
5552 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
5553 flush_pending_stmts /* flush_pending_stmts */
5557 /* Split all critical edges. */
5559 static unsigned int
5560 split_critical_edges (void)
5562 basic_block bb;
5563 edge e;
5564 edge_iterator ei;
5566 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
5567 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
5568 mappings around the calls to split_edge. */
5569 start_recording_case_labels ();
5570 FOR_ALL_BB (bb)
5572 FOR_EACH_EDGE (e, ei, bb->succs)
5573 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
5575 split_edge (e);
5578 end_recording_case_labels ();
5579 return 0;
5582 struct tree_opt_pass pass_split_crit_edges =
5584 "crited", /* name */
5585 NULL, /* gate */
5586 split_critical_edges, /* execute */
5587 NULL, /* sub */
5588 NULL, /* next */
5589 0, /* static_pass_number */
5590 TV_TREE_SPLIT_EDGES, /* tv_id */
5591 PROP_cfg, /* properties required */
5592 PROP_no_crit_edges, /* properties_provided */
5593 0, /* properties_destroyed */
5594 0, /* todo_flags_start */
5595 TODO_dump_func, /* todo_flags_finish */
5596 0 /* letter */
5600 /* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
5601 a temporary, make sure and register it to be renamed if necessary,
5602 and finally return the temporary. Put the statements to compute
5603 EXP before the current statement in BSI. */
5605 tree
5606 gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
5608 tree t, new_stmt, orig_stmt;
5610 if (is_gimple_val (exp))
5611 return exp;
5613 t = make_rename_temp (type, NULL);
5614 new_stmt = build2 (MODIFY_EXPR, type, t, exp);
5616 orig_stmt = bsi_stmt (*bsi);
5617 SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
5618 TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
5620 bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
5622 return t;
5625 /* Build a ternary operation and gimplify it. Emit code before BSI.
5626 Return the gimple_val holding the result. */
5628 tree
5629 gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
5630 tree type, tree a, tree b, tree c)
5632 tree ret;
5634 ret = fold_build3 (code, type, a, b, c);
5635 STRIP_NOPS (ret);
5637 return gimplify_val (bsi, type, ret);
5640 /* Build a binary operation and gimplify it. Emit code before BSI.
5641 Return the gimple_val holding the result. */
5643 tree
5644 gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
5645 tree type, tree a, tree b)
5647 tree ret;
5649 ret = fold_build2 (code, type, a, b);
5650 STRIP_NOPS (ret);
5652 return gimplify_val (bsi, type, ret);
5655 /* Build a unary operation and gimplify it. Emit code before BSI.
5656 Return the gimple_val holding the result. */
5658 tree
5659 gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
5660 tree a)
5662 tree ret;
5664 ret = fold_build1 (code, type, a);
5665 STRIP_NOPS (ret);
5667 return gimplify_val (bsi, type, ret);
5672 /* Emit return warnings. */
5674 static unsigned int
5675 execute_warn_function_return (void)
5677 #ifdef USE_MAPPED_LOCATION
5678 source_location location;
5679 #else
5680 location_t *locus;
5681 #endif
5682 tree last;
5683 edge e;
5684 edge_iterator ei;
5686 /* If we have a path to EXIT, then we do return. */
5687 if (TREE_THIS_VOLATILE (cfun->decl)
5688 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
5690 #ifdef USE_MAPPED_LOCATION
5691 location = UNKNOWN_LOCATION;
5692 #else
5693 locus = NULL;
5694 #endif
5695 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5697 last = last_stmt (e->src);
5698 if (TREE_CODE (last) == RETURN_EXPR
5699 #ifdef USE_MAPPED_LOCATION
5700 && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
5701 #else
5702 && (locus = EXPR_LOCUS (last)) != NULL)
5703 #endif
5704 break;
5706 #ifdef USE_MAPPED_LOCATION
5707 if (location == UNKNOWN_LOCATION)
5708 location = cfun->function_end_locus;
5709 warning (0, "%H%<noreturn%> function does return", &location);
5710 #else
5711 if (!locus)
5712 locus = &cfun->function_end_locus;
5713 warning (0, "%H%<noreturn%> function does return", locus);
5714 #endif
5717 /* If we see "return;" in some basic block, then we do reach the end
5718 without returning a value. */
5719 else if (warn_return_type
5720 && !TREE_NO_WARNING (cfun->decl)
5721 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
5722 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
5724 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5726 tree last = last_stmt (e->src);
5727 if (TREE_CODE (last) == RETURN_EXPR
5728 && TREE_OPERAND (last, 0) == NULL
5729 && !TREE_NO_WARNING (last))
5731 #ifdef USE_MAPPED_LOCATION
5732 location = EXPR_LOCATION (last);
5733 if (location == UNKNOWN_LOCATION)
5734 location = cfun->function_end_locus;
5735 warning (0, "%Hcontrol reaches end of non-void function", &location);
5736 #else
5737 locus = EXPR_LOCUS (last);
5738 if (!locus)
5739 locus = &cfun->function_end_locus;
5740 warning (0, "%Hcontrol reaches end of non-void function", locus);
5741 #endif
5742 TREE_NO_WARNING (cfun->decl) = 1;
5743 break;
5747 return 0;
5751 /* Given a basic block B which ends with a conditional and has
5752 precisely two successors, determine which of the edges is taken if
5753 the conditional is true and which is taken if the conditional is
5754 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
5756 void
5757 extract_true_false_edges_from_block (basic_block b,
5758 edge *true_edge,
5759 edge *false_edge)
5761 edge e = EDGE_SUCC (b, 0);
5763 if (e->flags & EDGE_TRUE_VALUE)
5765 *true_edge = e;
5766 *false_edge = EDGE_SUCC (b, 1);
5768 else
5770 *false_edge = e;
5771 *true_edge = EDGE_SUCC (b, 1);
5775 struct tree_opt_pass pass_warn_function_return =
5777 NULL, /* name */
5778 NULL, /* gate */
5779 execute_warn_function_return, /* execute */
5780 NULL, /* sub */
5781 NULL, /* next */
5782 0, /* static_pass_number */
5783 0, /* tv_id */
5784 PROP_cfg, /* properties_required */
5785 0, /* properties_provided */
5786 0, /* properties_destroyed */
5787 0, /* todo_flags_start */
5788 0, /* todo_flags_finish */
5789 0 /* letter */
5792 /* Emit noreturn warnings. */
5794 static unsigned int
5795 execute_warn_function_noreturn (void)
5797 if (warn_missing_noreturn
5798 && !TREE_THIS_VOLATILE (cfun->decl)
5799 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
5800 && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl))
5801 warning (OPT_Wmissing_noreturn, "%Jfunction might be possible candidate "
5802 "for attribute %<noreturn%>",
5803 cfun->decl);
5804 return 0;
5807 struct tree_opt_pass pass_warn_function_noreturn =
5809 NULL, /* name */
5810 NULL, /* gate */
5811 execute_warn_function_noreturn, /* execute */
5812 NULL, /* sub */
5813 NULL, /* next */
5814 0, /* static_pass_number */
5815 0, /* tv_id */
5816 PROP_cfg, /* properties_required */
5817 0, /* properties_provided */
5818 0, /* properties_destroyed */
5819 0, /* todo_flags_start */
5820 0, /* todo_flags_finish */
5821 0 /* letter */