1 /* Control flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "output.h"
32 #include "flags.h"
33 #include "function.h"
34 #include "expr.h"
35 #include "ggc.h"
36 #include "langhooks.h"
37 #include "diagnostic.h"
38 #include "tree-flow.h"
39 #include "timevar.h"
40 #include "tree-dump.h"
41 #include "tree-pass.h"
42 #include "toplev.h"
43 #include "except.h"
44 #include "cfgloop.h"
45 #include "cfglayout.h"
46 #include "hashtab.h"
47 #include "tree-ssa-propagate.h"
49 /* This file contains functions for building the Control Flow Graph (CFG)
50 for a function tree. */
52 /* Local declarations. */
54 /* Initial capacity for the basic block array. */
55 static const int initial_cfg_capacity = 20;
57 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
58 which use a particular edge. The CASE_LABEL_EXPRs are chained together
59 via their TREE_CHAIN field, which we clear after we're done with the
60 hash table to prevent problems with duplication of SWITCH_EXPRs.
62 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
63 update the case vector in response to edge redirections.
65 Right now this table is set up and torn down at key points in the
66 compilation process. It would be nice if we could make the table
67 more persistent. The key is getting notification of changes to
68 the CFG (particularly edge removal, creation and redirection). */
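/* An illustrative sketch of the mapping (the labels here are hypothetical,
   not taken from the code below): for a lowered switch such as

     switch (a) { case 1: goto L1; case 2: goto L1; default: goto L2; }

   the edge from the switch block to the block holding L1 would map to the
   chain of the two CASE_LABEL_EXPRs for `1' and `2', linked through their
   TREE_CHAIN fields, while the edge to the block of L2 would map to the
   default CASE_LABEL_EXPR alone.  */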
70 struct edge_to_cases_elt
72 /* The edge itself. Necessary for hashing and equality tests. */
73 edge e;
75 /* The case labels associated with this edge. We link these up via
76 their TREE_CHAIN field, then we wipe out the TREE_CHAIN fields
77 when we destroy the hash table. This prevents problems when copying
78 SWITCH_EXPRs. */
79 tree case_labels;
82 static htab_t edge_to_cases;
84 /* CFG statistics. */
85 struct cfg_stats_d
87 long num_merged_labels;
90 static struct cfg_stats_d cfg_stats;
92 /* Nonzero if we found a computed goto while building basic blocks. */
93 static bool found_computed_goto;
95 /* Basic blocks and flowgraphs. */
96 static basic_block create_bb (void *, void *, basic_block);
97 static void make_blocks (tree);
98 static void factor_computed_gotos (void);
100 /* Edges. */
101 static void make_edges (void);
102 static void make_ctrl_stmt_edges (basic_block);
103 static void make_exit_edges (basic_block);
104 static void make_cond_expr_edges (basic_block);
105 static void make_switch_expr_edges (basic_block);
106 static void make_goto_expr_edges (basic_block);
107 static edge tree_redirect_edge_and_branch (edge, basic_block);
108 static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
109 static void split_critical_edges (void);
111 /* Various helpers. */
112 static inline bool stmt_starts_bb_p (tree, tree);
113 static int tree_verify_flow_info (void);
114 static void tree_make_forwarder_block (edge);
115 static void tree_cfg2vcg (FILE *);
117 /* Flowgraph optimization and cleanup. */
118 static void tree_merge_blocks (basic_block, basic_block);
119 static bool tree_can_merge_blocks_p (basic_block, basic_block);
120 static void remove_bb (basic_block);
121 static edge find_taken_edge_computed_goto (basic_block, tree);
122 static edge find_taken_edge_cond_expr (basic_block, tree);
123 static edge find_taken_edge_switch_expr (basic_block, tree);
124 static tree find_case_label_for_value (tree, tree);
126 void
127 init_empty_tree_cfg (void)
129 /* Initialize the basic block array. */
130 init_flow ();
131 profile_status = PROFILE_ABSENT;
132 n_basic_blocks = NUM_FIXED_BLOCKS;
133 last_basic_block = NUM_FIXED_BLOCKS;
134 basic_block_info = VEC_alloc (basic_block, gc, initial_cfg_capacity);
135 VEC_safe_grow (basic_block, gc, basic_block_info, initial_cfg_capacity);
136 memset (VEC_address (basic_block, basic_block_info), 0,
137 sizeof (basic_block) * initial_cfg_capacity);
139 /* Build a mapping of labels to their associated blocks. */
140 label_to_block_map = VEC_alloc (basic_block, gc, initial_cfg_capacity);
141 VEC_safe_grow (basic_block, gc, label_to_block_map, initial_cfg_capacity);
142 memset (VEC_address (basic_block, label_to_block_map),
143 0, sizeof (basic_block) * initial_cfg_capacity);
145 SET_BASIC_BLOCK (ENTRY_BLOCK, ENTRY_BLOCK_PTR);
146 SET_BASIC_BLOCK (EXIT_BLOCK, EXIT_BLOCK_PTR);
147 ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
148 EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
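/* A minimal sketch of the flowgraph left by the code above: only the two
   fixed blocks exist, with ENTRY_BLOCK_PTR chained directly to
   EXIT_BLOCK_PTR and n_basic_blocks == NUM_FIXED_BLOCKS.  Real blocks are
   added later by make_blocks and create_bb.  */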
151 /*---------------------------------------------------------------------------
152 Create basic blocks
153 ---------------------------------------------------------------------------*/
155 /* Entry point to the CFG builder for trees. TP points to the list of
156 statements to be added to the flowgraph. */
158 static void
159 build_tree_cfg (tree *tp)
161 /* Register specific tree functions. */
162 tree_register_cfg_hooks ();
164 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
166 init_empty_tree_cfg ();
168 found_computed_goto = 0;
169 make_blocks (*tp);
171 /* Computed gotos are hell to deal with, especially if there are
172 lots of them with a large number of destinations. So we factor
173 them to a common computed goto location before we build the
174 edge list. After we convert back to normal form, we will un-factor
175 the computed gotos since factoring introduces an unwanted jump. */
176 if (found_computed_goto)
177 factor_computed_gotos ();
179 /* Make sure there is always at least one block, even if it's empty. */
180 if (n_basic_blocks == NUM_FIXED_BLOCKS)
181 create_empty_bb (ENTRY_BLOCK_PTR);
183 /* Adjust the size of the array. */
184 if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
186 size_t old_size = VEC_length (basic_block, basic_block_info);
187 basic_block *p;
188 VEC_safe_grow (basic_block, gc, basic_block_info, n_basic_blocks);
189 p = VEC_address (basic_block, basic_block_info);
190 memset (&p[old_size], 0,
191 sizeof (basic_block) * (n_basic_blocks - old_size));
194 /* To speed up statement iterator walks, we first purge dead labels. */
195 cleanup_dead_labels ();
197 /* Group case nodes to reduce the number of edges.
198 We do this after cleaning up dead labels because otherwise we miss
199 a lot of obvious case merging opportunities. */
200 group_case_labels ();
202 /* Create the edges of the flowgraph. */
203 make_edges ();
205 /* Debugging dumps. */
207 /* Write the flowgraph to a VCG file. */
209 int local_dump_flags;
210 FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
211 if (vcg_file)
213 tree_cfg2vcg (vcg_file);
214 dump_end (TDI_vcg, vcg_file);
218 #ifdef ENABLE_CHECKING
219 verify_stmts ();
220 #endif
222 /* Dump a textual representation of the flowgraph. */
223 if (dump_file)
224 dump_tree_cfg (dump_file, dump_flags);
227 static void
228 execute_build_cfg (void)
230 build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
233 struct tree_opt_pass pass_build_cfg =
235 "cfg", /* name */
236 NULL, /* gate */
237 execute_build_cfg, /* execute */
238 NULL, /* sub */
239 NULL, /* next */
240 0, /* static_pass_number */
241 TV_TREE_CFG, /* tv_id */
242 PROP_gimple_leh, /* properties_required */
243 PROP_cfg, /* properties_provided */
244 0, /* properties_destroyed */
245 0, /* todo_flags_start */
246 TODO_verify_stmts, /* todo_flags_finish */
247 0 /* letter */
250 /* Search the CFG for any computed gotos. If found, factor them to a
251 common computed goto site. Also record the location of that site so
252 that we can un-factor the gotos after we have converted back to
253 normal form. */
255 static void
256 factor_computed_gotos (void)
258 basic_block bb;
259 tree factored_label_decl = NULL;
260 tree var = NULL;
261 tree factored_computed_goto_label = NULL;
262 tree factored_computed_goto = NULL;
264 /* We know there are one or more computed gotos in this function.
265 Examine the last statement in each basic block to see if the block
266 ends with a computed goto. */
268 FOR_EACH_BB (bb)
270 block_stmt_iterator bsi = bsi_last (bb);
271 tree last;
273 if (bsi_end_p (bsi))
274 continue;
275 last = bsi_stmt (bsi);
277 /* Ignore the computed goto we create when we factor the original
278 computed gotos. */
279 if (last == factored_computed_goto)
280 continue;
282 /* If the last statement is a computed goto, factor it. */
283 if (computed_goto_p (last))
285 tree assignment;
287 /* The first time we find a computed goto we need to create
288 the factored goto block and the variable each original
289 computed goto will use for its goto destination. */
290 if (! factored_computed_goto)
292 basic_block new_bb = create_empty_bb (bb);
293 block_stmt_iterator new_bsi = bsi_start (new_bb);
295 /* Create the destination of the factored goto. Each original
296 computed goto will put its desired destination into this
297 variable and jump to the label we create immediately
298 below. */
299 var = create_tmp_var (ptr_type_node, "gotovar");
301 /* Build a label for the new block which will contain the
302 factored computed goto. */
303 factored_label_decl = create_artificial_label ();
304 factored_computed_goto_label
305 = build1 (LABEL_EXPR, void_type_node, factored_label_decl);
306 bsi_insert_after (&new_bsi, factored_computed_goto_label,
307 BSI_NEW_STMT);
309 /* Build our new computed goto. */
310 factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
311 bsi_insert_after (&new_bsi, factored_computed_goto,
312 BSI_NEW_STMT);
315 /* Copy the original computed goto's destination into VAR. */
316 assignment = build2 (MODIFY_EXPR, ptr_type_node,
317 var, GOTO_DESTINATION (last));
318 bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);
320 /* And re-vector the computed goto to the new destination. */
321 GOTO_DESTINATION (last) = factored_label_decl;
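/* A sketch of the factoring performed above, using the temporaries created
   in this function (the label name is illustrative):

     before:                     after:
       goto *p_1;                  gotovar = p_1;  goto factored;
       ...                         ...
       goto *p_2;                  gotovar = p_2;  goto factored;
                                   factored: goto *gotovar;

   Every original computed goto is re-vectored to the single factored label,
   so only one block ends in a computed goto.  */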
327 /* Build a flowgraph for the statement_list STMT_LIST. */
329 static void
330 make_blocks (tree stmt_list)
332 tree_stmt_iterator i = tsi_start (stmt_list);
333 tree stmt = NULL;
334 bool start_new_block = true;
335 bool first_stmt_of_list = true;
336 basic_block bb = ENTRY_BLOCK_PTR;
338 while (!tsi_end_p (i))
340 tree prev_stmt;
342 prev_stmt = stmt;
343 stmt = tsi_stmt (i);
345 /* If the statement starts a new basic block or if we have determined
346 in a previous pass that we need to create a new block for STMT, do
347 so now. */
348 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
350 if (!first_stmt_of_list)
351 stmt_list = tsi_split_statement_list_before (&i);
352 bb = create_basic_block (stmt_list, NULL, bb);
353 start_new_block = false;
356 /* Now add STMT to BB and create the subgraphs for special statement
357 codes. */
358 set_bb_for_stmt (stmt, bb);
360 if (computed_goto_p (stmt))
361 found_computed_goto = true;
363 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
364 next iteration. */
365 if (stmt_ends_bb_p (stmt))
366 start_new_block = true;
368 tsi_next (&i);
369 first_stmt_of_list = false;
374 /* Create and return a new empty basic block after bb AFTER. */
376 static basic_block
377 create_bb (void *h, void *e, basic_block after)
379 basic_block bb;
381 gcc_assert (!e);
383 /* Create and initialize a new basic block. Since alloc_block uses
384 ggc_alloc_cleared to allocate a basic block, we do not have to
385 clear the newly allocated basic block here. */
386 bb = alloc_block ();
388 bb->index = last_basic_block;
389 bb->flags = BB_NEW;
390 bb->stmt_list = h ? (tree) h : alloc_stmt_list ();
392 /* Add the new block to the linked list of blocks. */
393 link_block (bb, after);
395 /* Grow the basic block array if needed. */
396 if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
398 size_t old_size = VEC_length (basic_block, basic_block_info);
399 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
400 basic_block *p;
401 VEC_safe_grow (basic_block, gc, basic_block_info, new_size);
402 p = VEC_address (basic_block, basic_block_info);
403 memset (&p[old_size], 0, sizeof (basic_block) * (new_size - old_size));
406 /* Add the newly created block to the array. */
407 SET_BASIC_BLOCK (last_basic_block, bb);
409 n_basic_blocks++;
410 last_basic_block++;
412 return bb;
416 /*---------------------------------------------------------------------------
417 Edge creation
418 ---------------------------------------------------------------------------*/
420 /* Fold COND_EXPR_COND of each COND_EXPR. */
422 void
423 fold_cond_expr_cond (void)
425 basic_block bb;
427 FOR_EACH_BB (bb)
429 tree stmt = last_stmt (bb);
431 if (stmt
432 && TREE_CODE (stmt) == COND_EXPR)
434 tree cond = fold (COND_EXPR_COND (stmt));
435 if (integer_zerop (cond))
436 COND_EXPR_COND (stmt) = boolean_false_node;
437 else if (integer_onep (cond))
438 COND_EXPR_COND (stmt) = boolean_true_node;
443 /* Join all the blocks in the flowgraph. */
445 static void
446 make_edges (void)
448 basic_block bb;
450 /* Create an edge from entry to the first block with executable
451 statements in it. */
452 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);
454 /* Traverse the basic block array placing edges. */
455 FOR_EACH_BB (bb)
457 tree first = first_stmt (bb);
458 tree last = last_stmt (bb);
460 if (first)
462 /* Edges for statements that always alter flow control. */
463 if (is_ctrl_stmt (last))
464 make_ctrl_stmt_edges (bb);
466 /* Edges for statements that sometimes alter flow control. */
467 if (is_ctrl_altering_stmt (last))
468 make_exit_edges (bb);
471 /* Finally, if no edges were created above, this is a regular
472 basic block that only needs a fallthru edge. */
473 if (EDGE_COUNT (bb->succs) == 0)
474 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
477 /* We do not care about fake edges, so remove any that the CFG
478 builder inserted for completeness. */
479 remove_fake_exit_edges ();
481 /* Fold COND_EXPR_COND of each COND_EXPR. */
482 fold_cond_expr_cond ();
484 /* Clean up the graph and warn for unreachable code. */
485 cleanup_tree_cfg ();
489 /* Link an OMP_SECTIONS block to all the OMP_SECTION blocks in its body. */
491 static void
492 make_omp_sections_edges (basic_block bb)
494 basic_block exit_bb;
495 size_t i, n;
496 tree vec, stmt;
498 stmt = last_stmt (bb);
499 vec = OMP_SECTIONS_SECTIONS (stmt);
500 n = TREE_VEC_LENGTH (vec);
501 exit_bb = bb_for_stmt (TREE_VEC_ELT (vec, n - 1));
503 for (i = 0; i < n - 1; i += 2)
505 basic_block start_bb = bb_for_stmt (TREE_VEC_ELT (vec, i));
506 basic_block end_bb = bb_for_stmt (TREE_VEC_ELT (vec, i + 1));
507 make_edge (bb, start_bb, EDGE_ABNORMAL);
508 make_edge (end_bb, exit_bb, EDGE_FALLTHRU);
511 /* Once the CFG has been built, the vector of sections is no longer
512 useful. The region can be easily obtained with build_omp_regions.
513 Furthermore, this sharing of tree expressions is not allowed by the
514 statement verifier. */
515 OMP_SECTIONS_SECTIONS (stmt) = NULL_TREE;
520 /* Create edges for control statement at basic block BB. */
522 static void
523 make_ctrl_stmt_edges (basic_block bb)
525 tree last = last_stmt (bb);
527 gcc_assert (last);
528 switch (TREE_CODE (last))
530 case GOTO_EXPR:
531 make_goto_expr_edges (bb);
532 break;
534 case RETURN_EXPR:
535 make_edge (bb, EXIT_BLOCK_PTR, 0);
536 break;
538 case COND_EXPR:
539 make_cond_expr_edges (bb);
540 break;
542 case SWITCH_EXPR:
543 make_switch_expr_edges (bb);
544 break;
546 case RESX_EXPR:
547 make_eh_edges (last);
548 /* Yet another NORETURN hack. */
549 if (EDGE_COUNT (bb->succs) == 0)
550 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
551 break;
553 default:
554 gcc_unreachable ();
559 /* Create exit edges for statements in block BB that alter the flow of
560 control. Statements that alter the control flow are 'goto', 'return'
561 and calls to non-returning functions. */
563 static void
564 make_exit_edges (basic_block bb)
566 tree last = last_stmt (bb), op;
568 gcc_assert (last);
569 switch (TREE_CODE (last))
571 case RESX_EXPR:
572 break;
573 case CALL_EXPR:
574 /* If this function receives a nonlocal goto, then we need to
575 make edges from this call site to all the nonlocal goto
576 handlers. */
577 if (TREE_SIDE_EFFECTS (last)
578 && current_function_has_nonlocal_label)
579 make_goto_expr_edges (bb);
581 /* If this statement has reachable exception handlers, then
582 create abnormal edges to them. */
583 make_eh_edges (last);
585 /* Some calls are known not to return. For such calls we create
586 a fake edge.
588 We really need to revamp how we build edges so that it's not
589 such a bloody pain to avoid creating edges for this case since
590 all we do is remove these edges when we're done building the
591 CFG. */
592 if (call_expr_flags (last) & ECF_NORETURN)
594 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
595 return;
598 /* Don't forget the fall-thru edge. */
599 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
600 break;
602 case MODIFY_EXPR:
603 /* A MODIFY_EXPR may have a CALL_EXPR on its RHS and the CALL_EXPR
604 may have an abnormal edge. Search the RHS for this case and
605 create any required edges. */
606 op = get_call_expr_in (last);
607 if (op && TREE_SIDE_EFFECTS (op)
608 && current_function_has_nonlocal_label)
609 make_goto_expr_edges (bb);
611 make_eh_edges (last);
612 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
613 break;
615 case OMP_PARALLEL:
616 case OMP_FOR:
617 case OMP_SINGLE:
618 case OMP_MASTER:
619 case OMP_ORDERED:
620 case OMP_CRITICAL:
621 make_edge (bb, bb->next_bb, EDGE_ABNORMAL);
623 case OMP_RETURN_EXPR:
624 if (EDGE_COUNT (bb->succs) == 0)
625 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
626 break;
628 case OMP_SECTIONS:
629 make_omp_sections_edges (bb);
630 break;
632 case OMP_SECTION:
633 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
634 break;
636 default:
637 gcc_unreachable ();
642 /* Create the edges for a COND_EXPR starting at block BB.
643 At this point, both clauses must contain only simple gotos. */
645 static void
646 make_cond_expr_edges (basic_block bb)
648 tree entry = last_stmt (bb);
649 basic_block then_bb, else_bb;
650 tree then_label, else_label;
651 edge e;
653 gcc_assert (entry);
654 gcc_assert (TREE_CODE (entry) == COND_EXPR);
656 /* Entry basic blocks for each component. */
657 then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
658 else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
659 then_bb = label_to_block (then_label);
660 else_bb = label_to_block (else_label);
662 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
663 #ifdef USE_MAPPED_LOCATION
664 e->goto_locus = EXPR_LOCATION (COND_EXPR_THEN (entry));
665 #else
666 e->goto_locus = EXPR_LOCUS (COND_EXPR_THEN (entry));
667 #endif
668 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
669 if (e)
671 #ifdef USE_MAPPED_LOCATION
672 e->goto_locus = EXPR_LOCATION (COND_EXPR_ELSE (entry));
673 #else
674 e->goto_locus = EXPR_LOCUS (COND_EXPR_ELSE (entry));
675 #endif
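/* An illustrative example of the form handled above: by this point a
   COND_EXPR has been lowered so that both arms are simple gotos,

     if (cond) goto <then_label>; else goto <else_label>;

   and the block gets exactly two outgoing edges, EDGE_TRUE_VALUE to the
   block holding <then_label> and EDGE_FALSE_VALUE to the block holding
   <else_label>, each annotated with the goto locus of its arm.  */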
679 /* Hashing routine for EDGE_TO_CASES. */
681 static hashval_t
682 edge_to_cases_hash (const void *p)
684 edge e = ((struct edge_to_cases_elt *)p)->e;
686 /* Hash on the edge itself (which is a pointer). */
687 return htab_hash_pointer (e);
690 /* Equality routine for EDGE_TO_CASES, edges are unique, so testing
691 for equality is just a pointer comparison. */
693 static int
694 edge_to_cases_eq (const void *p1, const void *p2)
696 edge e1 = ((struct edge_to_cases_elt *)p1)->e;
697 edge e2 = ((struct edge_to_cases_elt *)p2)->e;
699 return e1 == e2;
702 /* Called for each element in the hash table (P) as we delete the
703 edge to cases hash table.
705 Clear all the TREE_CHAINs to prevent problems with copying of
706 SWITCH_EXPRs and structure sharing rules, then free the hash table
707 element. */
709 static void
710 edge_to_cases_cleanup (void *p)
712 struct edge_to_cases_elt *elt = (struct edge_to_cases_elt *) p;
713 tree t, next;
715 for (t = elt->case_labels; t; t = next)
717 next = TREE_CHAIN (t);
718 TREE_CHAIN (t) = NULL;
720 free (p);
723 /* Start recording information mapping edges to case labels. */
725 void
726 start_recording_case_labels (void)
728 gcc_assert (edge_to_cases == NULL);
730 edge_to_cases = htab_create (37,
731 edge_to_cases_hash,
732 edge_to_cases_eq,
733 edge_to_cases_cleanup);
736 /* Return nonzero if we are recording information for case labels. */
738 static bool
739 recording_case_labels_p (void)
741 return (edge_to_cases != NULL);
744 /* Stop recording information mapping edges to case labels and
745 remove any information we have recorded. */
746 void
747 end_recording_case_labels (void)
749 htab_delete (edge_to_cases);
750 edge_to_cases = NULL;
753 /* Record that CASE_LABEL (a CASE_LABEL_EXPR) references edge E. */
755 static void
756 record_switch_edge (edge e, tree case_label)
758 struct edge_to_cases_elt *elt;
759 void **slot;
761 /* Build a hash table element so we can see if E is already
762 in the table. */
763 elt = XNEW (struct edge_to_cases_elt);
764 elt->e = e;
765 elt->case_labels = case_label;
767 slot = htab_find_slot (edge_to_cases, elt, INSERT);
769 if (*slot == NULL)
771 /* E was not in the hash table. Install E into the hash table. */
772 *slot = (void *)elt;
774 else
776 /* E was already in the hash table. Free ELT as we do not need it
777 anymore. */
778 free (elt);
780 /* Get the entry stored in the hash table. */
781 elt = (struct edge_to_cases_elt *) *slot;
783 /* Add it to the chain of CASE_LABEL_EXPRs referencing E. */
784 TREE_CHAIN (case_label) = elt->case_labels;
785 elt->case_labels = case_label;
789 /* If we are inside a {start,end}_recording_cases block, then return
790 a chain of CASE_LABEL_EXPRs from T which reference E.
792 Otherwise return NULL. */
794 static tree
795 get_cases_for_edge (edge e, tree t)
797 struct edge_to_cases_elt elt, *elt_p;
798 void **slot;
799 size_t i, n;
800 tree vec;
802 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
803 chains available. Return NULL so the caller can detect this case. */
804 if (!recording_case_labels_p ())
805 return NULL;
807 restart:
808 elt.e = e;
809 elt.case_labels = NULL;
810 slot = htab_find_slot (edge_to_cases, &elt, NO_INSERT);
812 if (slot)
814 elt_p = (struct edge_to_cases_elt *)*slot;
815 return elt_p->case_labels;
818 /* If we did not find E in the hash table, then this must be the first
819 time we have been queried for information about E & T. Add all the
820 elements from T to the hash table then perform the query again. */
822 vec = SWITCH_LABELS (t);
823 n = TREE_VEC_LENGTH (vec);
824 for (i = 0; i < n; i++)
826 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
827 basic_block label_bb = label_to_block (lab);
828 record_switch_edge (find_edge (e->src, label_bb), TREE_VEC_ELT (vec, i));
830 goto restart;
833 /* Create the edges for a SWITCH_EXPR starting at block BB.
834 At this point, the switch body has been lowered and the
835 SWITCH_LABELS filled in, so this is in effect a multi-way branch. */
837 static void
838 make_switch_expr_edges (basic_block bb)
840 tree entry = last_stmt (bb);
841 size_t i, n;
842 tree vec;
844 vec = SWITCH_LABELS (entry);
845 n = TREE_VEC_LENGTH (vec);
847 for (i = 0; i < n; ++i)
849 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
850 basic_block label_bb = label_to_block (lab);
851 make_edge (bb, label_bb, 0);
856 /* Return the basic block holding label DEST. */
858 basic_block
859 label_to_block_fn (struct function *ifun, tree dest)
861 int uid = LABEL_DECL_UID (dest);
863 /* We would die hard when faced with an undefined label. Emit a label to
864 the very first basic block. This will hopefully make even the dataflow
865 and undefined-variable warnings come out about right. */
866 if ((errorcount || sorrycount) && uid < 0)
868 block_stmt_iterator bsi =
869 bsi_start (BASIC_BLOCK (NUM_FIXED_BLOCKS));
870 tree stmt;
872 stmt = build1 (LABEL_EXPR, void_type_node, dest);
873 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
874 uid = LABEL_DECL_UID (dest);
876 if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
877 <= (unsigned int) uid)
878 return NULL;
879 return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
882 /* Create edges for a goto statement at block BB. */
884 static void
885 make_goto_expr_edges (basic_block bb)
887 tree goto_t;
888 basic_block target_bb;
889 int for_call;
890 block_stmt_iterator last = bsi_last (bb);
892 goto_t = bsi_stmt (last);
894 /* If the last statement is not a GOTO (i.e., it is a RETURN_EXPR,
895 CALL_EXPR or MODIFY_EXPR), then the edge is an abnormal edge resulting
896 from a nonlocal goto. */
897 if (TREE_CODE (goto_t) != GOTO_EXPR)
898 for_call = 1;
899 else
901 tree dest = GOTO_DESTINATION (goto_t);
902 for_call = 0;
904 /* A GOTO to a local label creates normal edges. */
905 if (simple_goto_p (goto_t))
907 edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
908 #ifdef USE_MAPPED_LOCATION
909 e->goto_locus = EXPR_LOCATION (goto_t);
910 #else
911 e->goto_locus = EXPR_LOCUS (goto_t);
912 #endif
913 bsi_remove (&last, true);
914 return;
917 /* Nothing more to do for nonlocal gotos. */
918 if (TREE_CODE (dest) == LABEL_DECL)
919 return;
921 /* Computed gotos remain. */
924 /* Look for the block starting with the destination label. In the
925 case of a computed goto, make an edge to any label block we find
926 in the CFG. */
927 FOR_EACH_BB (target_bb)
929 block_stmt_iterator bsi;
931 for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
933 tree target = bsi_stmt (bsi);
935 if (TREE_CODE (target) != LABEL_EXPR)
936 break;
938 if (
939 /* Computed GOTOs. Make an edge to every label block that has
940 been marked as a potential target for a computed goto. */
941 (FORCED_LABEL (LABEL_EXPR_LABEL (target)) && for_call == 0)
942 /* Nonlocal GOTO target. Make an edge to every label block
943 that has been marked as a potential target for a nonlocal
944 goto. */
945 || (DECL_NONLOCAL (LABEL_EXPR_LABEL (target)) && for_call == 1))
947 make_edge (bb, target_bb, EDGE_ABNORMAL);
948 break;
953 /* Degenerate case of computed goto with no labels. */
954 if (!for_call && EDGE_COUNT (bb->succs) == 0)
955 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
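/* A summary sketch of the cases handled above: a simple `goto L' to a local
   label produces a single EDGE_FALLTHRU to the block of L and the goto
   itself is removed; a computed `goto *p' gets an EDGE_ABNORMAL to every
   block whose leading label is FORCED_LABEL; a call site in a function with
   nonlocal labels gets an EDGE_ABNORMAL to every DECL_NONLOCAL label block;
   and a computed goto with no candidate labels falls back to a fake edge to
   the exit block.  */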
959 /*---------------------------------------------------------------------------
960 Flowgraph analysis
961 ---------------------------------------------------------------------------*/
963 /* Cleanup useless labels in basic blocks. This is something we wish
964 to do early because it allows us to group case labels before creating
965 the edges for the CFG, and it speeds up block statement iterators in
966 all passes later on.
967 We only run this pass once; running it more than once is probably not
968 profitable. */
970 /* A map from basic block index to the leading label of that block. */
971 static tree *label_for_bb;
973 /* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
974 static void
975 update_eh_label (struct eh_region *region)
977 tree old_label = get_eh_region_tree_label (region);
978 if (old_label)
980 tree new_label;
981 basic_block bb = label_to_block (old_label);
983 /* ??? After optimizing, there may be EH regions with labels
984 that have already been removed from the function body, so
985 there is no basic block for them. */
986 if (! bb)
987 return;
989 new_label = label_for_bb[bb->index];
990 set_eh_region_tree_label (region, new_label);
994 /* Given LABEL return the first label in the same basic block. */
995 static tree
996 main_block_label (tree label)
998 basic_block bb = label_to_block (label);
1000 /* label_to_block possibly inserted undefined label into the chain. */
1001 if (!label_for_bb[bb->index])
1002 label_for_bb[bb->index] = label;
1003 return label_for_bb[bb->index];
1006 /* Cleanup redundant labels. This is a three-step process:
1007 1) Find the leading label for each block.
1008 2) Redirect all references to labels to the leading labels.
1009 3) Cleanup all useless labels. */
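/* For illustration (hypothetical labels): if a block starts with

     L1: ; L2: ; L3: x = 1;

   and only L2 is user-defined, L2 becomes the leading label, every
   reference to L1 and L3 is redirected to L2, and the now-unused artificial
   labels are deleted in step 3.  */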
1011 void
1012 cleanup_dead_labels (void)
1014 basic_block bb;
1015 label_for_bb = XCNEWVEC (tree, last_basic_block);
1017 /* Find a suitable label for each block. We use the first user-defined
1018 label if there is one, or otherwise just the first label we see. */
1019 FOR_EACH_BB (bb)
1021 block_stmt_iterator i;
1023 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
1025 tree label, stmt = bsi_stmt (i);
1027 if (TREE_CODE (stmt) != LABEL_EXPR)
1028 break;
1030 label = LABEL_EXPR_LABEL (stmt);
1032 /* If we have not yet seen a label for the current block,
1033 remember this one and see if there are more labels. */
1034 if (! label_for_bb[bb->index])
1036 label_for_bb[bb->index] = label;
1037 continue;
1040 /* If we did see a label for the current block already, but it
1041 is an artificially created label, replace it if the current
1042 label is a user defined label. */
1043 if (! DECL_ARTIFICIAL (label)
1044 && DECL_ARTIFICIAL (label_for_bb[bb->index]))
1046 label_for_bb[bb->index] = label;
1047 break;
1052 /* Now redirect all jumps/branches to the selected label.
1053 First do so for each block ending in a control statement. */
1054 FOR_EACH_BB (bb)
1056 tree stmt = last_stmt (bb);
1057 if (!stmt)
1058 continue;
1060 switch (TREE_CODE (stmt))
1062 case COND_EXPR:
1064 tree true_branch, false_branch;
1066 true_branch = COND_EXPR_THEN (stmt);
1067 false_branch = COND_EXPR_ELSE (stmt);
1069 GOTO_DESTINATION (true_branch)
1070 = main_block_label (GOTO_DESTINATION (true_branch));
1071 GOTO_DESTINATION (false_branch)
1072 = main_block_label (GOTO_DESTINATION (false_branch));
1074 break;
1077 case SWITCH_EXPR:
1079 size_t i;
1080 tree vec = SWITCH_LABELS (stmt);
1081 size_t n = TREE_VEC_LENGTH (vec);
1083 /* Replace all destination labels. */
1084 for (i = 0; i < n; ++i)
1086 tree elt = TREE_VEC_ELT (vec, i);
1087 tree label = main_block_label (CASE_LABEL (elt));
1088 CASE_LABEL (elt) = label;
1090 break;
1093 /* We have to handle GOTO_EXPRs until they're removed, and we don't
1094 remove them until after we've created the CFG edges. */
1095 case GOTO_EXPR:
1096 if (! computed_goto_p (stmt))
1098 GOTO_DESTINATION (stmt)
1099 = main_block_label (GOTO_DESTINATION (stmt));
1100 break;
1103 default:
1104 break;
1108 for_each_eh_region (update_eh_label);
1110 /* Finally, purge dead labels. All user-defined labels and labels that
1111 can be the target of non-local gotos and labels which have their
1112 address taken are preserved. */
1113 FOR_EACH_BB (bb)
1115 block_stmt_iterator i;
1116 tree label_for_this_bb = label_for_bb[bb->index];
1118 if (! label_for_this_bb)
1119 continue;
1121 for (i = bsi_start (bb); !bsi_end_p (i); )
1123 tree label, stmt = bsi_stmt (i);
1125 if (TREE_CODE (stmt) != LABEL_EXPR)
1126 break;
1128 label = LABEL_EXPR_LABEL (stmt);
1130 if (label == label_for_this_bb
1131 || ! DECL_ARTIFICIAL (label)
1132 || DECL_NONLOCAL (label)
1133 || FORCED_LABEL (label))
1134 bsi_next (&i);
1135 else
1136 bsi_remove (&i, true);
1140 free (label_for_bb);
1143 /* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1144 and scan the sorted vector of cases. Combine the ones jumping to the
1145 same label.
1146 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1148 void
1149 group_case_labels (void)
1151 basic_block bb;
1153 FOR_EACH_BB (bb)
1155 tree stmt = last_stmt (bb);
1156 if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
1158 tree labels = SWITCH_LABELS (stmt);
1159 int old_size = TREE_VEC_LENGTH (labels);
1160 int i, j, new_size = old_size;
1161 tree default_case = TREE_VEC_ELT (labels, old_size - 1);
1162 tree default_label;
1164 /* The default label is always the last case in a switch
1165 statement after gimplification. */
1166 default_label = CASE_LABEL (default_case);
1168 /* Look for possible opportunities to merge cases.
1169 Ignore the last element of the label vector because it
1170 must be the default case. */
1171 i = 0;
1172 while (i < old_size - 1)
1174 tree base_case, base_label, base_high;
1175 base_case = TREE_VEC_ELT (labels, i);
1177 gcc_assert (base_case);
1178 base_label = CASE_LABEL (base_case);
1180 /* Discard cases that have the same destination as the
1181 default case. */
1182 if (base_label == default_label)
1184 TREE_VEC_ELT (labels, i) = NULL_TREE;
1185 i++;
1186 new_size--;
1187 continue;
1190 base_high = CASE_HIGH (base_case) ?
1191 CASE_HIGH (base_case) : CASE_LOW (base_case);
1192 i++;
1193 /* Try to merge case labels. Break out when we reach the end
1194 of the label vector or when we cannot merge the next case
1195 label with the current one. */
1196 while (i < old_size - 1)
1198 tree merge_case = TREE_VEC_ELT (labels, i);
1199 tree merge_label = CASE_LABEL (merge_case);
1200 tree t = int_const_binop (PLUS_EXPR, base_high,
1201 integer_one_node, 1);
1203 /* Merge the cases if they jump to the same place,
1204 and their ranges are consecutive. */
1205 if (merge_label == base_label
1206 && tree_int_cst_equal (CASE_LOW (merge_case), t))
1208 base_high = CASE_HIGH (merge_case) ?
1209 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1210 CASE_HIGH (base_case) = base_high;
1211 TREE_VEC_ELT (labels, i) = NULL_TREE;
1212 new_size--;
1213 i++;
1215 else
1216 break;
1220 /* Compress the case labels in the label vector, and adjust the
1221 length of the vector. */
1222 for (i = 0, j = 0; i < new_size; i++)
1224 while (! TREE_VEC_ELT (labels, j))
1225 j++;
1226 TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
1228 TREE_VEC_LENGTH (labels) = new_size;
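/* A worked example of the merging above (labels are illustrative): given
   the sorted vector

     case 1: goto A;  case 2: goto A;  case 3: goto A;
     case 7: goto D;  default: goto D;

   the first three entries collapse into `case 1 ... 3: goto A;', the
   `case 7' entry is dropped because it shares the default's label, and the
   vector is then compressed to its new length.  */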
1233 /* Checks whether we can merge block B into block A. */
1235 static bool
1236 tree_can_merge_blocks_p (basic_block a, basic_block b)
1238 tree stmt;
1239 block_stmt_iterator bsi;
1240 tree phi;
1242 if (!single_succ_p (a))
1243 return false;
1245 if (single_succ_edge (a)->flags & EDGE_ABNORMAL)
1246 return false;
1248 if (single_succ (a) != b)
1249 return false;
1251 if (!single_pred_p (b))
1252 return false;
1254 if (b == EXIT_BLOCK_PTR)
1255 return false;
1257 /* If A ends by a statement causing exceptions or something similar, we
1258 cannot merge the blocks. */
1259 stmt = last_stmt (a);
1260 if (stmt && stmt_ends_bb_p (stmt))
1261 return false;
1263 /* Do not allow a block with only a non-local label to be merged. */
1264 if (stmt && TREE_CODE (stmt) == LABEL_EXPR
1265 && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
1266 return false;
1268 /* It must be possible to eliminate all phi nodes in B. If ssa form
1269 is not up-to-date, we cannot eliminate any phis. */
1270 phi = phi_nodes (b);
1271 if (phi)
1273 if (need_ssa_update_p ())
1274 return false;
1276 for (; phi; phi = PHI_CHAIN (phi))
1277 if (!is_gimple_reg (PHI_RESULT (phi))
1278 && !may_propagate_copy (PHI_RESULT (phi), PHI_ARG_DEF (phi, 0)))
1279 return false;
1282 /* Do not remove user labels. */
1283 for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
1285 stmt = bsi_stmt (bsi);
1286 if (TREE_CODE (stmt) != LABEL_EXPR)
1287 break;
1288 if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
1289 return false;
1292 /* Protect the loop latches. */
1293 if (current_loops
1294 && b->loop_father->latch == b)
1295 return false;
1297 return true;
1300 /* Replaces all uses of NAME by VAL. */
1302 void
1303 replace_uses_by (tree name, tree val)
1305 imm_use_iterator imm_iter;
1306 use_operand_p use;
1307 tree stmt;
1308 edge e;
1309 unsigned i;
1310 VEC(tree,heap) *stmts = VEC_alloc (tree, heap, 20);
1312 FOR_EACH_IMM_USE_SAFE (use, imm_iter, name)
1314 stmt = USE_STMT (use);
1315 replace_exp (use, val);
1317 if (TREE_CODE (stmt) == PHI_NODE)
1319 e = PHI_ARG_EDGE (stmt, PHI_ARG_INDEX_FROM_USE (use));
1320 if (e->flags & EDGE_ABNORMAL)
1322 /* This can only occur for virtual operands, since
1323 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1324 would prevent replacement. */
1325 gcc_assert (!is_gimple_reg (name));
1326 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1329 else
1330 VEC_safe_push (tree, heap, stmts, stmt);
1333 /* We do not update the statements in the loop above. Consider
1334 x = w * w;
1336 If we performed the update in the first loop, the statement
1337 would be rescanned after the first occurrence of w is replaced;
1338 the new uses would be placed at the beginning of the list,
1339 and we would never process them. */
1340 for (i = 0; VEC_iterate (tree, stmts, i, stmt); i++)
1342 tree rhs;
1344 fold_stmt_inplace (stmt);
1346 rhs = get_rhs (stmt);
1347 if (TREE_CODE (rhs) == ADDR_EXPR)
1348 recompute_tree_invariant_for_addr_expr (rhs);
1350 maybe_clean_or_replace_eh_stmt (stmt, stmt);
1351 mark_new_vars_to_rename (stmt);
1354 VEC_free (tree, heap, stmts);
1356 /* Also update the trees stored in loop structures. */
1357 if (current_loops)
1359 struct loop *loop;
1361 for (i = 0; i < current_loops->num; i++)
1363 loop = current_loops->parray[i];
1364 if (loop)
1365 substitute_in_loop_info (loop, name, val);
1370 /* Merge block B into block A. */
1372 static void
1373 tree_merge_blocks (basic_block a, basic_block b)
1375 block_stmt_iterator bsi;
1376 tree_stmt_iterator last;
1377 tree phi;
1379 if (dump_file)
1380 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1382 /* Remove all single-valued PHI nodes from block B of the form
1383 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1384 bsi = bsi_last (a);
1385 for (phi = phi_nodes (b); phi; phi = phi_nodes (b))
1387 tree def = PHI_RESULT (phi), use = PHI_ARG_DEF (phi, 0);
1388 tree copy;
1389 bool may_replace_uses = may_propagate_copy (def, use);
1391 /* In case we have loops to care about, do not propagate arguments of
1392 loop closed ssa phi nodes. */
1393 if (current_loops
1394 && is_gimple_reg (def)
1395 && TREE_CODE (use) == SSA_NAME
1396 && a->loop_father != b->loop_father)
1397 may_replace_uses = false;
1399 if (!may_replace_uses)
1401 gcc_assert (is_gimple_reg (def));
1403 /* Note that just emitting the copies is fine -- there is no problem
1404 with ordering of phi nodes. This is because A is the single
1405 predecessor of B, therefore results of the phi nodes cannot
1406 appear as arguments of the phi nodes. */
1407 copy = build2 (MODIFY_EXPR, void_type_node, def, use);
1408 bsi_insert_after (&bsi, copy, BSI_NEW_STMT);
1409 SET_PHI_RESULT (phi, NULL_TREE);
1410 SSA_NAME_DEF_STMT (def) = copy;
1412 else
1413 replace_uses_by (def, use);
1415 remove_phi_node (phi, NULL);
1418 /* Ensure that B follows A. */
1419 move_block_after (b, a);
1421 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1422 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1424 /* Remove labels from B and set bb_for_stmt to A for other statements. */
1425 for (bsi = bsi_start (b); !bsi_end_p (bsi);)
1427 if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
1429 tree label = bsi_stmt (bsi);
1431 bsi_remove (&bsi, false);
1432 /* Now that we can thread computed gotos, we might have
1433 a situation where we have a forced label in block B
1434 However, the label at the start of block B might still be
1435 used in other ways (think about the runtime checking for
1436 Fortran assigned gotos). So we cannot just delete the label. Instead
1437 label. Instead we move the label to the start of block A. */
1438 if (FORCED_LABEL (LABEL_EXPR_LABEL (label)))
1440 block_stmt_iterator dest_bsi = bsi_start (a);
1441 bsi_insert_before (&dest_bsi, label, BSI_NEW_STMT);
1444 else
1446 set_bb_for_stmt (bsi_stmt (bsi), a);
1447 bsi_next (&bsi);
1451 /* Merge the chains. */
1452 last = tsi_last (a->stmt_list);
1453 tsi_link_after (&last, b->stmt_list, TSI_NEW_STMT);
1454 b->stmt_list = NULL;
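/* A sketch of the PHI handling above: because A is the single predecessor
   of B, every PHI node in B has exactly one argument, V_i = PHI <V_j>.
   When propagating V_j is safe, the uses of V_i are simply replaced;
   otherwise an explicit copy V_i = V_j is appended to A before the
   statement chains are spliced together.  */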
1458 /* Return the one of two successors of BB that is not reached by a
1459 complex edge, if there is one. Else, return BB. We use
1460 this in optimizations that use post-dominators for their heuristics,
1461 to catch the cases in C++ where function calls are involved. */
1463 basic_block
1464 single_noncomplex_succ (basic_block bb)
1466 edge e0, e1;
1467 if (EDGE_COUNT (bb->succs) != 2)
1468 return bb;
1470 e0 = EDGE_SUCC (bb, 0);
1471 e1 = EDGE_SUCC (bb, 1);
1472 if (e0->flags & EDGE_COMPLEX)
1473 return e1->dest;
1474 if (e1->flags & EDGE_COMPLEX)
1475 return e0->dest;
1477 return bb;
1482 /* Walk the function tree removing unnecessary statements.
1484 * Empty statement nodes are removed
1486 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1488 * Unnecessary COND_EXPRs are removed
1490 * Some unnecessary BIND_EXPRs are removed
1492 Clearly more work could be done. The trick is doing the analysis
1493 and removal fast enough to be a net improvement in compile times.
1495 Note that when we remove a control structure such as a COND_EXPR,
1496 BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1497 to ensure we eliminate all the useless code. */
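/* An illustrative example of the kind of code this pass removes
   (hypothetical GIMPLE):

     if (0) { foo (); }            -> dropped, the guarded arm is dead
     try { x = y; } finally { }    -> reduced to just `x = y;'
     goto L; L:;                   -> the goto to the adjacent label is dropped

   Each removal can expose further opportunities, hence the repeat flag in
   struct rus_data below.  */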
1499 struct rus_data
1501 tree *last_goto;
1502 bool repeat;
1503 bool may_throw;
1504 bool may_branch;
1505 bool has_label;
1508 static void remove_useless_stmts_1 (tree *, struct rus_data *);
1510 static bool
1511 remove_useless_stmts_warn_notreached (tree stmt)
1513 if (EXPR_HAS_LOCATION (stmt))
1515 location_t loc = EXPR_LOCATION (stmt);
1516 if (LOCATION_LINE (loc) > 0)
1518 warning (0, "%Hwill never be executed", &loc);
1519 return true;
1523 switch (TREE_CODE (stmt))
1525 case STATEMENT_LIST:
1527 tree_stmt_iterator i;
1528 for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
1529 if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
1530 return true;
1532 break;
1534 case COND_EXPR:
1535 if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
1536 return true;
1537 if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
1538 return true;
1539 if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
1540 return true;
1541 break;
1543 case TRY_FINALLY_EXPR:
1544 case TRY_CATCH_EXPR:
1545 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
1546 return true;
1547 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
1548 return true;
1549 break;
1551 case CATCH_EXPR:
1552 return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
1553 case EH_FILTER_EXPR:
1554 return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
1555 case BIND_EXPR:
1556 return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));
1558 default:
1559 /* Not a live container. */
1560 break;
1563 return false;
1566 static void
1567 remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
1569 tree then_clause, else_clause, cond;
1570 bool save_has_label, then_has_label, else_has_label;
1572 save_has_label = data->has_label;
1573 data->has_label = false;
1574 data->last_goto = NULL;
1576 remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);
1578 then_has_label = data->has_label;
1579 data->has_label = false;
1580 data->last_goto = NULL;
1582 remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);
1584 else_has_label = data->has_label;
1585 data->has_label = save_has_label | then_has_label | else_has_label;
1587 then_clause = COND_EXPR_THEN (*stmt_p);
1588 else_clause = COND_EXPR_ELSE (*stmt_p);
1589 cond = fold (COND_EXPR_COND (*stmt_p));
1591 /* If neither arm does anything at all, we can remove the whole IF. */
1592 if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
1594 *stmt_p = build_empty_stmt ();
1595 data->repeat = true;
1598 /* If there are no reachable statements in an arm, then we can
1599 zap the entire conditional. */
1600 else if (integer_nonzerop (cond) && !else_has_label)
1602 if (warn_notreached)
1603 remove_useless_stmts_warn_notreached (else_clause);
1604 *stmt_p = then_clause;
1605 data->repeat = true;
1607 else if (integer_zerop (cond) && !then_has_label)
1609 if (warn_notreached)
1610 remove_useless_stmts_warn_notreached (then_clause);
1611 *stmt_p = else_clause;
1612 data->repeat = true;
1615 /* Check a couple of simple things on then/else with single stmts. */
1616 else
1618 tree then_stmt = expr_only (then_clause);
1619 tree else_stmt = expr_only (else_clause);
1621 /* Notice branches to a common destination. */
1622 if (then_stmt && else_stmt
1623 && TREE_CODE (then_stmt) == GOTO_EXPR
1624 && TREE_CODE (else_stmt) == GOTO_EXPR
1625 && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
1627 *stmt_p = then_stmt;
1628 data->repeat = true;
1631 /* If the THEN/ELSE clause merely assigns a value to a variable or
1632 parameter which is already known to contain that value, then
1633 remove the useless THEN/ELSE clause. */
1634 else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1636 if (else_stmt
1637 && TREE_CODE (else_stmt) == MODIFY_EXPR
1638 && TREE_OPERAND (else_stmt, 0) == cond
1639 && integer_zerop (TREE_OPERAND (else_stmt, 1)))
1640 COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
1642 else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
1643 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1644 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1645 && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
1647 tree stmt = (TREE_CODE (cond) == EQ_EXPR
1648 ? then_stmt : else_stmt);
1649 tree *location = (TREE_CODE (cond) == EQ_EXPR
1650 ? &COND_EXPR_THEN (*stmt_p)
1651 : &COND_EXPR_ELSE (*stmt_p));
1653 if (stmt
1654 && TREE_CODE (stmt) == MODIFY_EXPR
1655 && TREE_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
1656 && TREE_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
1657 *location = alloc_stmt_list ();
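/* For example (with a hypothetical variable), the check above turns

     if (x == 42) x = 42; else foo ();

   into `if (x == 42) ; else foo ();', since the THEN arm only stores a
   value the variable is already known to hold on that path.  */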
1661 /* Protect GOTOs in the arm of COND_EXPRs from being removed. They
1662 would be re-introduced during lowering. */
1663 data->last_goto = NULL;
1667 static void
1668 remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
1670 bool save_may_branch, save_may_throw;
1671 bool this_may_branch, this_may_throw;
1673 /* Collect may_branch and may_throw information for the body only. */
1674 save_may_branch = data->may_branch;
1675 save_may_throw = data->may_throw;
1676 data->may_branch = false;
1677 data->may_throw = false;
1678 data->last_goto = NULL;
1680 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1682 this_may_branch = data->may_branch;
1683 this_may_throw = data->may_throw;
1684 data->may_branch |= save_may_branch;
1685 data->may_throw |= save_may_throw;
1686 data->last_goto = NULL;
1688 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1690 /* If the body is empty, then we can emit the FINALLY block without
1691 the enclosing TRY_FINALLY_EXPR. */
1692 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
1694 *stmt_p = TREE_OPERAND (*stmt_p, 1);
1695 data->repeat = true;
1698 /* If the handler is empty, then we can emit the TRY block without
1699 the enclosing TRY_FINALLY_EXPR. */
1700 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1702 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1703 data->repeat = true;
1706 /* If the body neither throws, nor branches, then we can safely
1707 string the TRY and FINALLY blocks together. */
1708 else if (!this_may_branch && !this_may_throw)
1710 tree stmt = *stmt_p;
1711 *stmt_p = TREE_OPERAND (stmt, 0);
1712 append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
1713 data->repeat = true;
1718 static void
1719 remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
1721 bool save_may_throw, this_may_throw;
1722 tree_stmt_iterator i;
1723 tree stmt;
1725 /* Collect may_throw information for the body only. */
1726 save_may_throw = data->may_throw;
1727 data->may_throw = false;
1728 data->last_goto = NULL;
1730 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1732 this_may_throw = data->may_throw;
1733 data->may_throw = save_may_throw;
1735 /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
1736 if (!this_may_throw)
1738 if (warn_notreached)
1739 remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
1740 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1741 data->repeat = true;
1742 return;
1745 /* Process the catch clause specially. We may be able to tell that
1746 no exceptions propagate past this point. */
1748 this_may_throw = true;
1749 i = tsi_start (TREE_OPERAND (*stmt_p, 1));
1750 stmt = tsi_stmt (i);
1751 data->last_goto = NULL;
1753 switch (TREE_CODE (stmt))
1755 case CATCH_EXPR:
1756 for (; !tsi_end_p (i); tsi_next (&i))
1758 stmt = tsi_stmt (i);
1759 /* If we catch all exceptions, then the body does not
1760 propagate exceptions past this point. */
1761 if (CATCH_TYPES (stmt) == NULL)
1762 this_may_throw = false;
1763 data->last_goto = NULL;
1764 remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
1766 break;
1768 case EH_FILTER_EXPR:
1769 if (EH_FILTER_MUST_NOT_THROW (stmt))
1770 this_may_throw = false;
1771 else if (EH_FILTER_TYPES (stmt) == NULL)
1772 this_may_throw = false;
1773 remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
1774 break;
1776 default:
1777 /* Otherwise this is a cleanup. */
1778 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1780 /* If the cleanup is empty, then we can emit the TRY block without
1781 the enclosing TRY_CATCH_EXPR. */
1782 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1784 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1785 data->repeat = true;
1787 break;
1789 data->may_throw |= this_may_throw;
1793 static void
1794 remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
1796 tree block;
1798 /* First remove anything underneath the BIND_EXPR. */
1799 remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);
1801 /* If the BIND_EXPR has no variables, then we can pull everything
1802 up one level and remove the BIND_EXPR, unless this is the toplevel
1803 BIND_EXPR for the current function or an inlined function.
1805 When this situation occurs we will want to apply this
1806 optimization again. */
1807 block = BIND_EXPR_BLOCK (*stmt_p);
1808 if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
1809 && *stmt_p != DECL_SAVED_TREE (current_function_decl)
1810 && (! block
1811 || ! BLOCK_ABSTRACT_ORIGIN (block)
1812 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1813 != FUNCTION_DECL)))
1815 *stmt_p = BIND_EXPR_BODY (*stmt_p);
1816 data->repeat = true;
1821 static void
1822 remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
1824 tree dest = GOTO_DESTINATION (*stmt_p);
1826 data->may_branch = true;
1827 data->last_goto = NULL;
1829 /* Record the last goto expr, so that we can delete it if unnecessary. */
1830 if (TREE_CODE (dest) == LABEL_DECL)
1831 data->last_goto = stmt_p;
1835 static void
1836 remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
1838 tree label = LABEL_EXPR_LABEL (*stmt_p);
1840 data->has_label = true;
1842 /* We do want to jump across non-local label receiver code. */
1843 if (DECL_NONLOCAL (label))
1844 data->last_goto = NULL;
1846 else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
1848 *data->last_goto = build_empty_stmt ();
1849 data->repeat = true;
1852 /* ??? Add something here to delete unused labels. */
1856 /* If the called function is "const" or "pure", then clear TREE_SIDE_EFFECTS
1857 on the CALL_EXPR. This allows us to eliminate redundant or useless
1858 calls to "const" functions.
1860 The gimplifier already does the same operation, but we may only notice that
1861 functions are const or pure after their calls have been gimplified, so we
1862 need to update the flags here. */
1864 static void
1865 update_call_expr_flags (tree call)
1867 tree decl = get_callee_fndecl (call);
1868 if (!decl)
1869 return;
1870 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1871 TREE_SIDE_EFFECTS (call) = 0;
1872 if (TREE_NOTHROW (decl))
1873 TREE_NOTHROW (call) = 1;
1877 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1879 void
1880 notice_special_calls (tree t)
1882 int flags = call_expr_flags (t);
1884 if (flags & ECF_MAY_BE_ALLOCA)
1885 current_function_calls_alloca = true;
1886 if (flags & ECF_RETURNS_TWICE)
1887 current_function_calls_setjmp = true;
1891 /* Clear flags set by notice_special_calls. Used by dead code removal
1892 to update the flags. */
1894 void
1895 clear_special_calls (void)
1897 current_function_calls_alloca = false;
1898 current_function_calls_setjmp = false;
1902 static void
1903 remove_useless_stmts_1 (tree *tp, struct rus_data *data)
1905 tree t = *tp, op;
1907 switch (TREE_CODE (t))
1909 case COND_EXPR:
1910 remove_useless_stmts_cond (tp, data);
1911 break;
1913 case TRY_FINALLY_EXPR:
1914 remove_useless_stmts_tf (tp, data);
1915 break;
1917 case TRY_CATCH_EXPR:
1918 remove_useless_stmts_tc (tp, data);
1919 break;
1921 case BIND_EXPR:
1922 remove_useless_stmts_bind (tp, data);
1923 break;
1925 case GOTO_EXPR:
1926 remove_useless_stmts_goto (tp, data);
1927 break;
1929 case LABEL_EXPR:
1930 remove_useless_stmts_label (tp, data);
1931 break;
1933 case RETURN_EXPR:
1934 fold_stmt (tp);
1935 data->last_goto = NULL;
1936 data->may_branch = true;
1937 break;
1939 case CALL_EXPR:
1940 fold_stmt (tp);
1941 data->last_goto = NULL;
1942 notice_special_calls (t);
1943 update_call_expr_flags (t);
1944 if (tree_could_throw_p (t))
1945 data->may_throw = true;
1946 break;
1948 case MODIFY_EXPR:
1949 data->last_goto = NULL;
1950 fold_stmt (tp);
1951 op = get_call_expr_in (t);
1952 if (op)
1954 update_call_expr_flags (op);
1955 notice_special_calls (op);
1957 if (tree_could_throw_p (t))
1958 data->may_throw = true;
1959 break;
1961 case STATEMENT_LIST:
1963 tree_stmt_iterator i = tsi_start (t);
1964 while (!tsi_end_p (i))
1966 t = tsi_stmt (i);
1967 if (IS_EMPTY_STMT (t))
1969 tsi_delink (&i);
1970 continue;
1973 remove_useless_stmts_1 (tsi_stmt_ptr (i), data);
1975 t = tsi_stmt (i);
1976 if (TREE_CODE (t) == STATEMENT_LIST)
1978 tsi_link_before (&i, t, TSI_SAME_STMT);
1979 tsi_delink (&i);
1981 else
1982 tsi_next (&i);
1985 break;
1986 case ASM_EXPR:
1987 fold_stmt (tp);
1988 data->last_goto = NULL;
1989 break;
1991 default:
1992 data->last_goto = NULL;
1993 break;
1997 static void
1998 remove_useless_stmts (void)
2000 struct rus_data data;
2002 clear_special_calls ();
2006 memset (&data, 0, sizeof (data));
2007 remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
2009 while (data.repeat);
2013 struct tree_opt_pass pass_remove_useless_stmts =
2015 "useless", /* name */
2016 NULL, /* gate */
2017 remove_useless_stmts, /* execute */
2018 NULL, /* sub */
2019 NULL, /* next */
2020 0, /* static_pass_number */
2021 0, /* tv_id */
2022 PROP_gimple_any, /* properties_required */
2023 0, /* properties_provided */
2024 0, /* properties_destroyed */
2025 0, /* todo_flags_start */
2026 TODO_dump_func, /* todo_flags_finish */
2027 0 /* letter */
2030 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2032 static void
2033 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2035 tree phi;
2037 /* Since this block is no longer reachable, we can just delete all
2038 of its PHI nodes. */
2039 phi = phi_nodes (bb);
2040 while (phi)
2042 tree next = PHI_CHAIN (phi);
2043 remove_phi_node (phi, NULL_TREE);
2044 phi = next;
2047 /* Remove edges to BB's successors. */
2048 while (EDGE_COUNT (bb->succs) > 0)
2049 remove_edge (EDGE_SUCC (bb, 0));
2053 /* Remove statements of basic block BB. */
2055 static void
2056 remove_bb (basic_block bb)
2058 block_stmt_iterator i;
2059 #ifdef USE_MAPPED_LOCATION
2060 source_location loc = UNKNOWN_LOCATION;
2061 #else
2062 source_locus loc = 0;
2063 #endif
2065 if (dump_file)
2067 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2068 if (dump_flags & TDF_DETAILS)
2070 dump_bb (bb, dump_file, 0);
2071 fprintf (dump_file, "\n");
2075 /* If we remove the header or the latch of a loop, mark the loop for
2076 removal by setting its header and latch to NULL. */
2077 if (current_loops)
2079 struct loop *loop = bb->loop_father;
2081 if (loop->latch == bb
2082 || loop->header == bb)
2084 loop->latch = NULL;
2085 loop->header = NULL;
2087 /* Also clean up the information associated with the loop. Updating
2088 it would waste time. More importantly, it may refer to ssa
2089 names that were defined in other removed basic blocks -- these
2090 ssa names are now removed and invalid. */
2091 free_numbers_of_iterations_estimates_loop (loop);
2095 /* Remove all the instructions in the block. */
2096 for (i = bsi_start (bb); !bsi_end_p (i);)
2098 tree stmt = bsi_stmt (i);
2099 if (TREE_CODE (stmt) == LABEL_EXPR
2100 && (FORCED_LABEL (LABEL_EXPR_LABEL (stmt))
2101 || DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt))))
2103 basic_block new_bb;
2104 block_stmt_iterator new_bsi;
2106 /* A non-reachable non-local label may still be referenced.
2107 But it no longer needs to carry the extra semantics of
2108 non-locality. */
2109 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
2111 DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)) = 0;
2112 FORCED_LABEL (LABEL_EXPR_LABEL (stmt)) = 1;
2115 new_bb = bb->prev_bb;
2116 new_bsi = bsi_start (new_bb);
2117 bsi_remove (&i, false);
2118 bsi_insert_before (&new_bsi, stmt, BSI_NEW_STMT);
2120 else
2122 /* Release SSA definitions if we are in SSA. Note that we
2123 may be called when not in SSA. For example,
2124 final_cleanup calls this function via
2125 cleanup_tree_cfg. */
2126 if (in_ssa_p)
2127 release_defs (stmt);
2129 bsi_remove (&i, true);
2132 /* Don't warn for removed gotos. Gotos are often removed due to
2133 jump threading, thus resulting in bogus warnings. Not great,
2134 since this way we lose warnings for gotos in the original
2135 program that are indeed unreachable. */
2136 if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
2138 #ifdef USE_MAPPED_LOCATION
2139 if (EXPR_HAS_LOCATION (stmt))
2140 loc = EXPR_LOCATION (stmt);
2141 #else
2142 source_locus t;
2143 t = EXPR_LOCUS (stmt);
2144 if (t && LOCATION_LINE (*t) > 0)
2145 loc = t;
2146 #endif
2150 /* If requested, give a warning that the first statement in the
2151 block is unreachable. We walk statements backwards in the
2152 loop above, so the last statement we process is the first statement
2153 in the block. */
2154 #ifdef USE_MAPPED_LOCATION
2155 if (loc > BUILTINS_LOCATION)
2156 warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
2157 #else
2158 if (loc)
2159 warning (OPT_Wunreachable_code, "%Hwill never be executed", loc);
2160 #endif
2162 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2166 /* Given a basic block BB ending with COND_EXPR, SWITCH_EXPR or a computed
2167 GOTO_EXPR, and a predicate VAL, return the edge that will be taken out
2168 of the block. If VAL does not match a unique edge, NULL is returned. */
2170 edge
2171 find_taken_edge (basic_block bb, tree val)
2173 tree stmt;
2175 stmt = last_stmt (bb);
2177 gcc_assert (stmt);
2178 gcc_assert (is_ctrl_stmt (stmt));
2179 gcc_assert (val);
2181 if (! is_gimple_min_invariant (val))
2182 return NULL;
2184 if (TREE_CODE (stmt) == COND_EXPR)
2185 return find_taken_edge_cond_expr (bb, val);
2187 if (TREE_CODE (stmt) == SWITCH_EXPR)
2188 return find_taken_edge_switch_expr (bb, val);
2190 if (computed_goto_p (stmt))
2191 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2193 gcc_unreachable ();
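/* Illustrative sketch, not part of the original file: a cleanup pass that has
   folded the controlling predicate of BB to a constant could ask for the
   surviving edge roughly as follows.  The surrounding pass context is assumed,
   not taken from GCC.  */
#if 0
static edge
taken_edge_if_constant_cond (basic_block bb)
{
  tree stmt = last_stmt (bb);

  if (stmt && TREE_CODE (stmt) == COND_EXPR)
    {
      /* Fold the condition; if it becomes invariant, find_taken_edge
         can identify the unique successor edge.  */
      tree val = fold (COND_EXPR_COND (stmt));
      if (is_gimple_min_invariant (val))
        return find_taken_edge (bb, val);
    }

  return NULL;
}
#endif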
2196 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2197 statement, determine which of the outgoing edges will be taken out of the
2198 block. Return NULL if any edge may be taken. */
2200 static edge
2201 find_taken_edge_computed_goto (basic_block bb, tree val)
2203 basic_block dest;
2204 edge e = NULL;
2206 dest = label_to_block (val);
2207 if (dest)
2209 e = find_edge (bb, dest);
2210 gcc_assert (e != NULL);
2213 return e;
2216 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2217 statement, determine which of the two edges will be taken out of the
2218 block. Return NULL if either edge may be taken. */
2220 static edge
2221 find_taken_edge_cond_expr (basic_block bb, tree val)
2223 edge true_edge, false_edge;
2225 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2227 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2228 return (zero_p (val) ? false_edge : true_edge);
2231 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2232 statement, determine which edge will be taken out of the block. Return
2233 NULL if any edge may be taken. */
2235 static edge
2236 find_taken_edge_switch_expr (basic_block bb, tree val)
2238 tree switch_expr, taken_case;
2239 basic_block dest_bb;
2240 edge e;
2242 switch_expr = last_stmt (bb);
2243 taken_case = find_case_label_for_value (switch_expr, val);
2244 dest_bb = label_to_block (CASE_LABEL (taken_case));
2246 e = find_edge (bb, dest_bb);
2247 gcc_assert (e);
2248 return e;
2252 /* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
2253 We can make optimal use here of the fact that the case labels are
2254 sorted: We can do a binary search for a case matching VAL. */
2256 static tree
2257 find_case_label_for_value (tree switch_expr, tree val)
2259 tree vec = SWITCH_LABELS (switch_expr);
2260 size_t low, high, n = TREE_VEC_LENGTH (vec);
2261 tree default_case = TREE_VEC_ELT (vec, n - 1);
2263 for (low = -1, high = n - 1; high - low > 1; )
2265 size_t i = (high + low) / 2;
2266 tree t = TREE_VEC_ELT (vec, i);
2267 int cmp;
2269 /* Cache the result of comparing CASE_LOW and val. */
2270 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2272 if (cmp > 0)
2273 high = i;
2274 else
2275 low = i;
2277 if (CASE_HIGH (t) == NULL)
2279 /* A single-valued case label. */
2280 if (cmp == 0)
2281 return t;
2283 else
2285 /* A case range. We can only handle integer ranges. */
2286 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2287 return t;
2291 return default_case;
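/* Standalone sketch (not GCC code) of the same binary search, written over a
   hypothetical array of plain integer ranges sorted by LOW, with the default
   case handled by the caller.  It only illustrates the sortedness invariant
   exploited above.  */
#if 0
#include <stddef.h>

struct case_range { long low, high; };	/* HIGH == LOW for single values.  */

/* Return the index of the range in VEC (of length N) containing VAL,
   or N if no range matches.  */
static size_t
find_case_index (const struct case_range *vec, size_t n, long val)
{
  long lo = -1, hi = (long) n;

  while (hi - lo > 1)
    {
      long i = (hi + lo) / 2;

      if (vec[i].low > val)
	hi = i;
      else
	{
	  lo = i;
	  if (val <= vec[i].high)
	    return (size_t) i;	/* VAL falls within this range.  */
	}
    }

  return n;			/* No match; the caller takes the default.  */
}
#endif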
2297 /*---------------------------------------------------------------------------
2298 Debugging functions
2299 ---------------------------------------------------------------------------*/
2301 /* Dump tree-specific information of block BB to file OUTF. */
2303 void
2304 tree_dump_bb (basic_block bb, FILE *outf, int indent)
2306 dump_generic_bb (outf, bb, indent, TDF_VOPS);
2310 /* Dump a basic block on stderr. */
2312 void
2313 debug_tree_bb (basic_block bb)
2315 dump_bb (bb, stderr, 0);
2319 /* Dump basic block with index N on stderr. */
2321 basic_block
2322 debug_tree_bb_n (int n)
2324 debug_tree_bb (BASIC_BLOCK (n));
2325 return BASIC_BLOCK (n);
2329 /* Dump the CFG on stderr.
2331 FLAGS are the same used by the tree dumping functions
2332 (see TDF_* in tree.h). */
2334 void
2335 debug_tree_cfg (int flags)
2337 dump_tree_cfg (stderr, flags);
2341 /* Dump the program showing basic block boundaries on the given FILE.
2343 FLAGS are the same used by the tree dumping functions (see TDF_* in
2344 tree.h). */
2346 void
2347 dump_tree_cfg (FILE *file, int flags)
2349 if (flags & TDF_DETAILS)
2351 const char *funcname
2352 = lang_hooks.decl_printable_name (current_function_decl, 2);
2354 fputc ('\n', file);
2355 fprintf (file, ";; Function %s\n\n", funcname);
2356 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2357 n_basic_blocks, n_edges, last_basic_block);
2359 brief_dump_cfg (file);
2360 fprintf (file, "\n");
2363 if (flags & TDF_STATS)
2364 dump_cfg_stats (file);
2366 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2370 /* Dump CFG statistics on FILE. */
2372 void
2373 dump_cfg_stats (FILE *file)
2375 static long max_num_merged_labels = 0;
2376 unsigned long size, total = 0;
2377 long num_edges;
2378 basic_block bb;
2379 const char * const fmt_str = "%-30s%-13s%12s\n";
2380 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2381 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2382 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2383 const char *funcname
2384 = lang_hooks.decl_printable_name (current_function_decl, 2);
2387 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2389 fprintf (file, "---------------------------------------------------------\n");
2390 fprintf (file, fmt_str, "", " Number of ", "Memory");
2391 fprintf (file, fmt_str, "", " instances ", "used ");
2392 fprintf (file, "---------------------------------------------------------\n");
2394 size = n_basic_blocks * sizeof (struct basic_block_def);
2395 total += size;
2396 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2397 SCALE (size), LABEL (size));
2399 num_edges = 0;
2400 FOR_EACH_BB (bb)
2401 num_edges += EDGE_COUNT (bb->succs);
2402 size = num_edges * sizeof (struct edge_def);
2403 total += size;
2404 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2406 fprintf (file, "---------------------------------------------------------\n");
2407 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2408 LABEL (total));
2409 fprintf (file, "---------------------------------------------------------\n");
2410 fprintf (file, "\n");
2412 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2413 max_num_merged_labels = cfg_stats.num_merged_labels;
2415 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2416 cfg_stats.num_merged_labels, max_num_merged_labels);
2418 fprintf (file, "\n");
2422 /* Dump CFG statistics on stderr. Keep extern so that it's always
2423 linked in the final executable. */
2425 void
2426 debug_cfg_stats (void)
2428 dump_cfg_stats (stderr);
2432 /* Dump the flowgraph to a .vcg FILE. */
2434 static void
2435 tree_cfg2vcg (FILE *file)
2437 edge e;
2438 edge_iterator ei;
2439 basic_block bb;
2440 const char *funcname
2441 = lang_hooks.decl_printable_name (current_function_decl, 2);
2443 /* Write the file header. */
2444 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2445 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2446 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2448 /* Write blocks and edges. */
2449 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2451 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2452 e->dest->index);
2454 if (e->flags & EDGE_FAKE)
2455 fprintf (file, " linestyle: dotted priority: 10");
2456 else
2457 fprintf (file, " linestyle: solid priority: 100");
2459 fprintf (file, " }\n");
2461 fputc ('\n', file);
2463 FOR_EACH_BB (bb)
2465 enum tree_code head_code, end_code;
2466 const char *head_name, *end_name;
2467 int head_line = 0;
2468 int end_line = 0;
2469 tree first = first_stmt (bb);
2470 tree last = last_stmt (bb);
2472 if (first)
2474 head_code = TREE_CODE (first);
2475 head_name = tree_code_name[head_code];
2476 head_line = get_lineno (first);
2478 else
2479 head_name = "no-statement";
2481 if (last)
2483 end_code = TREE_CODE (last);
2484 end_name = tree_code_name[end_code];
2485 end_line = get_lineno (last);
2487 else
2488 end_name = "no-statement";
2490 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2491 bb->index, bb->index, head_name, head_line, end_name,
2492 end_line);
2494 FOR_EACH_EDGE (e, ei, bb->succs)
2496 if (e->dest == EXIT_BLOCK_PTR)
2497 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2498 else
2499 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2501 if (e->flags & EDGE_FAKE)
2502 fprintf (file, " priority: 10 linestyle: dotted");
2503 else
2504 fprintf (file, " priority: 100 linestyle: solid");
2506 fprintf (file, " }\n");
2509 if (bb->next_bb != EXIT_BLOCK_PTR)
2510 fputc ('\n', file);
2513 fputs ("}\n\n", file);
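/* For illustration only (not original commentary): for a trivial function
   with a single real block, the text emitted above looks roughly like

	graph: { title: "foo"
	node: { title: "ENTRY" label: "ENTRY" }
	node: { title: "EXIT" label: "EXIT" }
	edge: { sourcename: "ENTRY" targetname: "0" linestyle: solid priority: 100 }
	node: { title: "0" label: "#0\nMODIFY_EXPR (3)\nRETURN_EXPR (4)"}
	edge: { sourcename: "0" targetname: "EXIT" priority: 100 linestyle: solid }
	}

   with block indices, statement codes and line numbers depending on the
   function being dumped.  */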
2518 /*---------------------------------------------------------------------------
2519 Miscellaneous helpers
2520 ---------------------------------------------------------------------------*/
2522 /* Return true if T represents a stmt that always transfers control. */
2524 bool
2525 is_ctrl_stmt (tree t)
2527 return (TREE_CODE (t) == COND_EXPR
2528 || TREE_CODE (t) == SWITCH_EXPR
2529 || TREE_CODE (t) == GOTO_EXPR
2530 || TREE_CODE (t) == RETURN_EXPR
2531 || TREE_CODE (t) == RESX_EXPR);
2535 /* Return true if T is a statement that may alter the flow of control
2536 (e.g., a call to a non-returning function). */
2538 bool
2539 is_ctrl_altering_stmt (tree t)
2541 tree call;
2543 gcc_assert (t);
2544 call = get_call_expr_in (t);
2545 if (call)
2547 /* A non-pure/const CALL_EXPR alters flow control if the current
2548 function has nonlocal labels. */
2549 if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
2550 return true;
2552 /* A CALL_EXPR also alters control flow if it does not return. */
2553 if (call_expr_flags (call) & ECF_NORETURN)
2554 return true;
2557 /* OpenMP directives alter control flow. */
2558 if (flag_openmp && OMP_DIRECTIVE_P (t))
2559 return true;
2561 /* If a statement can throw, it alters control flow. */
2562 return tree_can_throw_internal (t);
2566 /* Return true if T is a computed goto. */
2568 bool
2569 computed_goto_p (tree t)
2571 return (TREE_CODE (t) == GOTO_EXPR
2572 && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
2576 /* Checks whether EXPR is a simple local goto. */
2578 bool
2579 simple_goto_p (tree expr)
2581 return (TREE_CODE (expr) == GOTO_EXPR
2582 && TREE_CODE (GOTO_DESTINATION (expr)) == LABEL_DECL);
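/* Illustrative note and sketch (not original to this file): at the C level a
   plain "goto lab;" lowers to a GOTO_EXPR whose destination is a LABEL_DECL
   (a simple goto), while the GNU labels-as-values extension yields a
   destination that is a computed address (a computed goto).  */
#if 0
void
goto_forms_example (int which)
{
  static void *targets[] = { &&a, &&b };  /* GNU C address-of-label.  */

  goto *targets[which & 1];	/* Computed goto: destination is an expression.  */
 a:
  goto done;			/* Simple goto: destination is a LABEL_DECL.  */
 b:
 done:
  return;
}
#endif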
2586 /* Return true if T should start a new basic block. PREV_T is the
2587 statement preceding T. It is used when T is a label or a case label.
2588 Labels should only start a new basic block if their previous statement
2589 wasn't a label. Otherwise, a sequence of labels would generate
2590 unnecessary basic blocks that only contain a single label. */
2592 static inline bool
2593 stmt_starts_bb_p (tree t, tree prev_t)
2595 if (t == NULL_TREE)
2596 return false;
2598 /* LABEL_EXPRs start a new basic block only if the preceding
2599 statement wasn't a label of the same type. This prevents the
2600 creation of consecutive blocks that have nothing but a single
2601 label. */
2602 if (TREE_CODE (t) == LABEL_EXPR)
2604 /* Nonlocal and computed GOTO targets always start a new block. */
2605 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
2606 || FORCED_LABEL (LABEL_EXPR_LABEL (t)))
2607 return true;
2609 if (prev_t && TREE_CODE (prev_t) == LABEL_EXPR)
2611 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
2612 return true;
2614 cfg_stats.num_merged_labels++;
2615 return false;
2617 else
2618 return true;
2621 return false;
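/* Illustrative example (not original commentary): in a lowered body such as

	L1:;
	L2:;
	x = x + 1;

   only L1 starts a new basic block; L2 is merged into the same block and
   counted in cfg_stats.num_merged_labels, since nothing can execute between
   the two labels.  If L2 were nonlocal or forced, it would still start its
   own block.  */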
2625 /* Return true if T should end a basic block. */
2627 bool
2628 stmt_ends_bb_p (tree t)
2630 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2634 /* Add gotos that used to be represented implicitly in the CFG. */
2636 void
2637 disband_implicit_edges (void)
2639 basic_block bb;
2640 block_stmt_iterator last;
2641 edge e;
2642 edge_iterator ei;
2643 tree stmt, label;
2645 FOR_EACH_BB (bb)
2647 last = bsi_last (bb);
2648 stmt = last_stmt (bb);
2650 if (stmt && TREE_CODE (stmt) == COND_EXPR)
2652 /* Remove superfluous gotos from COND_EXPR branches. Moved
2653 from cfg_remove_useless_stmts here since it violates the
2654 invariants for tree-cfg correspondence and thus fits better
2655 here where we do it anyway. */
2656 e = find_edge (bb, bb->next_bb);
2657 if (e)
2659 if (e->flags & EDGE_TRUE_VALUE)
2660 COND_EXPR_THEN (stmt) = build_empty_stmt ();
2661 else if (e->flags & EDGE_FALSE_VALUE)
2662 COND_EXPR_ELSE (stmt) = build_empty_stmt ();
2663 else
2664 gcc_unreachable ();
2665 e->flags |= EDGE_FALLTHRU;
2668 continue;
2671 if (stmt && TREE_CODE (stmt) == RETURN_EXPR)
2673 /* Remove the RETURN_EXPR if we may fall through to the exit
2674 instead. */
2675 gcc_assert (single_succ_p (bb));
2676 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
2678 if (bb->next_bb == EXIT_BLOCK_PTR
2679 && !TREE_OPERAND (stmt, 0))
2681 bsi_remove (&last, true);
2682 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
2684 continue;
2687 /* There can be no fallthru edge if the last statement is a control
2688 one. */
2689 if (stmt && is_ctrl_stmt (stmt))
2690 continue;
2692 /* Find a fallthru edge and emit the goto if necessary. */
2693 FOR_EACH_EDGE (e, ei, bb->succs)
2694 if (e->flags & EDGE_FALLTHRU)
2695 break;
2697 if (!e || e->dest == bb->next_bb)
2698 continue;
2700 gcc_assert (e->dest != EXIT_BLOCK_PTR);
2701 label = tree_block_label (e->dest);
2703 stmt = build1 (GOTO_EXPR, void_type_node, label);
2704 #ifdef USE_MAPPED_LOCATION
2705 SET_EXPR_LOCATION (stmt, e->goto_locus);
2706 #else
2707 SET_EXPR_LOCUS (stmt, e->goto_locus);
2708 #endif
2709 bsi_insert_after (&last, stmt, BSI_NEW_STMT);
2710 e->flags &= ~EDGE_FALLTHRU;
2714 /* Remove block annotations and other datastructures. */
2716 void
2717 delete_tree_cfg_annotations (void)
2719 label_to_block_map = NULL;
2723 /* Return the first statement in basic block BB. */
2725 tree
2726 first_stmt (basic_block bb)
2728 block_stmt_iterator i = bsi_start (bb);
2729 return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
2733 /* Return the last statement in basic block BB. */
2735 tree
2736 last_stmt (basic_block bb)
2738 block_stmt_iterator b = bsi_last (bb);
2739 return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
2743 /* Return a pointer to the last statement in block BB. */
2745 tree *
2746 last_stmt_ptr (basic_block bb)
2748 block_stmt_iterator last = bsi_last (bb);
2749 return !bsi_end_p (last) ? bsi_stmt_ptr (last) : NULL;
2753 /* Return the last statement of an otherwise empty block. Return NULL
2754 if the block is totally empty, or if it contains more than one
2755 statement. */
2757 tree
2758 last_and_only_stmt (basic_block bb)
2760 block_stmt_iterator i = bsi_last (bb);
2761 tree last, prev;
2763 if (bsi_end_p (i))
2764 return NULL_TREE;
2766 last = bsi_stmt (i);
2767 bsi_prev (&i);
2768 if (bsi_end_p (i))
2769 return last;
2771 /* Empty statements should no longer appear in the instruction stream.
2772 Everything that might have appeared before should be deleted by
2773 remove_useless_stmts, and the optimizers should just bsi_remove
2774 instead of smashing with build_empty_stmt.
2776 Thus the only thing that should appear here in a block containing
2777 one executable statement is a label. */
2778 prev = bsi_stmt (i);
2779 if (TREE_CODE (prev) == LABEL_EXPR)
2780 return last;
2781 else
2782 return NULL_TREE;
2786 /* Mark BB as the basic block holding statement T. */
2788 void
2789 set_bb_for_stmt (tree t, basic_block bb)
2791 if (TREE_CODE (t) == PHI_NODE)
2792 PHI_BB (t) = bb;
2793 else if (TREE_CODE (t) == STATEMENT_LIST)
2795 tree_stmt_iterator i;
2796 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2797 set_bb_for_stmt (tsi_stmt (i), bb);
2799 else
2801 stmt_ann_t ann = get_stmt_ann (t);
2802 ann->bb = bb;
2804 /* If the statement is a label, add the label to the block-to-labels map
2805 so that we can speed up edge creation for GOTO_EXPRs. */
2806 if (TREE_CODE (t) == LABEL_EXPR)
2808 int uid;
2810 t = LABEL_EXPR_LABEL (t);
2811 uid = LABEL_DECL_UID (t);
2812 if (uid == -1)
2814 unsigned old_len = VEC_length (basic_block, label_to_block_map);
2815 LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
2816 if (old_len <= (unsigned) uid)
2818 basic_block *addr;
2819 unsigned new_len = 3 * uid / 2;
2821 VEC_safe_grow (basic_block, gc, label_to_block_map,
2822 new_len);
2823 addr = VEC_address (basic_block, label_to_block_map);
2824 memset (&addr[old_len],
2825 0, sizeof (basic_block) * (new_len - old_len));
2828 else
2829 /* We're moving an existing label. Make sure that we've
2830 removed it from the old block. */
2831 gcc_assert (!bb
2832 || !VEC_index (basic_block, label_to_block_map, uid));
2833 VEC_replace (basic_block, label_to_block_map, uid, bb);
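/* Standalone sketch (not GCC code) of the growth policy used above for
   label_to_block_map: grow a uid-indexed array by roughly 1.5x and clear the
   new tail, keeping repeated insertions amortized O(1).  Error handling is
   omitted.  */
#if 0
#include <stdlib.h>
#include <string.h>

struct uid_map { void **slot; unsigned len; };

static void
uid_map_ensure (struct uid_map *m, unsigned uid)
{
  if (uid < m->len)
    return;
  {
    unsigned new_len = 3 * uid / 2 + 1;		/* Always > UID.  */
    m->slot = (void **) realloc (m->slot, new_len * sizeof (void *));
    memset (&m->slot[m->len], 0, (new_len - m->len) * sizeof (void *));
    m->len = new_len;
  }
}
#endif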
2838 /* Finds iterator for STMT. */
2840 extern block_stmt_iterator
2841 bsi_for_stmt (tree stmt)
2843 block_stmt_iterator bsi;
2845 for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
2846 if (bsi_stmt (bsi) == stmt)
2847 return bsi;
2849 gcc_unreachable ();
2852 /* Mark statement T as modified, and update it. */
2853 static inline void
2854 update_modified_stmts (tree t)
2856 if (TREE_CODE (t) == STATEMENT_LIST)
2858 tree_stmt_iterator i;
2859 tree stmt;
2860 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2862 stmt = tsi_stmt (i);
2863 update_stmt_if_modified (stmt);
2866 else
2867 update_stmt_if_modified (t);
2870 /* Insert statement (or statement list) T before the statement
2871 pointed-to by iterator I. M specifies how to update iterator I
2872 after insertion (see enum bsi_iterator_update). */
2874 void
2875 bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2877 set_bb_for_stmt (t, i->bb);
2878 update_modified_stmts (t);
2879 tsi_link_before (&i->tsi, t, m);
2883 /* Insert statement (or statement list) T after the statement
2884 pointed-to by iterator I. M specifies how to update iterator I
2885 after insertion (see enum bsi_iterator_update). */
2887 void
2888 bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2890 set_bb_for_stmt (t, i->bb);
2891 update_modified_stmts (t);
2892 tsi_link_after (&i->tsi, t, m);
2896 /* Remove the statement pointed to by iterator I. The iterator is updated
2897 to the next statement.
2899 When REMOVE_EH_INFO is true we remove the statement pointed to by
2900 iterator I from the EH tables. Otherwise we do not modify the EH
2901 tables.
2903 Generally, REMOVE_EH_INFO should be true when the statement is going to
2904 be removed from the IL and not reinserted elsewhere. */
2906 void
2907 bsi_remove (block_stmt_iterator *i, bool remove_eh_info)
2909 tree t = bsi_stmt (*i);
2910 set_bb_for_stmt (t, NULL);
2911 delink_stmt_imm_use (t);
2912 tsi_delink (&i->tsi);
2913 mark_stmt_modified (t);
2914 if (remove_eh_info)
2915 remove_stmt_from_eh_region (t);
2919 /* Move the statement at FROM so it comes right after the statement at TO. */
2921 void
2922 bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
2924 tree stmt = bsi_stmt (*from);
2925 bsi_remove (from, false);
2926 bsi_insert_after (to, stmt, BSI_SAME_STMT);
2930 /* Move the statement at FROM so it comes right before the statement at TO. */
2932 void
2933 bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
2935 tree stmt = bsi_stmt (*from);
2936 bsi_remove (from, false);
2937 bsi_insert_before (to, stmt, BSI_SAME_STMT);
2941 /* Move the statement at FROM to the end of basic block BB. */
2943 void
2944 bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
2946 block_stmt_iterator last = bsi_last (bb);
2948 /* Have to check bsi_end_p because it could be an empty block. */
2949 if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
2950 bsi_move_before (from, &last);
2951 else
2952 bsi_move_after (from, &last);
2956 /* Replace the contents of the statement pointed to by iterator BSI
2957 with STMT. If UPDATE_EH_INFO is true, the exception handling
2958 information of the original statement is moved to the new statement. */
2961 void
2962 bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool update_eh_info)
2964 int eh_region;
2965 tree orig_stmt = bsi_stmt (*bsi);
2967 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
2968 set_bb_for_stmt (stmt, bsi->bb);
2970 /* Preserve EH region information from the original statement, if
2971 requested by the caller. */
2972 if (update_eh_info)
2974 eh_region = lookup_stmt_eh_region (orig_stmt);
2975 if (eh_region >= 0)
2977 remove_stmt_from_eh_region (orig_stmt);
2978 add_stmt_to_eh_region (stmt, eh_region);
2982 delink_stmt_imm_use (orig_stmt);
2983 *bsi_stmt_ptr (*bsi) = stmt;
2984 mark_stmt_modified (stmt);
2985 update_modified_stmts (stmt);
2989 /* Insert the statement pointed-to by BSI into edge E. Every attempt
2990 is made to place the statement in an existing basic block, but
2991 sometimes that isn't possible. When it isn't possible, the edge is
2992 split and the statement is added to the new block.
2994 In all cases, the returned *BSI points to the correct location. The
2995 return value is true if insertion should be done after the location,
2996 or false if it should be done before the location. If new basic block
2997 has to be created, it is stored in *NEW_BB. */
2999 static bool
3000 tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
3001 basic_block *new_bb)
3003 basic_block dest, src;
3004 tree tmp;
3006 dest = e->dest;
3007 restart:
3009 /* If the destination has one predecessor which has no PHI nodes,
3010 insert there. Except for the exit block.
3012 The requirement for no PHI nodes could be relaxed. Basically we
3013 would have to examine the PHIs to prove that none of them used
3014 the value set by the statement we want to insert on E. That
3015 hardly seems worth the effort. */
3016 if (single_pred_p (dest)
3017 && ! phi_nodes (dest)
3018 && dest != EXIT_BLOCK_PTR)
3020 *bsi = bsi_start (dest);
3021 if (bsi_end_p (*bsi))
3022 return true;
3024 /* Make sure we insert after any leading labels. */
3025 tmp = bsi_stmt (*bsi);
3026 while (TREE_CODE (tmp) == LABEL_EXPR)
3028 bsi_next (bsi);
3029 if (bsi_end_p (*bsi))
3030 break;
3031 tmp = bsi_stmt (*bsi);
3034 if (bsi_end_p (*bsi))
3036 *bsi = bsi_last (dest);
3037 return true;
3039 else
3040 return false;
3043 /* If the source has one successor, the edge is not abnormal and
3044 the last statement does not end a basic block, insert there.
3045 Except for the entry block. */
3046 src = e->src;
3047 if ((e->flags & EDGE_ABNORMAL) == 0
3048 && single_succ_p (src)
3049 && src != ENTRY_BLOCK_PTR)
3051 *bsi = bsi_last (src);
3052 if (bsi_end_p (*bsi))
3053 return true;
3055 tmp = bsi_stmt (*bsi);
3056 if (!stmt_ends_bb_p (tmp))
3057 return true;
3059 /* Insert code just before returning the value. We may need to decompose
3060 the return in case it contains a non-trivial operand. */
3061 if (TREE_CODE (tmp) == RETURN_EXPR)
3063 tree op = TREE_OPERAND (tmp, 0);
3064 if (op && !is_gimple_val (op))
3066 gcc_assert (TREE_CODE (op) == MODIFY_EXPR);
3067 bsi_insert_before (bsi, op, BSI_NEW_STMT);
3068 TREE_OPERAND (tmp, 0) = TREE_OPERAND (op, 0);
3070 bsi_prev (bsi);
3071 return true;
3075 /* Otherwise, create a new basic block, and split this edge. */
3076 dest = split_edge (e);
3077 if (new_bb)
3078 *new_bb = dest;
3079 e = single_pred_edge (dest);
3080 goto restart;
3084 /* This routine will commit all pending edge insertions, creating any new
3085 basic blocks which are necessary. */
3087 void
3088 bsi_commit_edge_inserts (void)
3090 basic_block bb;
3091 edge e;
3092 edge_iterator ei;
3094 bsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR), NULL);
3096 FOR_EACH_BB (bb)
3097 FOR_EACH_EDGE (e, ei, bb->succs)
3098 bsi_commit_one_edge_insert (e, NULL);
3102 /* Commit insertions pending at edge E. If a new block is created, set NEW_BB
3103 to this block, otherwise set it to NULL. */
3105 void
3106 bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
3108 if (new_bb)
3109 *new_bb = NULL;
3110 if (PENDING_STMT (e))
3112 block_stmt_iterator bsi;
3113 tree stmt = PENDING_STMT (e);
3115 PENDING_STMT (e) = NULL_TREE;
3117 if (tree_find_edge_insert_loc (e, &bsi, new_bb))
3118 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3119 else
3120 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3125 /* Add STMT to the pending list of edge E. No actual insertion is
3126 made until a call to bsi_commit_edge_inserts () is made. */
3128 void
3129 bsi_insert_on_edge (edge e, tree stmt)
3131 append_to_statement_list (stmt, &PENDING_STMT (e));
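/* Illustrative usage sketch (assumed pass context, not from this file):
   statements are typically queued on edges and then committed in one go, so
   that edges are split only when no existing block can hold the new code.
   Assumes mark_dfs_back_edges has already set EDGE_DFS_BACK.  */
#if 0
static void
insert_on_all_back_edges (tree stmt_template)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  FOR_EACH_BB (bb)
    FOR_EACH_EDGE (e, ei, bb->succs)
      if (e->flags & EDGE_DFS_BACK)
	bsi_insert_on_edge (e, unshare_expr (stmt_template));

  /* No insertion happens before this point; new blocks are created here
     if needed.  */
  bsi_commit_edge_inserts ();
}
#endif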
3134 /* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts. If a new
3135 block has to be created, it is returned. */
3137 basic_block
3138 bsi_insert_on_edge_immediate (edge e, tree stmt)
3140 block_stmt_iterator bsi;
3141 basic_block new_bb = NULL;
3143 gcc_assert (!PENDING_STMT (e));
3145 if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
3146 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3147 else
3148 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3150 return new_bb;
3153 /*---------------------------------------------------------------------------
3154 Tree specific functions for CFG manipulation
3155 ---------------------------------------------------------------------------*/
3157 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
3159 static void
3160 reinstall_phi_args (edge new_edge, edge old_edge)
3162 tree var, phi;
3164 if (!PENDING_STMT (old_edge))
3165 return;
3167 for (var = PENDING_STMT (old_edge), phi = phi_nodes (new_edge->dest);
3168 var && phi;
3169 var = TREE_CHAIN (var), phi = PHI_CHAIN (phi))
3171 tree result = TREE_PURPOSE (var);
3172 tree arg = TREE_VALUE (var);
3174 gcc_assert (result == PHI_RESULT (phi));
3176 add_phi_arg (phi, arg, new_edge);
3179 PENDING_STMT (old_edge) = NULL;
3182 /* Returns the basic block after which the new basic block created
3183 by splitting edge EDGE_IN should be placed. Tries to keep the new block
3184 near its "logical" location. This is of most help to humans looking
3185 at debugging dumps. */
3187 static basic_block
3188 split_edge_bb_loc (edge edge_in)
3190 basic_block dest = edge_in->dest;
3192 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
3193 return edge_in->src;
3194 else
3195 return dest->prev_bb;
3198 /* Split a (typically critical) edge EDGE_IN. Return the new block.
3199 Abort on abnormal edges. */
3201 static basic_block
3202 tree_split_edge (edge edge_in)
3204 basic_block new_bb, after_bb, dest, src;
3205 edge new_edge, e;
3207 /* Abnormal edges cannot be split. */
3208 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
3210 src = edge_in->src;
3211 dest = edge_in->dest;
3213 after_bb = split_edge_bb_loc (edge_in);
3215 new_bb = create_empty_bb (after_bb);
3216 new_bb->frequency = EDGE_FREQUENCY (edge_in);
3217 new_bb->count = edge_in->count;
3218 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
3219 new_edge->probability = REG_BR_PROB_BASE;
3220 new_edge->count = edge_in->count;
3222 e = redirect_edge_and_branch (edge_in, new_bb);
3223 gcc_assert (e);
3224 reinstall_phi_args (new_edge, e);
3226 return new_bb;
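/* Illustrative note (not original commentary): splitting matters most for
   critical edges, i.e. edges whose source has several successors and whose
   destination has several predecessors.  Code that must run only when control
   flows along such an edge has no existing block to live in, so
   tree_find_edge_insert_loc falls back to split_edge, which ends up here, to
   create one.  */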
3230 /* Return true when BB has label LABEL in it. */
3232 static bool
3233 has_label_p (basic_block bb, tree label)
3235 block_stmt_iterator bsi;
3237 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3239 tree stmt = bsi_stmt (bsi);
3241 if (TREE_CODE (stmt) != LABEL_EXPR)
3242 return false;
3243 if (LABEL_EXPR_LABEL (stmt) == label)
3244 return true;
3246 return false;
3250 /* Callback for walk_tree, check that all elements with address taken are
3251 properly noticed as such. DATA is nonnull if TP was seen
3252 inside a PHI node. */
3254 static tree
3255 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3257 tree t = *tp, x;
3258 bool in_phi = (data != NULL);
3260 if (TYPE_P (t))
3261 *walk_subtrees = 0;
3263 /* Check operand N for being valid GIMPLE and give error MSG if not. */
3264 #define CHECK_OP(N, MSG) \
3265 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
3266 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
3268 switch (TREE_CODE (t))
3270 case SSA_NAME:
3271 if (SSA_NAME_IN_FREE_LIST (t))
3273 error ("SSA name in freelist but still referenced");
3274 return *tp;
3276 break;
3278 case ASSERT_EXPR:
3279 x = fold (ASSERT_EXPR_COND (t));
3280 if (x == boolean_false_node)
3282 error ("ASSERT_EXPR with an always-false condition");
3283 return *tp;
3285 break;
3287 case MODIFY_EXPR:
3288 x = TREE_OPERAND (t, 0);
3289 if (TREE_CODE (x) == BIT_FIELD_REF
3290 && is_gimple_reg (TREE_OPERAND (x, 0)))
3292 error ("GIMPLE register modified with BIT_FIELD_REF");
3293 return t;
3295 break;
3297 case ADDR_EXPR:
3299 bool old_invariant;
3300 bool old_constant;
3301 bool old_side_effects;
3302 bool new_invariant;
3303 bool new_constant;
3304 bool new_side_effects;
3306 /* ??? tree-ssa-alias.c may have overlooked dead PHI nodes, missing
3307 dead PHIs that take the address of something. But if the PHI
3308 result is dead, the fact that it takes the address of anything
3309 is irrelevant. Because we cannot tell from here if a PHI result
3310 is dead, we just skip this check for PHIs altogether. This means
3311 we may be missing "valid" checks, but what can you do?
3312 This was PR19217. */
3313 if (in_phi)
3314 break;
3316 old_invariant = TREE_INVARIANT (t);
3317 old_constant = TREE_CONSTANT (t);
3318 old_side_effects = TREE_SIDE_EFFECTS (t);
3320 recompute_tree_invariant_for_addr_expr (t);
3321 new_invariant = TREE_INVARIANT (t);
3322 new_side_effects = TREE_SIDE_EFFECTS (t);
3323 new_constant = TREE_CONSTANT (t);
3325 if (old_invariant != new_invariant)
3327 error ("invariant not recomputed when ADDR_EXPR changed");
3328 return t;
3331 if (old_constant != new_constant)
3333 error ("constant not recomputed when ADDR_EXPR changed");
3334 return t;
3336 if (old_side_effects != new_side_effects)
3338 error ("side effects not recomputed when ADDR_EXPR changed");
3339 return t;
3342 /* Skip any references (they will be checked when we recurse down the
3343 tree) and ensure that any variable used as a prefix is marked
3344 addressable. */
3345 for (x = TREE_OPERAND (t, 0);
3346 handled_component_p (x);
3347 x = TREE_OPERAND (x, 0))
3350 if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
3351 return NULL;
3352 if (!TREE_ADDRESSABLE (x))
3354 error ("address taken, but ADDRESSABLE bit not set");
3355 return x;
3357 break;
3360 case COND_EXPR:
3361 x = COND_EXPR_COND (t);
3362 if (TREE_CODE (TREE_TYPE (x)) != BOOLEAN_TYPE)
3364 error ("non-boolean used in condition");
3365 return x;
3367 if (!is_gimple_condexpr (x))
3369 error ("invalid conditional operand");
3370 return x;
3372 break;
3374 case NOP_EXPR:
3375 case CONVERT_EXPR:
3376 case FIX_TRUNC_EXPR:
3377 case FIX_CEIL_EXPR:
3378 case FIX_FLOOR_EXPR:
3379 case FIX_ROUND_EXPR:
3380 case FLOAT_EXPR:
3381 case NEGATE_EXPR:
3382 case ABS_EXPR:
3383 case BIT_NOT_EXPR:
3384 case NON_LVALUE_EXPR:
3385 case TRUTH_NOT_EXPR:
3386 CHECK_OP (0, "invalid operand to unary operator");
3387 break;
3389 case REALPART_EXPR:
3390 case IMAGPART_EXPR:
3391 case COMPONENT_REF:
3392 case ARRAY_REF:
3393 case ARRAY_RANGE_REF:
3394 case BIT_FIELD_REF:
3395 case VIEW_CONVERT_EXPR:
3396 /* We have a nest of references. Verify that each of the operands
3397 that determine where to reference is either a constant or a variable,
3398 verify that the base is valid, and then show we've already checked
3399 the subtrees. */
3400 while (handled_component_p (t))
3402 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3403 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3404 else if (TREE_CODE (t) == ARRAY_REF
3405 || TREE_CODE (t) == ARRAY_RANGE_REF)
3407 CHECK_OP (1, "invalid array index");
3408 if (TREE_OPERAND (t, 2))
3409 CHECK_OP (2, "invalid array lower bound");
3410 if (TREE_OPERAND (t, 3))
3411 CHECK_OP (3, "invalid array stride");
3413 else if (TREE_CODE (t) == BIT_FIELD_REF)
3415 CHECK_OP (1, "invalid operand to BIT_FIELD_REF");
3416 CHECK_OP (2, "invalid operand to BIT_FIELD_REF");
3419 t = TREE_OPERAND (t, 0);
3422 if (!CONSTANT_CLASS_P (t) && !is_gimple_lvalue (t))
3424 error ("invalid reference prefix");
3425 return t;
3427 *walk_subtrees = 0;
3428 break;
3430 case LT_EXPR:
3431 case LE_EXPR:
3432 case GT_EXPR:
3433 case GE_EXPR:
3434 case EQ_EXPR:
3435 case NE_EXPR:
3436 case UNORDERED_EXPR:
3437 case ORDERED_EXPR:
3438 case UNLT_EXPR:
3439 case UNLE_EXPR:
3440 case UNGT_EXPR:
3441 case UNGE_EXPR:
3442 case UNEQ_EXPR:
3443 case LTGT_EXPR:
3444 case PLUS_EXPR:
3445 case MINUS_EXPR:
3446 case MULT_EXPR:
3447 case TRUNC_DIV_EXPR:
3448 case CEIL_DIV_EXPR:
3449 case FLOOR_DIV_EXPR:
3450 case ROUND_DIV_EXPR:
3451 case TRUNC_MOD_EXPR:
3452 case CEIL_MOD_EXPR:
3453 case FLOOR_MOD_EXPR:
3454 case ROUND_MOD_EXPR:
3455 case RDIV_EXPR:
3456 case EXACT_DIV_EXPR:
3457 case MIN_EXPR:
3458 case MAX_EXPR:
3459 case LSHIFT_EXPR:
3460 case RSHIFT_EXPR:
3461 case LROTATE_EXPR:
3462 case RROTATE_EXPR:
3463 case BIT_IOR_EXPR:
3464 case BIT_XOR_EXPR:
3465 case BIT_AND_EXPR:
3466 CHECK_OP (0, "invalid operand to binary operator");
3467 CHECK_OP (1, "invalid operand to binary operator");
3468 break;
3470 default:
3471 break;
3473 return NULL;
3475 #undef CHECK_OP
3479 /* Verify STMT, return true if STMT is not in GIMPLE form.
3480 TODO: Implement type checking. */
3482 static bool
3483 verify_stmt (tree stmt, bool last_in_block)
3485 tree addr;
3487 if (OMP_DIRECTIVE_P (stmt))
3489 /* OpenMP directives are validated by the FE and never operated
3490 on by the optimizers. Furthermore, OMP_FOR may contain
3491 non-gimple expressions when the main index variable has had
3492 its address taken. This does not affect the loop itself
3493 because the header of an OMP_FOR is merely used to determine
3494 how to set up the parallel iteration. */
3495 return false;
3498 if (!is_gimple_stmt (stmt))
3500 error ("is not a valid GIMPLE statement");
3501 goto fail;
3504 addr = walk_tree (&stmt, verify_expr, NULL, NULL);
3505 if (addr)
3507 debug_generic_stmt (addr);
3508 return true;
3511 /* If the statement is marked as part of an EH region, then it is
3512 expected that the statement could throw. Verify that when we
3513 have optimizations that simplify statements such that we prove
3514 that they cannot throw, that we update other data structures
3515 to match. */
3516 if (lookup_stmt_eh_region (stmt) >= 0)
3518 if (!tree_could_throw_p (stmt))
3520 error ("statement marked for throw, but doesn%'t");
3521 goto fail;
3523 if (!last_in_block && tree_can_throw_internal (stmt))
3525 error ("statement marked for throw in middle of block");
3526 goto fail;
3530 return false;
3532 fail:
3533 debug_generic_stmt (stmt);
3534 return true;
3538 /* Return true when the T can be shared. */
3540 static bool
3541 tree_node_can_be_shared (tree t)
3543 if (IS_TYPE_OR_DECL_P (t)
3544 || is_gimple_min_invariant (t)
3545 || TREE_CODE (t) == SSA_NAME
3546 || t == error_mark_node
3547 || TREE_CODE (t) == IDENTIFIER_NODE)
3548 return true;
3550 if (TREE_CODE (t) == CASE_LABEL_EXPR)
3551 return true;
3553 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3554 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
3555 || TREE_CODE (t) == COMPONENT_REF
3556 || TREE_CODE (t) == REALPART_EXPR
3557 || TREE_CODE (t) == IMAGPART_EXPR)
3558 t = TREE_OPERAND (t, 0);
3560 if (DECL_P (t))
3561 return true;
3563 return false;
3567 /* Called via walk_trees. Verify tree sharing. */
3569 static tree
3570 verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
3572 htab_t htab = (htab_t) data;
3573 void **slot;
3575 if (tree_node_can_be_shared (*tp))
3577 *walk_subtrees = false;
3578 return NULL;
3581 slot = htab_find_slot (htab, *tp, INSERT);
3582 if (*slot)
3583 return (tree) *slot;
3584 *slot = *tp;
3586 return NULL;
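/* Standalone sketch (not GCC code) of the pointer-identity test behind the
   check above: a node is "shared" exactly when the same address is reached
   twice during the walk, which a hash table of seen pointers detects in
   roughly constant time per node.  Minimal open-addressing version; no
   resizing, CAP must exceed the number of distinct nodes.  */
#if 0
#include <stddef.h>

static int
seen_before (const void **table, size_t cap, const void *ptr)
{
  size_t i = ((size_t) ptr / sizeof (void *)) % cap;

  while (table[i] != NULL)
    {
      if (table[i] == ptr)
	return 1;		/* Already walked: node is shared.  */
      i = (i + 1) % cap;
    }
  table[i] = ptr;		/* First visit: remember it.  */
  return 0;
}
#endif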
3590 /* Verify the GIMPLE statement chain. */
3592 void
3593 verify_stmts (void)
3595 basic_block bb;
3596 block_stmt_iterator bsi;
3597 bool err = false;
3598 htab_t htab;
3599 tree addr;
3601 timevar_push (TV_TREE_STMT_VERIFY);
3602 htab = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
3604 FOR_EACH_BB (bb)
3606 tree phi;
3607 int i;
3609 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
3611 int phi_num_args = PHI_NUM_ARGS (phi);
3613 if (bb_for_stmt (phi) != bb)
3615 error ("bb_for_stmt (phi) is set to a wrong basic block");
3616 err |= true;
3619 for (i = 0; i < phi_num_args; i++)
3621 tree t = PHI_ARG_DEF (phi, i);
3622 tree addr;
3624 /* Addressable variables do have SSA_NAMEs but they
3625 are not considered gimple values. */
3626 if (TREE_CODE (t) != SSA_NAME
3627 && TREE_CODE (t) != FUNCTION_DECL
3628 && !is_gimple_val (t))
3630 error ("PHI def is not a GIMPLE value");
3631 debug_generic_stmt (phi);
3632 debug_generic_stmt (t);
3633 err |= true;
3636 addr = walk_tree (&t, verify_expr, (void *) 1, NULL);
3637 if (addr)
3639 debug_generic_stmt (addr);
3640 err |= true;
3643 addr = walk_tree (&t, verify_node_sharing, htab, NULL);
3644 if (addr)
3646 error ("incorrect sharing of tree nodes");
3647 debug_generic_stmt (phi);
3648 debug_generic_stmt (addr);
3649 err |= true;
3654 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
3656 tree stmt = bsi_stmt (bsi);
3658 if (bb_for_stmt (stmt) != bb)
3660 error ("bb_for_stmt (stmt) is set to a wrong basic block");
3661 err |= true;
3664 bsi_next (&bsi);
3665 err |= verify_stmt (stmt, bsi_end_p (bsi));
3666 addr = walk_tree (&stmt, verify_node_sharing, htab, NULL);
3667 if (addr)
3669 error ("incorrect sharing of tree nodes");
3670 debug_generic_stmt (stmt);
3671 debug_generic_stmt (addr);
3672 err |= true;
3677 if (err)
3678 internal_error ("verify_stmts failed");
3680 htab_delete (htab);
3681 timevar_pop (TV_TREE_STMT_VERIFY);
3685 /* Verifies that the flow information is OK. */
3687 static int
3688 tree_verify_flow_info (void)
3690 int err = 0;
3691 basic_block bb;
3692 block_stmt_iterator bsi;
3693 tree stmt;
3694 edge e;
3695 edge_iterator ei;
3697 if (ENTRY_BLOCK_PTR->stmt_list)
3699 error ("ENTRY_BLOCK has a statement list associated with it");
3700 err = 1;
3703 if (EXIT_BLOCK_PTR->stmt_list)
3705 error ("EXIT_BLOCK has a statement list associated with it");
3706 err = 1;
3709 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
3710 if (e->flags & EDGE_FALLTHRU)
3712 error ("fallthru to exit from bb %d", e->src->index);
3713 err = 1;
3716 FOR_EACH_BB (bb)
3718 bool found_ctrl_stmt = false;
3720 stmt = NULL_TREE;
3722 /* Skip labels at the start of the basic block. */
3723 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3725 tree prev_stmt = stmt;
3727 stmt = bsi_stmt (bsi);
3729 if (TREE_CODE (stmt) != LABEL_EXPR)
3730 break;
3732 if (prev_stmt && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
3734 error ("nonlocal label ");
3735 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3736 fprintf (stderr, " is not first in a sequence of labels in bb %d",
3737 bb->index);
3738 err = 1;
3741 if (label_to_block (LABEL_EXPR_LABEL (stmt)) != bb)
3743 error ("label ");
3744 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3745 fprintf (stderr, " to block does not match in bb %d",
3746 bb->index);
3747 err = 1;
3750 if (decl_function_context (LABEL_EXPR_LABEL (stmt))
3751 != current_function_decl)
3753 error ("label ");
3754 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3755 fprintf (stderr, " has incorrect context in bb %d",
3756 bb->index);
3757 err = 1;
3761 /* Verify that body of basic block BB is free of control flow. */
3762 for (; !bsi_end_p (bsi); bsi_next (&bsi))
3764 tree stmt = bsi_stmt (bsi);
3766 if (found_ctrl_stmt)
3768 error ("control flow in the middle of basic block %d",
3769 bb->index);
3770 err = 1;
3773 if (stmt_ends_bb_p (stmt))
3774 found_ctrl_stmt = true;
3776 if (TREE_CODE (stmt) == LABEL_EXPR)
3778 error ("label ");
3779 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3780 fprintf (stderr, " in the middle of basic block %d", bb->index);
3781 err = 1;
3785 bsi = bsi_last (bb);
3786 if (bsi_end_p (bsi))
3787 continue;
3789 stmt = bsi_stmt (bsi);
3791 err |= verify_eh_edges (stmt);
3793 if (is_ctrl_stmt (stmt))
3795 FOR_EACH_EDGE (e, ei, bb->succs)
3796 if (e->flags & EDGE_FALLTHRU)
3798 error ("fallthru edge after a control statement in bb %d",
3799 bb->index);
3800 err = 1;
3804 switch (TREE_CODE (stmt))
3806 case COND_EXPR:
3808 edge true_edge;
3809 edge false_edge;
3810 if (TREE_CODE (COND_EXPR_THEN (stmt)) != GOTO_EXPR
3811 || TREE_CODE (COND_EXPR_ELSE (stmt)) != GOTO_EXPR)
3813 error ("structured COND_EXPR at the end of bb %d", bb->index);
3814 err = 1;
3817 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
3819 if (!true_edge || !false_edge
3820 || !(true_edge->flags & EDGE_TRUE_VALUE)
3821 || !(false_edge->flags & EDGE_FALSE_VALUE)
3822 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3823 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3824 || EDGE_COUNT (bb->succs) >= 3)
3826 error ("wrong outgoing edge flags at end of bb %d",
3827 bb->index);
3828 err = 1;
3831 if (!has_label_p (true_edge->dest,
3832 GOTO_DESTINATION (COND_EXPR_THEN (stmt))))
3834 error ("%<then%> label does not match edge at end of bb %d",
3835 bb->index);
3836 err = 1;
3839 if (!has_label_p (false_edge->dest,
3840 GOTO_DESTINATION (COND_EXPR_ELSE (stmt))))
3842 error ("%<else%> label does not match edge at end of bb %d",
3843 bb->index);
3844 err = 1;
3847 break;
3849 case GOTO_EXPR:
3850 if (simple_goto_p (stmt))
3852 error ("explicit goto at end of bb %d", bb->index);
3853 err = 1;
3855 else
3857 /* FIXME. We should double check that the labels in the
3858 destination blocks have their address taken. */
3859 FOR_EACH_EDGE (e, ei, bb->succs)
3860 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
3861 | EDGE_FALSE_VALUE))
3862 || !(e->flags & EDGE_ABNORMAL))
3864 error ("wrong outgoing edge flags at end of bb %d",
3865 bb->index);
3866 err = 1;
3869 break;
3871 case RETURN_EXPR:
3872 if (!single_succ_p (bb)
3873 || (single_succ_edge (bb)->flags
3874 & (EDGE_FALLTHRU | EDGE_ABNORMAL
3875 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3877 error ("wrong outgoing edge flags at end of bb %d", bb->index);
3878 err = 1;
3880 if (single_succ (bb) != EXIT_BLOCK_PTR)
3882 error ("return edge does not point to exit in bb %d",
3883 bb->index);
3884 err = 1;
3886 break;
3888 case SWITCH_EXPR:
3890 tree prev;
3891 edge e;
3892 size_t i, n;
3893 tree vec;
3895 vec = SWITCH_LABELS (stmt);
3896 n = TREE_VEC_LENGTH (vec);
3898 /* Mark all the destination basic blocks. */
3899 for (i = 0; i < n; ++i)
3901 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3902 basic_block label_bb = label_to_block (lab);
3904 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
3905 label_bb->aux = (void *)1;
3908 /* Verify that the case labels are sorted. */
3909 prev = TREE_VEC_ELT (vec, 0);
3910 for (i = 1; i < n - 1; ++i)
3912 tree c = TREE_VEC_ELT (vec, i);
3913 if (! CASE_LOW (c))
3915 error ("found default case not at end of case vector");
3916 err = 1;
3917 continue;
3919 if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
3921 error ("case labels not sorted: ");
3922 print_generic_expr (stderr, prev, 0);
3923 fprintf (stderr," is greater than ");
3924 print_generic_expr (stderr, c, 0);
3925 fprintf (stderr," but comes before it.\n");
3926 err = 1;
3928 prev = c;
3930 if (CASE_LOW (TREE_VEC_ELT (vec, n - 1)))
3932 error ("no default case found at end of case vector");
3933 err = 1;
3936 FOR_EACH_EDGE (e, ei, bb->succs)
3938 if (!e->dest->aux)
3940 error ("extra outgoing edge %d->%d",
3941 bb->index, e->dest->index);
3942 err = 1;
3944 e->dest->aux = (void *)2;
3945 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
3946 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3948 error ("wrong outgoing edge flags at end of bb %d",
3949 bb->index);
3950 err = 1;
3954 /* Check that we have all of them. */
3955 for (i = 0; i < n; ++i)
3957 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3958 basic_block label_bb = label_to_block (lab);
3960 if (label_bb->aux != (void *)2)
3962 error ("missing edge %i->%i",
3963 bb->index, label_bb->index);
3964 err = 1;
3968 FOR_EACH_EDGE (e, ei, bb->succs)
3969 e->dest->aux = (void *)0;
3972 default: ;
3976 if (dom_computed[CDI_DOMINATORS] >= DOM_NO_FAST_QUERY)
3977 verify_dominators (CDI_DOMINATORS);
3979 return err;
3983 /* Updates phi nodes after creating a forwarder block joined
3984 by edge FALLTHRU. */
3986 static void
3987 tree_make_forwarder_block (edge fallthru)
3989 edge e;
3990 edge_iterator ei;
3991 basic_block dummy, bb;
3992 tree phi, new_phi, var;
3994 dummy = fallthru->src;
3995 bb = fallthru->dest;
3997 if (single_pred_p (bb))
3998 return;
4000 /* If we redirected a branch we must create new phi nodes at the
4001 start of BB. */
4002 for (phi = phi_nodes (dummy); phi; phi = PHI_CHAIN (phi))
4004 var = PHI_RESULT (phi);
4005 new_phi = create_phi_node (var, bb);
4006 SSA_NAME_DEF_STMT (var) = new_phi;
4007 SET_PHI_RESULT (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
4008 add_phi_arg (new_phi, PHI_RESULT (phi), fallthru);
4011 /* Ensure that the PHI node chain is in the same order. */
4012 set_phi_nodes (bb, phi_reverse (phi_nodes (bb)));
4014 /* Add the arguments we have stored on edges. */
4015 FOR_EACH_EDGE (e, ei, bb->preds)
4017 if (e == fallthru)
4018 continue;
4020 flush_pending_stmts (e);
4025 /* Return a non-special label at the head of basic block BB.
4026 Create one if it doesn't exist. */
4028 tree
4029 tree_block_label (basic_block bb)
4031 block_stmt_iterator i, s = bsi_start (bb);
4032 bool first = true;
4033 tree label, stmt;
4035 for (i = s; !bsi_end_p (i); first = false, bsi_next (&i))
4037 stmt = bsi_stmt (i);
4038 if (TREE_CODE (stmt) != LABEL_EXPR)
4039 break;
4040 label = LABEL_EXPR_LABEL (stmt);
4041 if (!DECL_NONLOCAL (label))
4043 if (!first)
4044 bsi_move_before (&i, &s);
4045 return label;
4049 label = create_artificial_label ();
4050 stmt = build1 (LABEL_EXPR, void_type_node, label);
4051 bsi_insert_before (&s, stmt, BSI_NEW_STMT);
4052 return label;
4056 /* Attempt to perform edge redirection by replacing a possibly complex
4057 jump instruction by a goto or by removing the jump completely.
4058 This can apply only if all edges now point to the same block. The
4059 parameters and return values are equivalent to
4060 redirect_edge_and_branch. */
4062 static edge
4063 tree_try_redirect_by_replacing_jump (edge e, basic_block target)
4065 basic_block src = e->src;
4066 block_stmt_iterator b;
4067 tree stmt;
4069 /* We can replace or remove a complex jump only when we have exactly
4070 two edges. */
4071 if (EDGE_COUNT (src->succs) != 2
4072 /* Verify that all targets will be TARGET. Specifically, the
4073 edge that is not E must also go to TARGET. */
4074 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4075 return NULL;
4077 b = bsi_last (src);
4078 if (bsi_end_p (b))
4079 return NULL;
4080 stmt = bsi_stmt (b);
4082 if (TREE_CODE (stmt) == COND_EXPR
4083 || TREE_CODE (stmt) == SWITCH_EXPR)
4085 bsi_remove (&b, true);
4086 e = ssa_redirect_edge (e, target);
4087 e->flags = EDGE_FALLTHRU;
4088 return e;
4091 return NULL;
4095 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4096 edge representing the redirected branch. */
4098 static edge
4099 tree_redirect_edge_and_branch (edge e, basic_block dest)
4101 basic_block bb = e->src;
4102 block_stmt_iterator bsi;
4103 edge ret;
4104 tree label, stmt;
4106 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4107 return NULL;
4109 if (e->src != ENTRY_BLOCK_PTR
4110 && (ret = tree_try_redirect_by_replacing_jump (e, dest)))
4111 return ret;
4113 if (e->dest == dest)
4114 return NULL;
4116 label = tree_block_label (dest);
4118 bsi = bsi_last (bb);
4119 stmt = bsi_end_p (bsi) ? NULL : bsi_stmt (bsi);
4121 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
4123 case COND_EXPR:
4124 stmt = (e->flags & EDGE_TRUE_VALUE
4125 ? COND_EXPR_THEN (stmt)
4126 : COND_EXPR_ELSE (stmt));
4127 GOTO_DESTINATION (stmt) = label;
4128 break;
4130 case GOTO_EXPR:
4131 /* No non-abnormal edges should lead from a non-simple goto, and
4132 simple ones should be represented implicitly. */
4133 gcc_unreachable ();
4135 case SWITCH_EXPR:
4137 tree cases = get_cases_for_edge (e, stmt);
4139 /* If we have a list of cases associated with E, then use it
4140 as it's a lot faster than walking the entire case vector. */
4141 if (cases)
4143 edge e2 = find_edge (e->src, dest);
4144 tree last, first;
4146 first = cases;
4147 while (cases)
4149 last = cases;
4150 CASE_LABEL (cases) = label;
4151 cases = TREE_CHAIN (cases);
4154 /* If there was already an edge in the CFG, then we need
4155 to move all the cases associated with E to E2. */
4156 if (e2)
4158 tree cases2 = get_cases_for_edge (e2, stmt);
4160 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4161 TREE_CHAIN (cases2) = first;
4164 else
4166 tree vec = SWITCH_LABELS (stmt);
4167 size_t i, n = TREE_VEC_LENGTH (vec);
4169 for (i = 0; i < n; i++)
4171 tree elt = TREE_VEC_ELT (vec, i);
4173 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4174 CASE_LABEL (elt) = label;
4178 break;
4181 case RETURN_EXPR:
4182 bsi_remove (&bsi, true);
4183 e->flags |= EDGE_FALLTHRU;
4184 break;
4186 default:
4187 /* Otherwise it must be a fallthru edge, and we don't need to
4188 do anything besides redirecting it. */
4189 gcc_assert (e->flags & EDGE_FALLTHRU);
4190 break;
4193 /* Update/insert PHI nodes as necessary. */
4195 /* Now update the edges in the CFG. */
4196 e = ssa_redirect_edge (e, dest);
4198 return e;
4202 /* Simple wrapper, as we can always redirect fallthru edges. */
4204 static basic_block
4205 tree_redirect_edge_and_branch_force (edge e, basic_block dest)
4207 e = tree_redirect_edge_and_branch (e, dest);
4208 gcc_assert (e);
4210 return NULL;
4214 /* Splits basic block BB after statement STMT (but at least after the
4215 labels). If STMT is NULL, BB is split just after the labels. */
4217 static basic_block
4218 tree_split_block (basic_block bb, void *stmt)
4220 block_stmt_iterator bsi, bsi_tgt;
4221 tree act;
4222 basic_block new_bb;
4223 edge e;
4224 edge_iterator ei;
4226 new_bb = create_empty_bb (bb);
4228 /* Redirect the outgoing edges. */
4229 new_bb->succs = bb->succs;
4230 bb->succs = NULL;
4231 FOR_EACH_EDGE (e, ei, new_bb->succs)
4232 e->src = new_bb;
4234 if (stmt && TREE_CODE ((tree) stmt) == LABEL_EXPR)
4235 stmt = NULL;
4237 /* Move everything from BSI to the new basic block. */
4238 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4240 act = bsi_stmt (bsi);
4241 if (TREE_CODE (act) == LABEL_EXPR)
4242 continue;
4244 if (!stmt)
4245 break;
4247 if (stmt == act)
4249 bsi_next (&bsi);
4250 break;
4254 bsi_tgt = bsi_start (new_bb);
4255 while (!bsi_end_p (bsi))
4257 act = bsi_stmt (bsi);
4258 bsi_remove (&bsi, false);
4259 bsi_insert_after (&bsi_tgt, act, BSI_NEW_STMT);
4262 return new_bb;
4266 /* Moves basic block BB after block AFTER. */
4268 static bool
4269 tree_move_block_after (basic_block bb, basic_block after)
4271 if (bb->prev_bb == after)
4272 return true;
4274 unlink_block (bb);
4275 link_block (bb, after);
4277 return true;
4281 /* Return true if basic_block can be duplicated. */
4283 static bool
4284 tree_can_duplicate_bb_p (basic_block bb ATTRIBUTE_UNUSED)
4286 return true;
4290 /* Create a duplicate of the basic block BB. NOTE: This does not
4291 preserve SSA form. */
4293 static basic_block
4294 tree_duplicate_bb (basic_block bb)
4296 basic_block new_bb;
4297 block_stmt_iterator bsi, bsi_tgt;
4298 tree phi;
4300 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
4302 /* Copy the PHI nodes. We ignore PHI node arguments here because
4303 the incoming edges have not been set up yet. */
4304 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4306 tree copy = create_phi_node (PHI_RESULT (phi), new_bb);
4307 create_new_def_for (PHI_RESULT (copy), copy, PHI_RESULT_PTR (copy));
4310 /* Keep the chain of PHI nodes in the same order so that they can be
4311 updated by ssa_redirect_edge. */
4312 set_phi_nodes (new_bb, phi_reverse (phi_nodes (new_bb)));
4314 bsi_tgt = bsi_start (new_bb);
4315 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4317 def_operand_p def_p;
4318 ssa_op_iter op_iter;
4319 tree stmt, copy;
4320 int region;
4322 stmt = bsi_stmt (bsi);
4323 if (TREE_CODE (stmt) == LABEL_EXPR)
4324 continue;
4326 /* Create a new copy of STMT and duplicate STMT's virtual
4327 operands. */
4328 copy = unshare_expr (stmt);
4329 bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
4330 copy_virtual_operands (copy, stmt);
4331 region = lookup_stmt_eh_region (stmt);
4332 if (region >= 0)
4333 add_stmt_to_eh_region (copy, region);
4335 /* Create new names for all the definitions created by COPY and
4336 add replacement mappings for each new name. */
4337 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
4338 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
4341 return new_bb;
4345 /* Basic block BB_COPY was created by code duplication. Add phi node
4346 arguments for edges going out of BB_COPY. The blocks that were
4347 duplicated have BB_DUPLICATED set. */
4349 void
4350 add_phi_args_after_copy_bb (basic_block bb_copy)
4352 basic_block bb, dest;
4353 edge e, e_copy;
4354 edge_iterator ei;
4355 tree phi, phi_copy, phi_next, def;
4357 bb = get_bb_original (bb_copy);
4359 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
4361 if (!phi_nodes (e_copy->dest))
4362 continue;
4364 if (e_copy->dest->flags & BB_DUPLICATED)
4365 dest = get_bb_original (e_copy->dest);
4366 else
4367 dest = e_copy->dest;
4369 e = find_edge (bb, dest);
4370 if (!e)
4372 /* During loop unrolling the target of the latch edge is copied.
4373 In this case we are not looking for the edge to DEST, but for the
4374 edge to the duplicated block whose original was DEST. */
4375 FOR_EACH_EDGE (e, ei, bb->succs)
4376 if ((e->dest->flags & BB_DUPLICATED)
4377 && get_bb_original (e->dest) == dest)
4378 break;
4380 gcc_assert (e != NULL);
4383 for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
4384 phi;
4385 phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
4387 phi_next = PHI_CHAIN (phi);
4388 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4389 add_phi_arg (phi_copy, def, e_copy);
4394 /* Blocks in REGION_COPY array of length N_REGION were created by
4395 duplication of basic blocks. Add phi node arguments for edges
4396 going from these blocks. */
4398 void
4399 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region)
4401 unsigned i;
4403 for (i = 0; i < n_region; i++)
4404 region_copy[i]->flags |= BB_DUPLICATED;
4406 for (i = 0; i < n_region; i++)
4407 add_phi_args_after_copy_bb (region_copy[i]);
4409 for (i = 0; i < n_region; i++)
4410 region_copy[i]->flags &= ~BB_DUPLICATED;
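/* A minimal sketch of the intended calling sequence, mirroring
   tree_duplicate_sese_region below; REGION, REGION_COPY, N_REGION,
   ENTRY, EXIT, EXIT_COPY and LOOP are placeholders provided by the
   caller.  */
#if 0
  initialize_original_copy_tables ();
  copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy,
	    loop, split_edge_bb_loc (entry));
  add_phi_args_after_copy (region_copy, n_region);
  update_ssa (TODO_update_ssa);
  free_original_copy_tables ();
#endif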
4413 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
4414 important exit edge EXIT. By important we mean that no SSA name defined
4415 inside region is live over the other exit edges of the region. All entry
4416 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
4417 to the duplicate of the region. SSA form, dominance and loop information
4418 is updated. The new basic blocks are stored to REGION_COPY in the same
4419 order as they had in REGION, provided that REGION_COPY is not NULL.
4420 The function returns false if it is unable to copy the region,
4421 true otherwise. */
4423 bool
4424 tree_duplicate_sese_region (edge entry, edge exit,
4425 basic_block *region, unsigned n_region,
4426 basic_block *region_copy)
4428 unsigned i, n_doms;
4429 bool free_region_copy = false, copying_header = false;
4430 struct loop *loop = entry->dest->loop_father;
4431 edge exit_copy;
4432 basic_block *doms;
4433 edge redirected;
4434 int total_freq = 0, entry_freq = 0;
4435 gcov_type total_count = 0, entry_count = 0;
4437 if (!can_copy_bbs_p (region, n_region))
4438 return false;
4440 /* Some sanity checking. Note that we do not check for all possible
4441 misuses of the function. That is, if you ask to copy something weird,
4442 the copy will be made, but the state of the data structures will
4443 probably not be correct. */
4444 for (i = 0; i < n_region; i++)
4446 /* We do not handle subloops, i.e. all the blocks must belong to the
4447 same loop. */
4448 if (region[i]->loop_father != loop)
4449 return false;
4451 if (region[i] != entry->dest
4452 && region[i] == loop->header)
4453 return false;
4456 loop->copy = loop;
4458 /* In case the function is used for loop header copying (which is the primary
4459 use), ensure that EXIT and its copy will be the new latch and entry edges. */
4460 if (loop->header == entry->dest)
4462 copying_header = true;
4463 loop->copy = loop->outer;
4465 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
4466 return false;
4468 for (i = 0; i < n_region; i++)
4469 if (region[i] != exit->src
4470 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
4471 return false;
4474 if (!region_copy)
4476 region_copy = XNEWVEC (basic_block, n_region);
4477 free_region_copy = true;
4480 gcc_assert (!need_ssa_update_p ());
4482 /* Record blocks outside the region that are dominated by something
4483 inside. */
4484 doms = XNEWVEC (basic_block, n_basic_blocks);
4485 initialize_original_copy_tables ();
4487 n_doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region, doms);
4489 if (entry->dest->count)
4491 total_count = entry->dest->count;
4492 entry_count = entry->count;
4493 /* Fix up corner cases, to avoid division by zero or creation of negative
4494 frequencies. */
4495 if (entry_count > total_count)
4496 entry_count = total_count;
4498 else
4500 total_freq = entry->dest->frequency;
4501 entry_freq = EDGE_FREQUENCY (entry);
4502 /* Fix up corner cases, to avoid division by zero or creation of negative
4503 frequencies. */
4504 if (total_freq == 0)
4505 total_freq = 1;
4506 else if (entry_freq > total_freq)
4507 entry_freq = total_freq;
4510 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
4511 split_edge_bb_loc (entry));
4512 if (total_count)
4514 scale_bbs_frequencies_gcov_type (region, n_region,
4515 total_count - entry_count,
4516 total_count);
4517 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
4518 total_count);
4520 else
4522 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
4523 total_freq);
4524 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
4527 if (copying_header)
4529 loop->header = exit->dest;
4530 loop->latch = exit->src;
4533 /* Redirect the entry and add the phi node arguments. */
4534 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
4535 gcc_assert (redirected != NULL);
4536 flush_pending_stmts (entry);
4538 /* Concerning updating of dominators: We must recount dominators
4539 for entry block and its copy. Anything that is outside of the
4540 region, but was dominated by something inside needs recounting as
4541 well. */
4542 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
4543 doms[n_doms++] = get_bb_original (entry->dest);
4544 iterate_fix_dominators (CDI_DOMINATORS, doms, n_doms);
4545 free (doms);
4547 /* Add the other PHI node arguments. */
4548 add_phi_args_after_copy (region_copy, n_region);
4550 /* Update the SSA web. */
4551 update_ssa (TODO_update_ssa);
4553 if (free_region_copy)
4554 free (region_copy);
4556 free_original_copy_tables ();
4557 return true;
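/* A hedged sketch of the loop-header-copying use mentioned above; the
   choice of EXIT here is hypothetical, and a real caller would pick the
   edge that is to become the new latch edge.  */
#if 0
  basic_block region[1] = { loop->header };
  edge entry = loop_preheader_edge (loop);
  edge exit = EDGE_SUCC (loop->header, 0);  /* hypothetical exit choice */
  if (tree_duplicate_sese_region (entry, exit, region, 1, NULL))
    ;  /* The header was duplicated; SSA form and dominators are updated.  */
#endif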
4561 DEF_VEC_P(basic_block);
4562 DEF_VEC_ALLOC_P(basic_block,heap);
4565 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
4566 adding blocks when the dominator traversal reaches EXIT. This
4567 function silently assumes that ENTRY strictly dominates EXIT. */
4569 static void
4570 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
4571 VEC(basic_block,heap) **bbs_p)
4573 basic_block son;
4575 for (son = first_dom_son (CDI_DOMINATORS, entry);
4576 son;
4577 son = next_dom_son (CDI_DOMINATORS, son))
4579 VEC_safe_push (basic_block, heap, *bbs_p, son);
4580 if (son != exit)
4581 gather_blocks_in_sese_region (son, exit, bbs_p);
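/* A minimal sketch of collecting a region (ENTRY_BB itself must be
   pushed by hand, since only its dominated sons are gathered); this is
   how move_sese_region_to_fn below uses it.  */
#if 0
  VEC(basic_block,heap) *bbs = NULL;
  VEC_safe_push (basic_block, heap, bbs, entry_bb);
  gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
  /* ... iterate over BBS with VEC_iterate, then VEC_free it ...  */
#endif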
4586 struct move_stmt_d
4588 tree block;
4589 tree from_context;
4590 tree to_context;
4591 bitmap vars_to_remove;
4592 bool remap_decls_p;
4595 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
4596 contained in *TP and change the DECL_CONTEXT of every local
4597 variable referenced in *TP. */
4599 static tree
4600 move_stmt_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4602 struct move_stmt_d *p = (struct move_stmt_d *) data;
4604 if (p->block && IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (TREE_CODE (*tp))))
4605 TREE_BLOCK (*tp) = p->block;
4607 if (OMP_DIRECTIVE_P (*tp))
4609 /* Do not remap variables inside OMP directives. Variables
4610 referenced in clauses and directive header belong to the
4611 parent function and should not be moved into the child
4612 function. */
4613 p->remap_decls_p = false;
4616 if (p->remap_decls_p
4617 && DECL_P (*tp)
4618 && DECL_CONTEXT (*tp) == p->from_context)
4620 DECL_CONTEXT (*tp) = p->to_context;
4622 if (TREE_CODE (*tp) == VAR_DECL)
4624 struct function *f = DECL_STRUCT_FUNCTION (p->to_context);
4625 f->unexpanded_var_list = tree_cons (0, *tp, f->unexpanded_var_list);
4627 /* Mark *TP to be removed from the original function,
4628 otherwise it will be given a DECL_RTL when the original
4629 function is expanded. */
4630 bitmap_set_bit (p->vars_to_remove, DECL_UID (*tp));
4634 return NULL_TREE;
4638 /* Move basic block BB from function CFUN to function DEST_FN. The
4639 block is moved out of the original linked list and placed after
4640 block AFTER in the new list. Also, the block is removed from the
4641 original array of blocks and placed in DEST_FN's array of blocks.
4642 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
4643 updated to reflect the moved edges.
4645 On exit, local variables that need to be removed from
4646 CFUN->UNEXPANDED_VAR_LIST will have been added to VARS_TO_REMOVE. */
4648 static void
4649 move_block_to_fn (struct function *dest_cfun, basic_block bb,
4650 basic_block after, bool update_edge_count_p,
4651 bitmap vars_to_remove)
4653 struct control_flow_graph *cfg;
4654 edge_iterator ei;
4655 edge e;
4656 block_stmt_iterator si;
4657 struct move_stmt_d d;
4658 unsigned old_len, new_len;
4659 basic_block *addr;
4661 /* Link BB to the new linked list. */
4662 move_block_after (bb, after);
4664 /* Update the edge count in the corresponding flowgraphs. */
4665 if (update_edge_count_p)
4666 FOR_EACH_EDGE (e, ei, bb->succs)
4668 cfun->cfg->x_n_edges--;
4669 dest_cfun->cfg->x_n_edges++;
4672 /* Remove BB from the original basic block array. */
4673 VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
4674 cfun->cfg->x_n_basic_blocks--;
4676 /* Grow DEST_CFUN's basic block array if needed. */
4677 cfg = dest_cfun->cfg;
4678 cfg->x_n_basic_blocks++;
4679 if (bb->index > cfg->x_last_basic_block)
4680 cfg->x_last_basic_block = bb->index;
4682 old_len = VEC_length (basic_block, cfg->x_basic_block_info);
4683 if ((unsigned) cfg->x_last_basic_block >= old_len)
4685 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
4686 VEC_safe_grow (basic_block, gc, cfg->x_basic_block_info, new_len);
4687 addr = VEC_address (basic_block, cfg->x_basic_block_info);
4688 memset (&addr[old_len], 0, sizeof (basic_block) * (new_len - old_len));
4691 VEC_replace (basic_block, cfg->x_basic_block_info,
4692 cfg->x_last_basic_block, bb);
4694 /* The statements in BB need to be associated with a new TREE_BLOCK.
4695 Labels need to be associated with a new label-to-block map. */
4696 memset (&d, 0, sizeof (d));
4697 d.vars_to_remove = vars_to_remove;
4699 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
4701 tree stmt = bsi_stmt (si);
4703 d.from_context = cfun->decl;
4704 d.to_context = dest_cfun->decl;
4705 d.remap_decls_p = true;
4706 if (TREE_BLOCK (stmt))
4707 d.block = DECL_INITIAL (dest_cfun->decl);
4709 walk_tree (&stmt, move_stmt_r, &d, NULL);
4711 if (TREE_CODE (stmt) == LABEL_EXPR)
4713 tree label = LABEL_EXPR_LABEL (stmt);
4714 int uid = LABEL_DECL_UID (label);
4716 gcc_assert (uid > -1);
4718 old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
4719 if (old_len <= (unsigned) uid)
4721 new_len = 3 * uid / 2 + 1; /* Make sure index UID is covered.  */
4722 VEC_safe_grow (basic_block, gc, cfg->x_label_to_block_map,
4723 new_len);
4724 addr = VEC_address (basic_block, cfg->x_label_to_block_map);
4725 memset (&addr[old_len], 0,
4726 sizeof (basic_block) * (new_len - old_len));
4729 VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
4730 VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
4732 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
4734 if (uid >= dest_cfun->last_label_uid)
4735 dest_cfun->last_label_uid = uid + 1;
4741 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
4742 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
4743 single basic block in the original CFG and the new basic block is
4744 returned. DEST_CFUN must not have a CFG yet.
4746 Note that the region need not be a pure SESE region. Blocks inside
4747 the region may contain calls to abort/exit. The only restriction
4748 is that ENTRY_BB should be the only entry point and it must
4749 dominate EXIT_BB.
4751 All local variables referenced in the region are assumed to be in
4752 the corresponding BLOCK_VARS and unexpanded variable lists
4753 associated with DEST_CFUN. */
4755 basic_block
4756 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
4757 basic_block exit_bb)
4759 VEC(basic_block,heap) *bbs;
4760 basic_block after, bb, *entry_pred, *exit_succ;
4761 struct function *saved_cfun;
4762 int *entry_flag, *exit_flag;
4763 unsigned i, num_entry_edges, num_exit_edges;
4764 edge e;
4765 edge_iterator ei;
4766 bitmap vars_to_remove;
4768 saved_cfun = cfun;
4770 /* Collect all the blocks in the region. Manually add ENTRY_BB
4771 because it won't be added by gather_blocks_in_sese_region. */
4772 calculate_dominance_info (CDI_DOMINATORS);
4774 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
4775 region. */
4776 gcc_assert (entry_bb != exit_bb
4777 && dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb));
4779 bbs = NULL;
4780 VEC_safe_push (basic_block, heap, bbs, entry_bb);
4781 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
4783 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
4784 the predecessor edges to ENTRY_BB and the successor edges to
4785 EXIT_BB so that we can re-attach them to the new basic block that
4786 will replace the region. */
4787 num_entry_edges = EDGE_COUNT (entry_bb->preds);
4788 entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
4789 entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
4790 i = 0;
4791 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
4793 entry_flag[i] = e->flags;
4794 entry_pred[i++] = e->src;
4795 remove_edge (e);
4798 num_exit_edges = EDGE_COUNT (exit_bb->succs);
4799 exit_succ = (basic_block *) xcalloc (num_exit_edges, sizeof (basic_block));
4800 exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
4801 i = 0;
4802 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
4804 exit_flag[i] = e->flags;
4805 exit_succ[i++] = e->dest;
4806 remove_edge (e);
4809 /* Switch context to the child function to initialize DEST_FN's CFG. */
4810 gcc_assert (dest_cfun->cfg == NULL);
4811 cfun = dest_cfun;
4812 init_empty_tree_cfg ();
4813 cfun = saved_cfun;
4815 /* Move blocks from BBS into DEST_CFUN. */
4816 gcc_assert (VEC_length (basic_block, bbs) >= 2);
4817 after = dest_cfun->cfg->x_entry_block_ptr;
4818 vars_to_remove = BITMAP_ALLOC (NULL);
4819 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
4821 /* No need to update edge counts on the last block. It has
4822 already been updated earlier when we detached the region from
4823 the original CFG. */
4824 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, vars_to_remove);
4825 after = bb;
4828 /* Remove the variables marked in VARS_TO_REMOVE from
4829 CFUN->UNEXPANDED_VAR_LIST. Otherwise, they will be given a
4830 DECL_RTL in the context of CFUN. */
4831 if (!bitmap_empty_p (vars_to_remove))
4833 tree *p;
4835 for (p = &cfun->unexpanded_var_list; *p; )
4837 tree var = TREE_VALUE (*p);
4838 if (bitmap_bit_p (vars_to_remove, DECL_UID (var)))
4840 *p = TREE_CHAIN (*p);
4841 continue;
4844 p = &TREE_CHAIN (*p);
4848 BITMAP_FREE (vars_to_remove);
4850 /* Rewire the entry and exit blocks. ENTRY_BB becomes the
4851 successor of DEST_FN's ENTRY_BLOCK_PTR and EXIT_BB becomes the
4852 predecessor of DEST_FN's EXIT_BLOCK_PTR. We need to switch CFUN
4853 between DEST_CFUN and SAVED_CFUN so that the various CFG
4854 manipulation functions get to the right CFG.
4857 FIXME, this is silly. The CFG ought to become a parameter to
4858 these helpers. */
4859 cfun = dest_cfun;
4860 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
4861 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
4862 cfun = saved_cfun;
4864 /* Back in the original function, the SESE region has disappeared,
4865 create a new basic block in its place. */
4866 bb = create_empty_bb (entry_pred[0]);
4867 for (i = 0; i < num_entry_edges; i++)
4868 make_edge (entry_pred[i], bb, entry_flag[i]);
4870 for (i = 0; i < num_exit_edges; i++)
4871 make_edge (bb, exit_succ[i], exit_flag[i]);
4873 free (exit_flag);
4874 free (entry_flag);
4875 free (entry_pred);
4876 free (exit_succ);
4877 free_dominance_info (CDI_DOMINATORS);
4878 free_dominance_info (CDI_POST_DOMINATORS);
4879 VEC_free (basic_block, heap, bbs);
4881 return bb;
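/* A hedged sketch of a caller that outlines a region into a separate
   function CHILD_FN (all of the names below are placeholders).  */
#if 0
  struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  basic_block merged_bb
    = move_sese_region_to_fn (child_cfun, region_entry_bb, region_exit_bb);
  /* MERGED_BB now stands for the whole region in the original CFG.  */
#endif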
4885 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h) */
4887 void
4888 dump_function_to_file (tree fn, FILE *file, int flags)
4890 tree arg, vars, var;
4891 bool ignore_topmost_bind = false, any_var = false;
4892 basic_block bb;
4893 tree chain;
4894 struct function *saved_cfun;
4896 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
4898 arg = DECL_ARGUMENTS (fn);
4899 while (arg)
4901 print_generic_expr (file, arg, dump_flags);
4902 if (TREE_CHAIN (arg))
4903 fprintf (file, ", ");
4904 arg = TREE_CHAIN (arg);
4906 fprintf (file, ")\n");
4908 if (flags & TDF_DETAILS)
4909 dump_eh_tree (file, DECL_STRUCT_FUNCTION (fn));
4910 if (flags & TDF_RAW)
4912 dump_node (fn, TDF_SLIM | flags, file);
4913 return;
4916 /* Switch CFUN to point to FN. */
4917 saved_cfun = cfun;
4918 cfun = DECL_STRUCT_FUNCTION (fn);
4920 /* When GIMPLE is lowered, the variables are no longer available in
4921 BIND_EXPRs, so display them separately. */
4922 if (cfun && cfun->decl == fn && cfun->unexpanded_var_list)
4924 ignore_topmost_bind = true;
4926 fprintf (file, "{\n");
4927 for (vars = cfun->unexpanded_var_list; vars; vars = TREE_CHAIN (vars))
4929 var = TREE_VALUE (vars);
4931 print_generic_decl (file, var, flags);
4932 fprintf (file, "\n");
4934 any_var = true;
4938 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
4940 /* Make a CFG based dump. */
4941 check_bb_profile (ENTRY_BLOCK_PTR, file);
4942 if (!ignore_topmost_bind)
4943 fprintf (file, "{\n");
4945 if (any_var && n_basic_blocks)
4946 fprintf (file, "\n");
4948 FOR_EACH_BB (bb)
4949 dump_generic_bb (file, bb, 2, flags);
4951 fprintf (file, "}\n");
4952 check_bb_profile (EXIT_BLOCK_PTR, file);
4954 else
4956 int indent;
4958 /* Make a tree based dump. */
4959 chain = DECL_SAVED_TREE (fn);
4961 if (chain && TREE_CODE (chain) == BIND_EXPR)
4963 if (ignore_topmost_bind)
4965 chain = BIND_EXPR_BODY (chain);
4966 indent = 2;
4968 else
4969 indent = 0;
4971 else
4973 if (!ignore_topmost_bind)
4974 fprintf (file, "{\n");
4975 indent = 2;
4978 if (any_var)
4979 fprintf (file, "\n");
4981 print_generic_stmt_indented (file, chain, flags, indent);
4982 if (ignore_topmost_bind)
4983 fprintf (file, "}\n");
4986 fprintf (file, "\n\n");
4988 /* Restore CFUN. */
4989 cfun = saved_cfun;
4993 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
4995 void
4996 debug_function (tree fn, int flags)
4998 dump_function_to_file (fn, stderr, flags);
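/* For instance, from a debugger one might dump the current function
   with extra detail via:

     (gdb) call debug_function (current_function_decl, TDF_DETAILS)  */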
5002 /* Pretty print of the loops intermediate representation. */
5003 static void print_loop (FILE *, struct loop *, int);
5004 static void print_pred_bbs (FILE *, basic_block bb);
5005 static void print_succ_bbs (FILE *, basic_block bb);
5008 /* Print on FILE the indexes for the predecessors of basic_block BB. */
5010 static void
5011 print_pred_bbs (FILE *file, basic_block bb)
5013 edge e;
5014 edge_iterator ei;
5016 FOR_EACH_EDGE (e, ei, bb->preds)
5017 fprintf (file, "bb_%d ", e->src->index);
5021 /* Print on FILE the indexes for the successors of basic_block BB. */
5023 static void
5024 print_succ_bbs (FILE *file, basic_block bb)
5026 edge e;
5027 edge_iterator ei;
5029 FOR_EACH_EDGE (e, ei, bb->succs)
5030 fprintf (file, "bb_%d ", e->dest->index);
5034 /* Pretty print LOOP on FILE, indented INDENT spaces. */
5036 static void
5037 print_loop (FILE *file, struct loop *loop, int indent)
5039 char *s_indent;
5040 basic_block bb;
5042 if (loop == NULL)
5043 return;
5045 s_indent = (char *) alloca ((size_t) indent + 1);
5046 memset ((void *) s_indent, ' ', (size_t) indent);
5047 s_indent[indent] = '\0';
5049 /* Print the loop's header. */
5050 fprintf (file, "%sloop_%d\n", s_indent, loop->num);
5052 /* Print the loop's body. */
5053 fprintf (file, "%s{\n", s_indent);
5054 FOR_EACH_BB (bb)
5055 if (bb->loop_father == loop)
5057 /* Print the basic_block's header. */
5058 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
5059 print_pred_bbs (file, bb);
5060 fprintf (file, "}, succs = {");
5061 print_succ_bbs (file, bb);
5062 fprintf (file, "})\n");
5064 /* Print the basic_block's body. */
5065 fprintf (file, "%s {\n", s_indent);
5066 tree_dump_bb (bb, file, indent + 4);
5067 fprintf (file, "%s }\n", s_indent);
5070 print_loop (file, loop->inner, indent + 2);
5071 fprintf (file, "%s}\n", s_indent);
5072 print_loop (file, loop->next, indent);
5076 /* Pretty print on FILE the structure of the loops enclosing the
5077 first basic block of the current function. */
5079 void
5080 print_loop_ir (FILE *file)
5082 basic_block bb;
5084 bb = BASIC_BLOCK (NUM_FIXED_BLOCKS);
5085 if (bb && bb->loop_father)
5086 print_loop (file, bb->loop_father, 0);
5090 /* Debugging loops structure at tree level. */
5092 void
5093 debug_loop_ir (void)
5095 print_loop_ir (stderr);
5099 /* Return true if BB ends with a call, possibly followed by some
5100 instructions that must stay with the call. Return false,
5101 otherwise. */
5103 static bool
5104 tree_block_ends_with_call_p (basic_block bb)
5106 block_stmt_iterator bsi = bsi_last (bb);
5107 return get_call_expr_in (bsi_stmt (bsi)) != NULL;
5111 /* Return true if BB ends with a conditional branch. Return false,
5112 otherwise. */
5114 static bool
5115 tree_block_ends_with_condjump_p (basic_block bb)
5117 tree stmt = last_stmt (bb);
5118 return (stmt && TREE_CODE (stmt) == COND_EXPR);
5122 /* Return true if we need to add fake edge to exit at statement T.
5123 Helper function for tree_flow_call_edges_add. */
5125 static bool
5126 need_fake_edge_p (tree t)
5128 tree call;
5130 /* NORETURN and LONGJMP calls already have an edge to exit.
5131 CONST and PURE calls do not need one.
5132 We don't currently check for CONST and PURE here, although
5133 it would be a good idea, because those attributes are
5134 figured out from the RTL in mark_constant_function, and
5135 the counter incrementation code from -fprofile-arcs
5136 leads to different results from -fbranch-probabilities. */
5137 call = get_call_expr_in (t);
5138 if (call
5139 && !(call_expr_flags (call) & ECF_NORETURN))
5140 return true;
5142 if (TREE_CODE (t) == ASM_EXPR
5143 && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
5144 return true;
5146 return false;
5150 /* Add fake edges to the function exit for any non-constant and
5151 non-noreturn calls, and for volatile inline assembly, in the bitmap
5152 of blocks specified by BLOCKS, or in the whole CFG if BLOCKS is
5153 zero. Return the number of blocks that were split.
5155 The goal is to expose cases in which entering a basic block does
5156 not imply that all subsequent instructions must be executed. */
5158 static int
5159 tree_flow_call_edges_add (sbitmap blocks)
5161 int i;
5162 int blocks_split = 0;
5163 int last_bb = last_basic_block;
5164 bool check_last_block = false;
5166 if (n_basic_blocks == NUM_FIXED_BLOCKS)
5167 return 0;
5169 if (! blocks)
5170 check_last_block = true;
5171 else
5172 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
5174 /* In the last basic block, before epilogue generation, there will be
5175 a fallthru edge to EXIT. Special care is required if the last insn
5176 of the last basic block is a call because make_edge folds duplicate
5177 edges, which would result in the fallthru edge also being marked
5178 fake, which would result in the fallthru edge being removed by
5179 remove_fake_edges, which would result in an invalid CFG.
5181 Moreover, we can't elide the outgoing fake edge, since the block
5182 profiler needs to take this into account in order to solve the minimal
5183 spanning tree in the case that the call doesn't return.
5185 Handle this by adding a dummy instruction in a new last basic block. */
5186 if (check_last_block)
5188 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
5189 block_stmt_iterator bsi = bsi_last (bb);
5190 tree t = NULL_TREE;
5191 if (!bsi_end_p (bsi))
5192 t = bsi_stmt (bsi);
5194 if (t && need_fake_edge_p (t))
5196 edge e;
5198 e = find_edge (bb, EXIT_BLOCK_PTR);
5199 if (e)
5201 bsi_insert_on_edge (e, build_empty_stmt ());
5202 bsi_commit_edge_inserts ();
5207 /* Now add fake edges to the function exit for any non-constant
5208 calls, since there is no way to determine whether they will
5209 return or not. */
5210 for (i = 0; i < last_bb; i++)
5212 basic_block bb = BASIC_BLOCK (i);
5213 block_stmt_iterator bsi;
5214 tree stmt, last_stmt;
5216 if (!bb)
5217 continue;
5219 if (blocks && !TEST_BIT (blocks, i))
5220 continue;
5222 bsi = bsi_last (bb);
5223 if (!bsi_end_p (bsi))
5225 last_stmt = bsi_stmt (bsi);
5228 stmt = bsi_stmt (bsi);
5229 if (need_fake_edge_p (stmt))
5231 edge e;
5232 /* The handling above of the final block before the
5233 epilogue should be enough to verify that there is
5234 no edge to the exit block in CFG already.
5235 Calling make_edge in such case would cause us to
5236 mark that edge as fake and remove it later. */
5237 #ifdef ENABLE_CHECKING
5238 if (stmt == last_stmt)
5240 e = find_edge (bb, EXIT_BLOCK_PTR);
5241 gcc_assert (e == NULL);
5243 #endif
5245 /* Note that the following may create a new basic block
5246 and renumber the existing basic blocks. */
5247 if (stmt != last_stmt)
5249 e = split_block (bb, stmt);
5250 if (e)
5251 blocks_split++;
5253 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
5255 bsi_prev (&bsi);
5257 while (!bsi_end_p (bsi));
5261 if (blocks_split)
5262 verify_flow_info ();
5264 return blocks_split;
5267 bool
5268 tree_purge_dead_eh_edges (basic_block bb)
5270 bool changed = false;
5271 edge e;
5272 edge_iterator ei;
5273 tree stmt = last_stmt (bb);
5275 if (stmt && tree_can_throw_internal (stmt))
5276 return false;
5278 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5280 if (e->flags & EDGE_EH)
5282 remove_edge (e);
5283 changed = true;
5285 else
5286 ei_next (&ei);
5289 /* Removal of dead EH edges might change dominators of not
5290 just immediate successors. E.g. when bb1 is changed so that
5291 it no longer can throw and bb1->bb3 and bb1->bb4 are dead
5292 eh edges purged by this function in:
5296 (CFG sketch: bb1-->bb2; dead EH edges bb1-->bb3 and bb1-->bb4;
5299 bb3-->bb4; bb2 and bb4 both flow into bb5)
5304 idom(bb5) must be recomputed. For now just free the dominance
5305 info. */
5306 if (changed)
5307 free_dominance_info (CDI_DOMINATORS);
5309 return changed;
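/* A minimal sketch, assuming the last statement of BB has just been
   simplified into a form that can no longer throw.  */
#if 0
  if (tree_purge_dead_eh_edges (bb))
    ;  /* EH edges were removed and dominance info was freed; it must be
	  recomputed before the next query.  */
#endif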
5312 bool
5313 tree_purge_all_dead_eh_edges (bitmap blocks)
5315 bool changed = false;
5316 unsigned i;
5317 bitmap_iterator bi;
5319 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
5321 changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
5324 return changed;
5327 /* This function is called whenever a new edge is created or
5328 redirected. */
5330 static void
5331 tree_execute_on_growing_pred (edge e)
5333 basic_block bb = e->dest;
5335 if (phi_nodes (bb))
5336 reserve_phi_args_for_new_edge (bb);
5339 /* This function is called immediately before edge E is removed from
5340 the edge vector E->dest->preds. */
5342 static void
5343 tree_execute_on_shrinking_pred (edge e)
5345 if (phi_nodes (e->dest))
5346 remove_phi_args (e);
5349 /*---------------------------------------------------------------------------
5350 Helper functions for Loop versioning
5351 ---------------------------------------------------------------------------*/
5353 /* Adjust phi nodes for the FIRST basic block. SECOND is a copy of
5354 FIRST, and both are dominated by basic block NEW_HEAD. NEW_HEAD was
5355 created by splitting SECOND's incoming edge, so SECOND's phi nodes
5356 already have arguments for the NEW_HEAD->SECOND edge, added by
5357 split_edge (). Later an additional edge E was created to connect
5358 NEW_HEAD and FIRST. This routine adds, on edge E, the same phi
5359 arguments that the NEW_HEAD->SECOND edge received. */
5362 static void
5363 tree_lv_adjust_loop_header_phi (basic_block first, basic_block second,
5364 basic_block new_head, edge e)
5366 tree phi1, phi2;
5367 edge e2 = find_edge (new_head, second);
5369 /* Because NEW_HEAD has been created by splitting SECOND's incoming
5370 edge, we should always have an edge from NEW_HEAD to SECOND. */
5371 gcc_assert (e2 != NULL);
5373 /* Browse all 'second' basic block phi nodes and add phi args to
5374 edge 'e' for 'first' head. PHI args are always in correct order. */
5376 for (phi2 = phi_nodes (second), phi1 = phi_nodes (first);
5377 phi2 && phi1;
5378 phi2 = PHI_CHAIN (phi2), phi1 = PHI_CHAIN (phi1))
5380 tree def = PHI_ARG_DEF (phi2, e2->dest_idx);
5381 add_phi_arg (phi1, def, e);
5385 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
5386 FIRST_HEAD is the destination of the THEN part and SECOND_HEAD is
5387 the destination of the ELSE part. */
5388 static void
5389 tree_lv_add_condition_to_bb (basic_block first_head, basic_block second_head,
5390 basic_block cond_bb, void *cond_e)
5392 block_stmt_iterator bsi;
5393 tree goto1 = NULL_TREE;
5394 tree goto2 = NULL_TREE;
5395 tree new_cond_expr = NULL_TREE;
5396 tree cond_expr = (tree) cond_e;
5397 edge e0;
5399 /* Build new conditional expr */
5400 goto1 = build1 (GOTO_EXPR, void_type_node, tree_block_label (first_head));
5401 goto2 = build1 (GOTO_EXPR, void_type_node, tree_block_label (second_head));
5402 new_cond_expr = build3 (COND_EXPR, void_type_node, cond_expr, goto1, goto2);
5404 /* Add new cond in cond_bb. */
5405 bsi = bsi_start (cond_bb);
5406 bsi_insert_after (&bsi, new_cond_expr, BSI_NEW_STMT);
5407 /* Adjust edges appropriately to connect new head with first head
5408 as well as second head. */
5409 e0 = single_succ_edge (cond_bb);
5410 e0->flags &= ~EDGE_FALLTHRU;
5411 e0->flags |= EDGE_FALSE_VALUE;
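  /* At this point COND_BB ends with a statement of the form (assuming
     hypothetical labels L1 in FIRST_HEAD and L2 in SECOND_HEAD)

       if (COND_EXPR) goto L1; else goto L2;

     and the pre-existing fallthru successor edge of COND_BB has been
     re-marked as the FALSE edge.  */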
5414 struct cfg_hooks tree_cfg_hooks = {
5415 "tree",
5416 tree_verify_flow_info,
5417 tree_dump_bb, /* dump_bb */
5418 create_bb, /* create_basic_block */
5419 tree_redirect_edge_and_branch,/* redirect_edge_and_branch */
5420 tree_redirect_edge_and_branch_force,/* redirect_edge_and_branch_force */
5421 remove_bb, /* delete_basic_block */
5422 tree_split_block, /* split_block */
5423 tree_move_block_after, /* move_block_after */
5424 tree_can_merge_blocks_p, /* can_merge_blocks_p */
5425 tree_merge_blocks, /* merge_blocks */
5426 tree_predict_edge, /* predict_edge */
5427 tree_predicted_by_p, /* predicted_by_p */
5428 tree_can_duplicate_bb_p, /* can_duplicate_block_p */
5429 tree_duplicate_bb, /* duplicate_block */
5430 tree_split_edge, /* split_edge */
5431 tree_make_forwarder_block, /* make_forwarder_block */
5432 NULL, /* tidy_fallthru_edge */
5433 tree_block_ends_with_call_p, /* block_ends_with_call_p */
5434 tree_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
5435 tree_flow_call_edges_add, /* flow_call_edges_add */
5436 tree_execute_on_growing_pred, /* execute_on_growing_pred */
5437 tree_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
5438 tree_duplicate_loop_to_header_edge, /* duplicate loop for trees */
5439 tree_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
5440 tree_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
5441 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
5442 flush_pending_stmts /* flush_pending_stmts */
5446 /* Split all critical edges. */
5448 static void
5449 split_critical_edges (void)
5451 basic_block bb;
5452 edge e;
5453 edge_iterator ei;
5455 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
5456 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
5457 mappings around the calls to split_edge. */
5458 start_recording_case_labels ();
5459 FOR_ALL_BB (bb)
5461 FOR_EACH_EDGE (e, ei, bb->succs)
5462 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
5464 split_edge (e);
5467 end_recording_case_labels ();
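/* Recall that an edge E is critical when E->src has more than one
   successor and E->dest has more than one predecessor.  For example, if
   block A conditionally branches to C or D and block B also jumps to C,
   then the edge A->C is critical; split_edge inserts a new empty block
   between A and C so that statements can later be placed on that path
   alone.  */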
5470 struct tree_opt_pass pass_split_crit_edges =
5472 "crited", /* name */
5473 NULL, /* gate */
5474 split_critical_edges, /* execute */
5475 NULL, /* sub */
5476 NULL, /* next */
5477 0, /* static_pass_number */
5478 TV_TREE_SPLIT_EDGES, /* tv_id */
5479 PROP_cfg, /* properties required */
5480 PROP_no_crit_edges, /* properties_provided */
5481 0, /* properties_destroyed */
5482 0, /* todo_flags_start */
5483 TODO_dump_func, /* todo_flags_finish */
5484 0 /* letter */
5488 /* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
5489 a temporary, make sure and register it to be renamed if necessary,
5490 and finally return the temporary. Put the statements to compute
5491 EXP before the current statement in BSI. */
5493 tree
5494 gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
5496 tree t, new_stmt, orig_stmt;
5498 if (is_gimple_val (exp))
5499 return exp;
5501 t = make_rename_temp (type, NULL);
5502 new_stmt = build2 (MODIFY_EXPR, type, t, exp);
5504 orig_stmt = bsi_stmt (*bsi);
5505 SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
5506 TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
5508 bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
5510 return t;
5513 /* Build a ternary operation and gimplify it. Emit code before BSI.
5514 Return the gimple_val holding the result. */
5516 tree
5517 gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
5518 tree type, tree a, tree b, tree c)
5520 tree ret;
5522 ret = fold_build3 (code, type, a, b, c);
5523 STRIP_NOPS (ret);
5525 return gimplify_val (bsi, type, ret);
5528 /* Build a binary operation and gimplify it. Emit code before BSI.
5529 Return the gimple_val holding the result. */
5531 tree
5532 gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
5533 tree type, tree a, tree b)
5535 tree ret;
5537 ret = fold_build2 (code, type, a, b);
5538 STRIP_NOPS (ret);
5540 return gimplify_val (bsi, type, ret);
5543 /* Build a unary operation and gimplify it. Emit code before BSI.
5544 Return the gimple_val holding the result. */
5546 tree
5547 gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
5548 tree a)
5550 tree ret;
5552 ret = fold_build1 (code, type, a);
5553 STRIP_NOPS (ret);
5555 return gimplify_val (bsi, type, ret);
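/* A minimal usage sketch (TYPE, A, B and C are placeholder trees):
   compute (A + B) * C as GIMPLE temporaries inserted before BSI.  */
#if 0
  tree sum = gimplify_build2 (bsi, PLUS_EXPR, type, a, b);
  tree prod = gimplify_build2 (bsi, MULT_EXPR, type, sum, c);
#endif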
5560 /* Emit return warnings. */
5562 static void
5563 execute_warn_function_return (void)
5565 #ifdef USE_MAPPED_LOCATION
5566 source_location location;
5567 #else
5568 location_t *locus;
5569 #endif
5570 tree last;
5571 edge e;
5572 edge_iterator ei;
5574 /* If we have a path to EXIT, then we do return. */
5575 if (TREE_THIS_VOLATILE (cfun->decl)
5576 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
5578 #ifdef USE_MAPPED_LOCATION
5579 location = UNKNOWN_LOCATION;
5580 #else
5581 locus = NULL;
5582 #endif
5583 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5585 last = last_stmt (e->src);
5586 if (TREE_CODE (last) == RETURN_EXPR
5587 #ifdef USE_MAPPED_LOCATION
5588 && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
5589 #else
5590 && (locus = EXPR_LOCUS (last)) != NULL)
5591 #endif
5592 break;
5594 #ifdef USE_MAPPED_LOCATION
5595 if (location == UNKNOWN_LOCATION)
5596 location = cfun->function_end_locus;
5597 warning (0, "%H%<noreturn%> function does return", &location);
5598 #else
5599 if (!locus)
5600 locus = &cfun->function_end_locus;
5601 warning (0, "%H%<noreturn%> function does return", locus);
5602 #endif
5605 /* If we see "return;" in some basic block, then we do reach the end
5606 without returning a value. */
5607 else if (warn_return_type
5608 && !TREE_NO_WARNING (cfun->decl)
5609 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
5610 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
5612 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5614 tree last = last_stmt (e->src);
5615 if (TREE_CODE (last) == RETURN_EXPR
5616 && TREE_OPERAND (last, 0) == NULL
5617 && !TREE_NO_WARNING (last))
5619 #ifdef USE_MAPPED_LOCATION
5620 location = EXPR_LOCATION (last);
5621 if (location == UNKNOWN_LOCATION)
5622 location = cfun->function_end_locus;
5623 warning (0, "%Hcontrol reaches end of non-void function", &location);
5624 #else
5625 locus = EXPR_LOCUS (last);
5626 if (!locus)
5627 locus = &cfun->function_end_locus;
5628 warning (0, "%Hcontrol reaches end of non-void function", locus);
5629 #endif
5630 TREE_NO_WARNING (cfun->decl) = 1;
5631 break;
5638 /* Given a basic block B which ends with a conditional and has
5639 precisely two successors, determine which of the edges is taken if
5640 the conditional is true and which is taken if the conditional is
5641 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
5643 void
5644 extract_true_false_edges_from_block (basic_block b,
5645 edge *true_edge,
5646 edge *false_edge)
5648 edge e = EDGE_SUCC (b, 0);
5650 if (e->flags & EDGE_TRUE_VALUE)
5652 *true_edge = e;
5653 *false_edge = EDGE_SUCC (b, 1);
5655 else
5657 *false_edge = e;
5658 *true_edge = EDGE_SUCC (b, 1);
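/* Typical use when examining a block B that ends in a COND_EXPR;
   COND_IS_ALWAYS_TRUE is a hypothetical predicate supplied by the
   caller.  */
#if 0
  edge true_edge, false_edge;
  extract_true_false_edges_from_block (b, &true_edge, &false_edge);
  if (cond_is_always_true)
    remove_edge (false_edge);
#endif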
5662 struct tree_opt_pass pass_warn_function_return =
5664 NULL, /* name */
5665 NULL, /* gate */
5666 execute_warn_function_return, /* execute */
5667 NULL, /* sub */
5668 NULL, /* next */
5669 0, /* static_pass_number */
5670 0, /* tv_id */
5671 PROP_cfg, /* properties_required */
5672 0, /* properties_provided */
5673 0, /* properties_destroyed */
5674 0, /* todo_flags_start */
5675 0, /* todo_flags_finish */
5676 0 /* letter */
5679 /* Emit noreturn warnings. */
5681 static void
5682 execute_warn_function_noreturn (void)
5684 if (warn_missing_noreturn
5685 && !TREE_THIS_VOLATILE (cfun->decl)
5686 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
5687 && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl))
5688 warning (OPT_Wmissing_noreturn, "%Jfunction might be possible candidate "
5689 "for attribute %<noreturn%>",
5690 cfun->decl);
5693 struct tree_opt_pass pass_warn_function_noreturn =
5695 NULL, /* name */
5696 NULL, /* gate */
5697 execute_warn_function_noreturn, /* execute */
5698 NULL, /* sub */
5699 NULL, /* next */
5700 0, /* static_pass_number */
5701 0, /* tv_id */
5702 PROP_cfg, /* properties_required */
5703 0, /* properties_provided */
5704 0, /* properties_destroyed */
5705 0, /* todo_flags_start */
5706 0, /* todo_flags_finish */
5707 0 /* letter */