gcc/tree-cfg.c
1 /* Control flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "output.h"
32 #include "flags.h"
33 #include "function.h"
34 #include "expr.h"
35 #include "ggc.h"
36 #include "langhooks.h"
37 #include "diagnostic.h"
38 #include "tree-flow.h"
39 #include "timevar.h"
40 #include "tree-dump.h"
41 #include "tree-pass.h"
42 #include "toplev.h"
43 #include "except.h"
44 #include "cfgloop.h"
45 #include "cfglayout.h"
46 #include "hashtab.h"
47 #include "tree-ssa-propagate.h"
49 /* This file contains functions for building the Control Flow Graph (CFG)
50 for a function tree. */
52 /* Local declarations. */
54 /* Initial capacity for the basic block array. */
55 static const int initial_cfg_capacity = 20;
57 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
58 which use a particular edge. The CASE_LABEL_EXPRs are chained together
59 via their TREE_CHAIN field, which we clear after we're done with the
60 hash table to prevent problems with duplication of SWITCH_EXPRs.
62 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
63 update the case vector in response to edge redirections.
65 Right now this table is set up and torn down at key points in the
66 compilation process. It would be nice if we could make the table
67 more persistent. The key is getting notification of changes to
68 the CFG (particularly edge removal, creation and redirection). */
70 struct edge_to_cases_elt
72 /* The edge itself. Necessary for hashing and equality tests. */
73 edge e;
75 /* The case labels associated with this edge. We link these up via
76 their TREE_CHAIN field, then we wipe out the TREE_CHAIN fields
77 when we destroy the hash table. This prevents problems when copying
78 SWITCH_EXPRs. */
79 tree case_labels;
82 static htab_t edge_to_cases;
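/* Illustrative sketch, not part of the original file: while recording is
   active, each hash table element chains the CASE_LABEL_EXPRs that target
   one edge through their TREE_CHAIN fields, roughly

       elt->e            == E
       elt->case_labels  == CASE_1 --TREE_CHAIN--> CASE_2 --...--> NULL

   end_recording_case_labels later clears those TREE_CHAINs again (via
   edge_to_cases_cleanup) so SWITCH_EXPRs can be copied without sharing
   the chains.  */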
84 /* CFG statistics. */
85 struct cfg_stats_d
87 long num_merged_labels;
90 static struct cfg_stats_d cfg_stats;
92 /* Nonzero if we found a computed goto while building basic blocks. */
93 static bool found_computed_goto;
95 /* Basic blocks and flowgraphs. */
96 static basic_block create_bb (void *, void *, basic_block);
97 static void make_blocks (tree);
98 static void factor_computed_gotos (void);
100 /* Edges. */
101 static void make_edges (void);
102 static void make_ctrl_stmt_edges (basic_block);
103 static void make_exit_edges (basic_block);
104 static void make_cond_expr_edges (basic_block);
105 static void make_switch_expr_edges (basic_block);
106 static void make_goto_expr_edges (basic_block);
107 static edge tree_redirect_edge_and_branch (edge, basic_block);
108 static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
109 static void split_critical_edges (void);
111 /* Various helpers. */
112 static inline bool stmt_starts_bb_p (tree, tree);
113 static int tree_verify_flow_info (void);
114 static void tree_make_forwarder_block (edge);
115 static void tree_cfg2vcg (FILE *);
117 /* Flowgraph optimization and cleanup. */
118 static void tree_merge_blocks (basic_block, basic_block);
119 static bool tree_can_merge_blocks_p (basic_block, basic_block);
120 static void remove_bb (basic_block);
121 static edge find_taken_edge_computed_goto (basic_block, tree);
122 static edge find_taken_edge_cond_expr (basic_block, tree);
123 static edge find_taken_edge_switch_expr (basic_block, tree);
124 static tree find_case_label_for_value (tree, tree);
126 void
127 init_empty_tree_cfg (void)
129 /* Initialize the basic block array. */
130 init_flow ();
131 profile_status = PROFILE_ABSENT;
132 n_basic_blocks = NUM_FIXED_BLOCKS;
133 last_basic_block = NUM_FIXED_BLOCKS;
134 basic_block_info = VEC_alloc (basic_block, gc, initial_cfg_capacity);
135 VEC_safe_grow (basic_block, gc, basic_block_info, initial_cfg_capacity);
136 memset (VEC_address (basic_block, basic_block_info), 0,
137 sizeof (basic_block) * initial_cfg_capacity);
139 /* Build a mapping of labels to their associated blocks. */
140 label_to_block_map = VEC_alloc (basic_block, gc, initial_cfg_capacity);
141 VEC_safe_grow (basic_block, gc, label_to_block_map, initial_cfg_capacity);
142 memset (VEC_address (basic_block, label_to_block_map),
143 0, sizeof (basic_block) * initial_cfg_capacity);
145 SET_BASIC_BLOCK (ENTRY_BLOCK, ENTRY_BLOCK_PTR);
146 SET_BASIC_BLOCK (EXIT_BLOCK, EXIT_BLOCK_PTR);
147 ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
148 EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
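/* Illustrative note, not part of the original file: after this setup the
   flowgraph contains only the two fixed blocks, linked as

       ENTRY_BLOCK_PTR  <->  EXIT_BLOCK_PTR

   and both basic_block_info and label_to_block_map are zero-filled
   vectors with initial_cfg_capacity (20) slots, ready for make_blocks
   to populate.  */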
151 /*---------------------------------------------------------------------------
152 Create basic blocks
153 ---------------------------------------------------------------------------*/
155 /* Entry point to the CFG builder for trees. TP points to the list of
156 statements to be added to the flowgraph. */
158 static void
159 build_tree_cfg (tree *tp)
161 /* Register specific tree functions. */
162 tree_register_cfg_hooks ();
164 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
166 init_empty_tree_cfg ();
168 found_computed_goto = 0;
169 make_blocks (*tp);
171 /* Computed gotos are hell to deal with, especially if there are
172 lots of them with a large number of destinations. So we factor
173 them to a common computed goto location before we build the
174 edge list. After we convert back to normal form, we will un-factor
175 the computed gotos since factoring introduces an unwanted jump. */
176 if (found_computed_goto)
177 factor_computed_gotos ();
179 /* Make sure there is always at least one block, even if it's empty. */
180 if (n_basic_blocks == NUM_FIXED_BLOCKS)
181 create_empty_bb (ENTRY_BLOCK_PTR);
183 /* Adjust the size of the array. */
184 if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
186 size_t old_size = VEC_length (basic_block, basic_block_info);
187 basic_block *p;
188 VEC_safe_grow (basic_block, gc, basic_block_info, n_basic_blocks);
189 p = VEC_address (basic_block, basic_block_info);
190 memset (&p[old_size], 0,
191 sizeof (basic_block) * (n_basic_blocks - old_size));
194 /* To speed up statement iterator walks, we first purge dead labels. */
195 cleanup_dead_labels ();
197 /* Group case nodes to reduce the number of edges.
198 We do this after cleaning up dead labels because otherwise we miss
199 a lot of obvious case merging opportunities. */
200 group_case_labels ();
202 /* Create the edges of the flowgraph. */
203 make_edges ();
205 /* Debugging dumps. */
207 /* Write the flowgraph to a VCG file. */
209 int local_dump_flags;
210 FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
211 if (vcg_file)
213 tree_cfg2vcg (vcg_file);
214 dump_end (TDI_vcg, vcg_file);
218 #ifdef ENABLE_CHECKING
219 verify_stmts ();
220 #endif
222 /* Dump a textual representation of the flowgraph. */
223 if (dump_file)
224 dump_tree_cfg (dump_file, dump_flags);
227 static void
228 execute_build_cfg (void)
230 build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
233 struct tree_opt_pass pass_build_cfg =
235 "cfg", /* name */
236 NULL, /* gate */
237 execute_build_cfg, /* execute */
238 NULL, /* sub */
239 NULL, /* next */
240 0, /* static_pass_number */
241 TV_TREE_CFG, /* tv_id */
242 PROP_gimple_leh, /* properties_required */
243 PROP_cfg, /* properties_provided */
244 0, /* properties_destroyed */
245 0, /* todo_flags_start */
246 TODO_verify_stmts, /* todo_flags_finish */
247 0 /* letter */
250 /* Search the CFG for any computed gotos. If found, factor them to a
251 common computed goto site. Also record the location of that site so
252 that we can un-factor the gotos after we have converted back to
253 normal form. */
255 static void
256 factor_computed_gotos (void)
258 basic_block bb;
259 tree factored_label_decl = NULL;
260 tree var = NULL;
261 tree factored_computed_goto_label = NULL;
262 tree factored_computed_goto = NULL;
264 /* We know there are one or more computed gotos in this function.
265 Examine the last statement in each basic block to see if the block
266 ends with a computed goto. */
268 FOR_EACH_BB (bb)
270 block_stmt_iterator bsi = bsi_last (bb);
271 tree last;
273 if (bsi_end_p (bsi))
274 continue;
275 last = bsi_stmt (bsi);
277 /* Ignore the computed goto we create when we factor the original
278 computed gotos. */
279 if (last == factored_computed_goto)
280 continue;
282 /* If the last statement is a computed goto, factor it. */
283 if (computed_goto_p (last))
285 tree assignment;
287 /* The first time we find a computed goto we need to create
288 the factored goto block and the variable each original
289 computed goto will use for its goto destination. */
290 if (! factored_computed_goto)
292 basic_block new_bb = create_empty_bb (bb);
293 block_stmt_iterator new_bsi = bsi_start (new_bb);
295 /* Create the destination of the factored goto. Each original
296 computed goto will put its desired destination into this
297 variable and jump to the label we create immediately
298 below. */
299 var = create_tmp_var (ptr_type_node, "gotovar");
301 /* Build a label for the new block which will contain the
302 factored computed goto. */
303 factored_label_decl = create_artificial_label ();
304 factored_computed_goto_label
305 = build1 (LABEL_EXPR, void_type_node, factored_label_decl);
306 bsi_insert_after (&new_bsi, factored_computed_goto_label,
307 BSI_NEW_STMT);
309 /* Build our new computed goto. */
310 factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
311 bsi_insert_after (&new_bsi, factored_computed_goto,
312 BSI_NEW_STMT);
315 /* Copy the original computed goto's destination into VAR. */
316 assignment = build2 (MODIFY_EXPR, ptr_type_node,
317 var, GOTO_DESTINATION (last));
318 bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);
320 /* And re-vector the computed goto to the new destination. */
321 GOTO_DESTINATION (last) = factored_label_decl;
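/* Illustrative sketch, not part of the original file: in source terms
   (using the GNU C computed-goto extension), factoring rewrites every

       goto *DEST_i;

   into

       gotovar = DEST_i;
       goto factored;

   and appends a single dispatch block to the function:

     factored:
       goto *gotovar;

   so that only the factored block needs an edge to every address-taken
   label.  "gotovar" matches the temporary created above; "factored" is a
   made-up name for the artificial label built by create_artificial_label.  */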
327 /* Build a flowgraph for the statement_list STMT_LIST. */
329 static void
330 make_blocks (tree stmt_list)
332 tree_stmt_iterator i = tsi_start (stmt_list);
333 tree stmt = NULL;
334 bool start_new_block = true;
335 bool first_stmt_of_list = true;
336 basic_block bb = ENTRY_BLOCK_PTR;
338 while (!tsi_end_p (i))
340 tree prev_stmt;
342 prev_stmt = stmt;
343 stmt = tsi_stmt (i);
345 /* If the statement starts a new basic block or if we have determined
346 in a previous pass that we need to create a new block for STMT, do
347 so now. */
348 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
350 if (!first_stmt_of_list)
351 stmt_list = tsi_split_statement_list_before (&i);
352 bb = create_basic_block (stmt_list, NULL, bb);
353 start_new_block = false;
356 /* Now add STMT to BB and create the subgraphs for special statement
357 codes. */
358 set_bb_for_stmt (stmt, bb);
360 if (computed_goto_p (stmt))
361 found_computed_goto = true;
363 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
364 next iteration. */
365 if (stmt_ends_bb_p (stmt))
366 start_new_block = true;
368 tsi_next (&i);
369 first_stmt_of_list = false;
374 /* Create and return a new empty basic block after bb AFTER. */
376 static basic_block
377 create_bb (void *h, void *e, basic_block after)
379 basic_block bb;
381 gcc_assert (!e);
383 /* Create and initialize a new basic block. Since alloc_block uses
384 ggc_alloc_cleared to allocate a basic block, we do not have to
385 clear the newly allocated basic block here. */
386 bb = alloc_block ();
388 bb->index = last_basic_block;
389 bb->flags = BB_NEW;
390 bb->stmt_list = h ? (tree) h : alloc_stmt_list ();
392 /* Add the new block to the linked list of blocks. */
393 link_block (bb, after);
395 /* Grow the basic block array if needed. */
396 if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
398 size_t old_size = VEC_length (basic_block, basic_block_info);
399 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
400 basic_block *p;
401 VEC_safe_grow (basic_block, gc, basic_block_info, new_size);
402 p = VEC_address (basic_block, basic_block_info);
403 memset (&p[old_size], 0, sizeof (basic_block) * (new_size - old_size));
406 /* Add the newly created block to the array. */
407 SET_BASIC_BLOCK (last_basic_block, bb);
409 n_basic_blocks++;
410 last_basic_block++;
412 return bb;
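/* Illustrative note, not part of the original file: the growth policy
   above enlarges basic_block_info by roughly 25% whenever it fills up.
   For example, with last_basic_block == 20:

       new_size = 20 + (20 + 3) / 4 = 20 + 5 = 25

   which keeps reallocations infrequent without over-allocating.  */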
416 /*---------------------------------------------------------------------------
417 Edge creation
418 ---------------------------------------------------------------------------*/
420 /* Fold COND_EXPR_COND of each COND_EXPR. */
422 void
423 fold_cond_expr_cond (void)
425 basic_block bb;
427 FOR_EACH_BB (bb)
429 tree stmt = last_stmt (bb);
431 if (stmt
432 && TREE_CODE (stmt) == COND_EXPR)
434 tree cond = fold (COND_EXPR_COND (stmt));
435 if (integer_zerop (cond))
436 COND_EXPR_COND (stmt) = boolean_false_node;
437 else if (integer_onep (cond))
438 COND_EXPR_COND (stmt) = boolean_true_node;
443 /* Join all the blocks in the flowgraph. */
445 static void
446 make_edges (void)
448 basic_block bb;
450 /* Create an edge from entry to the first block with executable
451 statements in it. */
452 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);
454 /* Traverse the basic block array placing edges. */
455 FOR_EACH_BB (bb)
457 tree first = first_stmt (bb);
458 tree last = last_stmt (bb);
460 if (first)
462 /* Edges for statements that always alter flow control. */
463 if (is_ctrl_stmt (last))
464 make_ctrl_stmt_edges (bb);
466 /* Edges for statements that sometimes alter flow control. */
467 if (is_ctrl_altering_stmt (last))
468 make_exit_edges (bb);
471 /* Finally, if no edges were created above, this is a regular
472 basic block that only needs a fallthru edge. */
473 if (EDGE_COUNT (bb->succs) == 0)
474 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
477 /* We do not care about fake edges, so remove any that the CFG
478 builder inserted for completeness. */
479 remove_fake_exit_edges ();
481 /* Fold COND_EXPR_COND of each COND_EXPR. */
482 fold_cond_expr_cond ();
484 /* Clean up the graph and warn for unreachable code. */
485 cleanup_tree_cfg ();
489 /* Link an OMP_SECTIONS block to all the OMP_SECTION blocks in its body. */
491 static void
492 make_omp_sections_edges (basic_block bb)
494 basic_block exit_bb;
495 size_t i, n;
496 tree vec, stmt;
498 stmt = last_stmt (bb);
499 vec = OMP_SECTIONS_SECTIONS (stmt);
500 n = TREE_VEC_LENGTH (vec);
501 exit_bb = bb_for_stmt (TREE_VEC_ELT (vec, n - 1));
503 for (i = 0; i < n - 1; i += 2)
505 basic_block start_bb = bb_for_stmt (TREE_VEC_ELT (vec, i));
506 basic_block end_bb = bb_for_stmt (TREE_VEC_ELT (vec, i + 1));
507 make_edge (bb, start_bb, EDGE_ABNORMAL);
508 make_edge (end_bb, exit_bb, EDGE_FALLTHRU);
511 /* Once the CFG has been built, the vector of sections is no longer
512 useful. The region can be easily obtained with build_omp_regions.
513 Furthermore, this sharing of tree expressions is not allowed by the
514 statement verifier. */
515 OMP_SECTIONS_SECTIONS (stmt) = NULL_TREE;
520 /* Create edges for control statement at basic block BB. */
522 static void
523 make_ctrl_stmt_edges (basic_block bb)
525 tree last = last_stmt (bb);
527 gcc_assert (last);
528 switch (TREE_CODE (last))
530 case GOTO_EXPR:
531 make_goto_expr_edges (bb);
532 break;
534 case RETURN_EXPR:
535 make_edge (bb, EXIT_BLOCK_PTR, 0);
536 break;
538 case COND_EXPR:
539 make_cond_expr_edges (bb);
540 break;
542 case SWITCH_EXPR:
543 make_switch_expr_edges (bb);
544 break;
546 case RESX_EXPR:
547 make_eh_edges (last);
548 /* Yet another NORETURN hack. */
549 if (EDGE_COUNT (bb->succs) == 0)
550 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
551 break;
553 default:
554 gcc_unreachable ();
559 /* Create exit edges for statements in block BB that alter the flow of
560 control. Statements that alter the control flow are 'goto', 'return'
561 and calls to non-returning functions. */
563 static void
564 make_exit_edges (basic_block bb)
566 tree last = last_stmt (bb), op;
568 gcc_assert (last);
569 switch (TREE_CODE (last))
571 case RESX_EXPR:
572 break;
573 case CALL_EXPR:
574 /* If this function receives a nonlocal goto, then we need to
575 make edges from this call site to all the nonlocal goto
576 handlers. */
577 if (TREE_SIDE_EFFECTS (last)
578 && current_function_has_nonlocal_label)
579 make_goto_expr_edges (bb);
581 /* If this statement has reachable exception handlers, then
582 create abnormal edges to them. */
583 make_eh_edges (last);
585 /* Some calls are known not to return. For such calls we create
586 a fake edge.
588 We really need to revamp how we build edges so that it's not
589 such a bloody pain to avoid creating edges for this case since
590 all we do is remove these edges when we're done building the
591 CFG. */
592 if (call_expr_flags (last) & ECF_NORETURN)
594 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
595 return;
598 /* Don't forget the fall-thru edge. */
599 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
600 break;
602 case MODIFY_EXPR:
603 /* A MODIFY_EXPR may have a CALL_EXPR on its RHS and the CALL_EXPR
604 may have an abnormal edge. Search the RHS for this case and
605 create any required edges. */
606 op = get_call_expr_in (last);
607 if (op && TREE_SIDE_EFFECTS (op)
608 && current_function_has_nonlocal_label)
609 make_goto_expr_edges (bb);
611 make_eh_edges (last);
612 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
613 break;
615 case OMP_PARALLEL:
616 case OMP_FOR:
617 case OMP_SINGLE:
618 case OMP_MASTER:
619 case OMP_ORDERED:
620 case OMP_CRITICAL:
621 make_edge (bb, bb->next_bb, EDGE_ABNORMAL);
623 case OMP_RETURN_EXPR:
624 if (EDGE_COUNT (bb->succs) == 0)
625 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
626 break;
628 case OMP_SECTIONS:
629 make_omp_sections_edges (bb);
630 break;
632 case OMP_SECTION:
633 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
634 break;
636 default:
637 gcc_unreachable ();
642 /* Create the edges for a COND_EXPR starting at block BB.
643 At this point, both clauses must contain only simple gotos. */
645 static void
646 make_cond_expr_edges (basic_block bb)
648 tree entry = last_stmt (bb);
649 basic_block then_bb, else_bb;
650 tree then_label, else_label;
651 edge e;
653 gcc_assert (entry);
654 gcc_assert (TREE_CODE (entry) == COND_EXPR);
656 /* Entry basic blocks for each component. */
657 then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
658 else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
659 then_bb = label_to_block (then_label);
660 else_bb = label_to_block (else_label);
662 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
663 #ifdef USE_MAPPED_LOCATION
664 e->goto_locus = EXPR_LOCATION (COND_EXPR_THEN (entry));
665 #else
666 e->goto_locus = EXPR_LOCUS (COND_EXPR_THEN (entry));
667 #endif
668 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
669 if (e)
671 #ifdef USE_MAPPED_LOCATION
672 e->goto_locus = EXPR_LOCATION (COND_EXPR_ELSE (entry));
673 #else
674 e->goto_locus = EXPR_LOCUS (COND_EXPR_ELSE (entry));
675 #endif
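/* Illustrative sketch, not part of the original file: by this point a
   lowered COND_EXPR has the shape

       if (cond) goto <then_label>; else goto <else_label>;

   so the two edges created above simply connect BB to the blocks holding
   <then_label> and <else_label>, tagged EDGE_TRUE_VALUE and
   EDGE_FALSE_VALUE respectively.  */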
679 /* Hashing routine for EDGE_TO_CASES. */
681 static hashval_t
682 edge_to_cases_hash (const void *p)
684 edge e = ((struct edge_to_cases_elt *)p)->e;
686 /* Hash on the edge itself (which is a pointer). */
687 return htab_hash_pointer (e);
690 /* Equality routine for EDGE_TO_CASES, edges are unique, so testing
691 for equality is just a pointer comparison. */
693 static int
694 edge_to_cases_eq (const void *p1, const void *p2)
696 edge e1 = ((struct edge_to_cases_elt *)p1)->e;
697 edge e2 = ((struct edge_to_cases_elt *)p2)->e;
699 return e1 == e2;
702 /* Called for each element in the hash table (P) as we delete the
703 edge to cases hash table.
705 Clear all the TREE_CHAINs to prevent problems with copying of
706 SWITCH_EXPRs and structure sharing rules, then free the hash table
707 element. */
709 static void
710 edge_to_cases_cleanup (void *p)
712 struct edge_to_cases_elt *elt = (struct edge_to_cases_elt *) p;
713 tree t, next;
715 for (t = elt->case_labels; t; t = next)
717 next = TREE_CHAIN (t);
718 TREE_CHAIN (t) = NULL;
720 free (p);
723 /* Start recording information mapping edges to case labels. */
725 void
726 start_recording_case_labels (void)
728 gcc_assert (edge_to_cases == NULL);
730 edge_to_cases = htab_create (37,
731 edge_to_cases_hash,
732 edge_to_cases_eq,
733 edge_to_cases_cleanup);
736 /* Return nonzero if we are recording information for case labels. */
738 static bool
739 recording_case_labels_p (void)
741 return (edge_to_cases != NULL);
744 /* Stop recording information mapping edges to case labels and
745 remove any information we have recorded. */
746 void
747 end_recording_case_labels (void)
749 htab_delete (edge_to_cases);
750 edge_to_cases = NULL;
753 /* Record that CASE_LABEL (a CASE_LABEL_EXPR) references edge E. */
755 static void
756 record_switch_edge (edge e, tree case_label)
758 struct edge_to_cases_elt *elt;
759 void **slot;
761 /* Build a hash table element so we can see if E is already
762 in the table. */
763 elt = XNEW (struct edge_to_cases_elt);
764 elt->e = e;
765 elt->case_labels = case_label;
767 slot = htab_find_slot (edge_to_cases, elt, INSERT);
769 if (*slot == NULL)
771 /* E was not in the hash table. Install E into the hash table. */
772 *slot = (void *)elt;
774 else
776 /* E was already in the hash table. Free ELT as we do not need it
777 anymore. */
778 free (elt);
780 /* Get the entry stored in the hash table. */
781 elt = (struct edge_to_cases_elt *) *slot;
783 /* Add it to the chain of CASE_LABEL_EXPRs referencing E. */
784 TREE_CHAIN (case_label) = elt->case_labels;
785 elt->case_labels = case_label;
789 /* If we are inside a {start,end}_recording_cases block, then return
790 a chain of CASE_LABEL_EXPRs from T which reference E.
792 Otherwise return NULL. */
794 static tree
795 get_cases_for_edge (edge e, tree t)
797 struct edge_to_cases_elt elt, *elt_p;
798 void **slot;
799 size_t i, n;
800 tree vec;
802 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
803 chains available. Return NULL so the caller can detect this case. */
804 if (!recording_case_labels_p ())
805 return NULL;
807 restart:
808 elt.e = e;
809 elt.case_labels = NULL;
810 slot = htab_find_slot (edge_to_cases, &elt, NO_INSERT);
812 if (slot)
814 elt_p = (struct edge_to_cases_elt *)*slot;
815 return elt_p->case_labels;
818 /* If we did not find E in the hash table, then this must be the first
819 time we have been queried for information about E & T. Add all the
820 elements from T to the hash table then perform the query again. */
822 vec = SWITCH_LABELS (t);
823 n = TREE_VEC_LENGTH (vec);
824 for (i = 0; i < n; i++)
826 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
827 basic_block label_bb = label_to_block (lab);
828 record_switch_edge (find_edge (e->src, label_bb), TREE_VEC_ELT (vec, i));
830 goto restart;
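/* Illustrative usage sketch, not part of the original file: a caller that
   redirects switch edges would typically bracket its work with the
   recording API declared above, e.g.

       start_recording_case_labels ();
       ... for each redirected edge E of a SWITCH_EXPR T:
             tree cases = get_cases_for_edge (E, T);
             update the CASE_LABELs on that chain ...
       end_recording_case_labels ();

   Outside such a bracket, get_cases_for_edge returns NULL and the caller
   must scan SWITCH_LABELS itself.  */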
833 /* Create the edges for a SWITCH_EXPR starting at block BB.
834 At this point, the switch body has been lowered and the
835 SWITCH_LABELS filled in, so this is in effect a multi-way branch. */
837 static void
838 make_switch_expr_edges (basic_block bb)
840 tree entry = last_stmt (bb);
841 size_t i, n;
842 tree vec;
844 vec = SWITCH_LABELS (entry);
845 n = TREE_VEC_LENGTH (vec);
847 for (i = 0; i < n; ++i)
849 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
850 basic_block label_bb = label_to_block (lab);
851 make_edge (bb, label_bb, 0);
856 /* Return the basic block holding label DEST. */
858 basic_block
859 label_to_block_fn (struct function *ifun, tree dest)
861 int uid = LABEL_DECL_UID (dest);
863 /* We would die hard when faced by an undefined label. Emit a label to
864 the very first basic block. This will hopefully make even the dataflow
865 and undefined variable warnings quite right. */
866 if ((errorcount || sorrycount) && uid < 0)
868 block_stmt_iterator bsi =
869 bsi_start (BASIC_BLOCK (NUM_FIXED_BLOCKS));
870 tree stmt;
872 stmt = build1 (LABEL_EXPR, void_type_node, dest);
873 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
874 uid = LABEL_DECL_UID (dest);
876 if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
877 <= (unsigned int) uid)
878 return NULL;
879 return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
882 /* Create edges for a goto statement at block BB. */
884 static void
885 make_goto_expr_edges (basic_block bb)
887 tree goto_t;
888 basic_block target_bb;
889 int for_call;
890 block_stmt_iterator last = bsi_last (bb);
892 goto_t = bsi_stmt (last);
894 /* If the last statement is not a GOTO (i.e., it is a RETURN_EXPR,
895 CALL_EXPR or MODIFY_EXPR), then the edge is an abnormal edge resulting
896 from a nonlocal goto. */
897 if (TREE_CODE (goto_t) != GOTO_EXPR)
898 for_call = 1;
899 else
901 tree dest = GOTO_DESTINATION (goto_t);
902 for_call = 0;
904 /* A GOTO to a local label creates normal edges. */
905 if (simple_goto_p (goto_t))
907 edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
908 #ifdef USE_MAPPED_LOCATION
909 e->goto_locus = EXPR_LOCATION (goto_t);
910 #else
911 e->goto_locus = EXPR_LOCUS (goto_t);
912 #endif
913 bsi_remove (&last, true);
914 return;
917 /* Nothing more to do for nonlocal gotos. */
918 if (TREE_CODE (dest) == LABEL_DECL)
919 return;
921 /* Computed gotos remain. */
924 /* Look for the block starting with the destination label. In the
925 case of a computed goto, make an edge to any label block we find
926 in the CFG. */
927 FOR_EACH_BB (target_bb)
929 block_stmt_iterator bsi;
931 for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
933 tree target = bsi_stmt (bsi);
935 if (TREE_CODE (target) != LABEL_EXPR)
936 break;
938 if (
939 /* Computed GOTOs. Make an edge to every label block that has
940 been marked as a potential target for a computed goto. */
941 (FORCED_LABEL (LABEL_EXPR_LABEL (target)) && for_call == 0)
942 /* Nonlocal GOTO target. Make an edge to every label block
943 that has been marked as a potential target for a nonlocal
944 goto. */
945 || (DECL_NONLOCAL (LABEL_EXPR_LABEL (target)) && for_call == 1))
947 make_edge (bb, target_bb, EDGE_ABNORMAL);
948 break;
953 /* Degenerate case of computed goto with no labels. */
954 if (!for_call && EDGE_COUNT (bb->succs) == 0)
955 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
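/* Illustrative sketch, not part of the original file: for a computed goto
   such as

       goto *p;

   the loop above adds an abnormal edge from BB to every block whose
   leading label has FORCED_LABEL set (its address was taken), while a
   call in a function with non-local labels instead gets abnormal edges
   to every DECL_NONLOCAL label block.  */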
959 /*---------------------------------------------------------------------------
960 Flowgraph analysis
961 ---------------------------------------------------------------------------*/
963 /* Cleanup useless labels in basic blocks. This is something we wish
964 to do early because it allows us to group case labels before creating
965 the edges for the CFG, and it speeds up block statement iterators in
966 all passes later on.
967 We only run this pass once; running it more than once is probably not
968 profitable. */
970 /* A map from basic block index to the leading label of that block. */
971 static tree *label_for_bb;
973 /* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
974 static void
975 update_eh_label (struct eh_region *region)
977 tree old_label = get_eh_region_tree_label (region);
978 if (old_label)
980 tree new_label;
981 basic_block bb = label_to_block (old_label);
983 /* ??? After optimizing, there may be EH regions with labels
984 that have already been removed from the function body, so
985 there is no basic block for them. */
986 if (! bb)
987 return;
989 new_label = label_for_bb[bb->index];
990 set_eh_region_tree_label (region, new_label);
994 /* Given LABEL return the first label in the same basic block. */
995 static tree
996 main_block_label (tree label)
998 basic_block bb = label_to_block (label);
1000 /* label_to_block possibly inserted undefined label into the chain. */
1001 if (!label_for_bb[bb->index])
1002 label_for_bb[bb->index] = label;
1003 return label_for_bb[bb->index];
1006 /* Cleanup redundant labels. This is a three-step process:
1007 1) Find the leading label for each block.
1008 2) Redirect all references to labels to the leading labels.
1009 3) Cleanup all useless labels. */
1011 void
1012 cleanup_dead_labels (void)
1014 basic_block bb;
1015 label_for_bb = XCNEWVEC (tree, last_basic_block);
1017 /* Find a suitable label for each block. We use the first user-defined
1018 label if there is one, or otherwise just the first label we see. */
1019 FOR_EACH_BB (bb)
1021 block_stmt_iterator i;
1023 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
1025 tree label, stmt = bsi_stmt (i);
1027 if (TREE_CODE (stmt) != LABEL_EXPR)
1028 break;
1030 label = LABEL_EXPR_LABEL (stmt);
1032 /* If we have not yet seen a label for the current block,
1033 remember this one and see if there are more labels. */
1034 if (! label_for_bb[bb->index])
1036 label_for_bb[bb->index] = label;
1037 continue;
1040 /* If we did see a label for the current block already, but it
1041 is an artificially created label, replace it if the current
1042 label is a user defined label. */
1043 if (! DECL_ARTIFICIAL (label)
1044 && DECL_ARTIFICIAL (label_for_bb[bb->index]))
1046 label_for_bb[bb->index] = label;
1047 break;
1052 /* Now redirect all jumps/branches to the selected label.
1053 First do so for each block ending in a control statement. */
1054 FOR_EACH_BB (bb)
1056 tree stmt = last_stmt (bb);
1057 if (!stmt)
1058 continue;
1060 switch (TREE_CODE (stmt))
1062 case COND_EXPR:
1064 tree true_branch, false_branch;
1066 true_branch = COND_EXPR_THEN (stmt);
1067 false_branch = COND_EXPR_ELSE (stmt);
1069 GOTO_DESTINATION (true_branch)
1070 = main_block_label (GOTO_DESTINATION (true_branch));
1071 GOTO_DESTINATION (false_branch)
1072 = main_block_label (GOTO_DESTINATION (false_branch));
1074 break;
1077 case SWITCH_EXPR:
1079 size_t i;
1080 tree vec = SWITCH_LABELS (stmt);
1081 size_t n = TREE_VEC_LENGTH (vec);
1083 /* Replace all destination labels. */
1084 for (i = 0; i < n; ++i)
1086 tree elt = TREE_VEC_ELT (vec, i);
1087 tree label = main_block_label (CASE_LABEL (elt));
1088 CASE_LABEL (elt) = label;
1090 break;
1093 /* We have to handle GOTO_EXPRs until they're removed, and we don't
1094 remove them until after we've created the CFG edges. */
1095 case GOTO_EXPR:
1096 if (! computed_goto_p (stmt))
1098 GOTO_DESTINATION (stmt)
1099 = main_block_label (GOTO_DESTINATION (stmt));
1100 break;
1103 default:
1104 break;
1108 for_each_eh_region (update_eh_label);
1110 /* Finally, purge dead labels. All user-defined labels and labels that
1111 can be the target of non-local gotos and labels which have their
1112 address taken are preserved. */
1113 FOR_EACH_BB (bb)
1115 block_stmt_iterator i;
1116 tree label_for_this_bb = label_for_bb[bb->index];
1118 if (! label_for_this_bb)
1119 continue;
1121 for (i = bsi_start (bb); !bsi_end_p (i); )
1123 tree label, stmt = bsi_stmt (i);
1125 if (TREE_CODE (stmt) != LABEL_EXPR)
1126 break;
1128 label = LABEL_EXPR_LABEL (stmt);
1130 if (label == label_for_this_bb
1131 || ! DECL_ARTIFICIAL (label)
1132 || DECL_NONLOCAL (label)
1133 || FORCED_LABEL (label))
1134 bsi_next (&i);
1135 else
1136 bsi_remove (&i, true);
1140 free (label_for_bb);
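/* Illustrative sketch, not part of the original file: for a block that
   starts with several labels,

       L1: L2: L3:
         x = 1;
       ...
         goto L3;

   step 1 picks L1 as the leading label (the first user-defined label
   wins over artificial ones), step 2 rewrites "goto L3" into "goto L1",
   and step 3 deletes L2 and L3 provided they are artificial and neither
   forced nor non-local.  */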
1143 /* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1144 and scan the sorted vector of cases. Combine the ones jumping to the
1145 same label.
1146 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1148 void
1149 group_case_labels (void)
1151 basic_block bb;
1153 FOR_EACH_BB (bb)
1155 tree stmt = last_stmt (bb);
1156 if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
1158 tree labels = SWITCH_LABELS (stmt);
1159 int old_size = TREE_VEC_LENGTH (labels);
1160 int i, j, new_size = old_size;
1161 tree default_case = TREE_VEC_ELT (labels, old_size - 1);
1162 tree default_label;
1164 /* The default label is always the last case in a switch
1165 statement after gimplification. */
1166 default_label = CASE_LABEL (default_case);
1168 /* Look for possible opportunities to merge cases.
1169 Ignore the last element of the label vector because it
1170 must be the default case. */
1171 i = 0;
1172 while (i < old_size - 1)
1174 tree base_case, base_label, base_high;
1175 base_case = TREE_VEC_ELT (labels, i);
1177 gcc_assert (base_case);
1178 base_label = CASE_LABEL (base_case);
1180 /* Discard cases that have the same destination as the
1181 default case. */
1182 if (base_label == default_label)
1184 TREE_VEC_ELT (labels, i) = NULL_TREE;
1185 i++;
1186 new_size--;
1187 continue;
1190 base_high = CASE_HIGH (base_case) ?
1191 CASE_HIGH (base_case) : CASE_LOW (base_case);
1192 i++;
1193 /* Try to merge case labels. Break out when we reach the end
1194 of the label vector or when we cannot merge the next case
1195 label with the current one. */
1196 while (i < old_size - 1)
1198 tree merge_case = TREE_VEC_ELT (labels, i);
1199 tree merge_label = CASE_LABEL (merge_case);
1200 tree t = int_const_binop (PLUS_EXPR, base_high,
1201 integer_one_node, 1);
1203 /* Merge the cases if they jump to the same place,
1204 and their ranges are consecutive. */
1205 if (merge_label == base_label
1206 && tree_int_cst_equal (CASE_LOW (merge_case), t))
1208 base_high = CASE_HIGH (merge_case) ?
1209 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1210 CASE_HIGH (base_case) = base_high;
1211 TREE_VEC_ELT (labels, i) = NULL_TREE;
1212 new_size--;
1213 i++;
1215 else
1216 break;
1220 /* Compress the case labels in the label vector, and adjust the
1221 length of the vector. */
1222 for (i = 0, j = 0; i < new_size; i++)
1224 while (! TREE_VEC_ELT (labels, j))
1225 j++;
1226 TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
1228 TREE_VEC_LENGTH (labels) = new_size;
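/* Illustrative sketch, not part of the original file: for

       switch (x)
         {
         case 1: case 2: case 3: foo (); break;
         default: break;
         }

   the three consecutive cases branch to the same label, so the loop
   above collapses them into a single "case 1 ... 3:" entry and shrinks
   SWITCH_LABELS from four elements to two.  */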
1233 /* Checks whether we can merge block B into block A. */
1235 static bool
1236 tree_can_merge_blocks_p (basic_block a, basic_block b)
1238 tree stmt;
1239 block_stmt_iterator bsi;
1240 tree phi;
1242 if (!single_succ_p (a))
1243 return false;
1245 if (single_succ_edge (a)->flags & EDGE_ABNORMAL)
1246 return false;
1248 if (single_succ (a) != b)
1249 return false;
1251 if (!single_pred_p (b))
1252 return false;
1254 if (b == EXIT_BLOCK_PTR)
1255 return false;
1257 /* If A ends by a statement causing exceptions or something similar, we
1258 cannot merge the blocks. */
1259 stmt = last_stmt (a);
1260 if (stmt && stmt_ends_bb_p (stmt))
1261 return false;
1263 /* Do not allow a block with only a non-local label to be merged. */
1264 if (stmt && TREE_CODE (stmt) == LABEL_EXPR
1265 && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
1266 return false;
1268 /* It must be possible to eliminate all phi nodes in B. If ssa form
1269 is not up-to-date, we cannot eliminate any phis. */
1270 phi = phi_nodes (b);
1271 if (phi)
1273 if (need_ssa_update_p ())
1274 return false;
1276 for (; phi; phi = PHI_CHAIN (phi))
1277 if (!is_gimple_reg (PHI_RESULT (phi))
1278 && !may_propagate_copy (PHI_RESULT (phi), PHI_ARG_DEF (phi, 0)))
1279 return false;
1282 /* Do not remove user labels. */
1283 for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
1285 stmt = bsi_stmt (bsi);
1286 if (TREE_CODE (stmt) != LABEL_EXPR)
1287 break;
1288 if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
1289 return false;
1292 /* Protect the loop latches. */
1293 if (current_loops
1294 && b->loop_father->latch == b)
1295 return false;
1297 return true;
1300 /* Replaces all uses of NAME by VAL. */
1302 void
1303 replace_uses_by (tree name, tree val)
1305 imm_use_iterator imm_iter;
1306 use_operand_p use;
1307 tree stmt;
1308 edge e;
1309 unsigned i;
1310 VEC(tree,heap) *stmts = VEC_alloc (tree, heap, 20);
1312 FOR_EACH_IMM_USE_SAFE (use, imm_iter, name)
1314 stmt = USE_STMT (use);
1315 replace_exp (use, val);
1317 if (TREE_CODE (stmt) == PHI_NODE)
1319 e = PHI_ARG_EDGE (stmt, PHI_ARG_INDEX_FROM_USE (use));
1320 if (e->flags & EDGE_ABNORMAL)
1322 /* This can only occur for virtual operands, since
1323 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1324 would prevent replacement. */
1325 gcc_assert (!is_gimple_reg (name));
1326 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1329 else
1330 VEC_safe_push (tree, heap, stmts, stmt);
1333 /* We do not update the statements in the loop above. Consider
1334 x = w * w;
1336 If we performed the update in the first loop, the statement
1337 would be rescanned after the first occurrence of w is replaced,
1338 the new uses would be placed at the beginning of the list,
1339 and we would never process them. */
1340 for (i = 0; VEC_iterate (tree, stmts, i, stmt); i++)
1342 tree rhs;
1344 fold_stmt_inplace (stmt);
1346 rhs = get_rhs (stmt);
1347 if (TREE_CODE (rhs) == ADDR_EXPR)
1348 recompute_tree_invariant_for_addr_expr (rhs);
1350 /* If the statement could throw and now cannot, we need to prune cfg. */
1351 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
1352 tree_purge_dead_eh_edges (bb_for_stmt (stmt));
1354 mark_new_vars_to_rename (stmt);
1357 VEC_free (tree, heap, stmts);
1359 /* Also update the trees stored in loop structures. */
1360 if (current_loops)
1362 struct loop *loop;
1364 for (i = 0; i < current_loops->num; i++)
1366 loop = current_loops->parray[i];
1367 if (loop)
1368 substitute_in_loop_info (loop, name, val);
1373 /* Merge block B into block A. */
1375 static void
1376 tree_merge_blocks (basic_block a, basic_block b)
1378 block_stmt_iterator bsi;
1379 tree_stmt_iterator last;
1380 tree phi;
1382 if (dump_file)
1383 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1385 /* Remove all single-valued PHI nodes from block B of the form
1386 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1387 bsi = bsi_last (a);
1388 for (phi = phi_nodes (b); phi; phi = phi_nodes (b))
1390 tree def = PHI_RESULT (phi), use = PHI_ARG_DEF (phi, 0);
1391 tree copy;
1392 bool may_replace_uses = may_propagate_copy (def, use);
1394 /* In case we have loops to care about, do not propagate arguments of
1395 loop closed ssa phi nodes. */
1396 if (current_loops
1397 && is_gimple_reg (def)
1398 && TREE_CODE (use) == SSA_NAME
1399 && a->loop_father != b->loop_father)
1400 may_replace_uses = false;
1402 if (!may_replace_uses)
1404 gcc_assert (is_gimple_reg (def));
1406 /* Note that just emitting the copies is fine -- there is no problem
1407 with ordering of phi nodes. This is because A is the single
1408 predecessor of B, therefore results of the phi nodes cannot
1409 appear as arguments of the phi nodes. */
1410 copy = build2 (MODIFY_EXPR, void_type_node, def, use);
1411 bsi_insert_after (&bsi, copy, BSI_NEW_STMT);
1412 SET_PHI_RESULT (phi, NULL_TREE);
1413 SSA_NAME_DEF_STMT (def) = copy;
1415 else
1416 replace_uses_by (def, use);
1418 remove_phi_node (phi, NULL);
1421 /* Ensure that B follows A. */
1422 move_block_after (b, a);
1424 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1425 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1427 /* Remove labels from B and set bb_for_stmt to A for other statements. */
1428 for (bsi = bsi_start (b); !bsi_end_p (bsi);)
1430 if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
1432 tree label = bsi_stmt (bsi);
1434 bsi_remove (&bsi, false);
1435 /* Now that we can thread computed gotos, we might have
1436 a situation where we have a forced label in block B.
1437 However, the label at the start of block B might still be
1438 used in other ways (think about the runtime checking for
1439 Fortran assigned gotos). So we can not just delete the
1440 label. Instead we move the label to the start of block A. */
1441 if (FORCED_LABEL (LABEL_EXPR_LABEL (label)))
1443 block_stmt_iterator dest_bsi = bsi_start (a);
1444 bsi_insert_before (&dest_bsi, label, BSI_NEW_STMT);
1447 else
1449 set_bb_for_stmt (bsi_stmt (bsi), a);
1450 bsi_next (&bsi);
1454 /* Merge the chains. */
1455 last = tsi_last (a->stmt_list);
1456 tsi_link_after (&last, b->stmt_list, TSI_NEW_STMT);
1457 b->stmt_list = NULL;
1461 /* Return the one of the two successors of BB that is not
1462 reached by a complex edge, if there is one. Else, return BB. We use
1463 this in optimizations that use post-dominators for their heuristics,
1464 to catch the cases in C++ where function calls are involved. */
1466 basic_block
1467 single_noncomplex_succ (basic_block bb)
1469 edge e0, e1;
1470 if (EDGE_COUNT (bb->succs) != 2)
1471 return bb;
1473 e0 = EDGE_SUCC (bb, 0);
1474 e1 = EDGE_SUCC (bb, 1);
1475 if (e0->flags & EDGE_COMPLEX)
1476 return e1->dest;
1477 if (e1->flags & EDGE_COMPLEX)
1478 return e0->dest;
1480 return bb;
1485 /* Walk the function tree removing unnecessary statements.
1487 * Empty statement nodes are removed
1489 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1491 * Unnecessary COND_EXPRs are removed
1493 * Some unnecessary BIND_EXPRs are removed
1495 Clearly more work could be done. The trick is doing the analysis
1496 and removal fast enough to be a net improvement in compile times.
1498 Note that when we remove a control structure such as a COND_EXPR,
1499 BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1500 to ensure we eliminate all the useless code. */
1502 struct rus_data
1504 tree *last_goto;
1505 bool repeat;
1506 bool may_throw;
1507 bool may_branch;
1508 bool has_label;
1511 static void remove_useless_stmts_1 (tree *, struct rus_data *);
1513 static bool
1514 remove_useless_stmts_warn_notreached (tree stmt)
1516 if (EXPR_HAS_LOCATION (stmt))
1518 location_t loc = EXPR_LOCATION (stmt);
1519 if (LOCATION_LINE (loc) > 0)
1521 warning (0, "%Hwill never be executed", &loc);
1522 return true;
1526 switch (TREE_CODE (stmt))
1528 case STATEMENT_LIST:
1530 tree_stmt_iterator i;
1531 for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
1532 if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
1533 return true;
1535 break;
1537 case COND_EXPR:
1538 if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
1539 return true;
1540 if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
1541 return true;
1542 if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
1543 return true;
1544 break;
1546 case TRY_FINALLY_EXPR:
1547 case TRY_CATCH_EXPR:
1548 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
1549 return true;
1550 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
1551 return true;
1552 break;
1554 case CATCH_EXPR:
1555 return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
1556 case EH_FILTER_EXPR:
1557 return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
1558 case BIND_EXPR:
1559 return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));
1561 default:
1562 /* Not a live container. */
1563 break;
1566 return false;
1569 static void
1570 remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
1572 tree then_clause, else_clause, cond;
1573 bool save_has_label, then_has_label, else_has_label;
1575 save_has_label = data->has_label;
1576 data->has_label = false;
1577 data->last_goto = NULL;
1579 remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);
1581 then_has_label = data->has_label;
1582 data->has_label = false;
1583 data->last_goto = NULL;
1585 remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);
1587 else_has_label = data->has_label;
1588 data->has_label = save_has_label | then_has_label | else_has_label;
1590 then_clause = COND_EXPR_THEN (*stmt_p);
1591 else_clause = COND_EXPR_ELSE (*stmt_p);
1592 cond = fold (COND_EXPR_COND (*stmt_p));
1594 /* If neither arm does anything at all, we can remove the whole IF. */
1595 if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
1597 *stmt_p = build_empty_stmt ();
1598 data->repeat = true;
1601 /* If there are no reachable statements in an arm, then we can
1602 zap the entire conditional. */
1603 else if (integer_nonzerop (cond) && !else_has_label)
1605 if (warn_notreached)
1606 remove_useless_stmts_warn_notreached (else_clause);
1607 *stmt_p = then_clause;
1608 data->repeat = true;
1610 else if (integer_zerop (cond) && !then_has_label)
1612 if (warn_notreached)
1613 remove_useless_stmts_warn_notreached (then_clause);
1614 *stmt_p = else_clause;
1615 data->repeat = true;
1618 /* Check a couple of simple things on then/else with single stmts. */
1619 else
1621 tree then_stmt = expr_only (then_clause);
1622 tree else_stmt = expr_only (else_clause);
1624 /* Notice branches to a common destination. */
1625 if (then_stmt && else_stmt
1626 && TREE_CODE (then_stmt) == GOTO_EXPR
1627 && TREE_CODE (else_stmt) == GOTO_EXPR
1628 && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
1630 *stmt_p = then_stmt;
1631 data->repeat = true;
1634 /* If the THEN/ELSE clause merely assigns a value to a variable or
1635 parameter which is already known to contain that value, then
1636 remove the useless THEN/ELSE clause. */
1637 else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1639 if (else_stmt
1640 && TREE_CODE (else_stmt) == MODIFY_EXPR
1641 && TREE_OPERAND (else_stmt, 0) == cond
1642 && integer_zerop (TREE_OPERAND (else_stmt, 1)))
1643 COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
1645 else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
1646 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1647 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1648 && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
1650 tree stmt = (TREE_CODE (cond) == EQ_EXPR
1651 ? then_stmt : else_stmt);
1652 tree *location = (TREE_CODE (cond) == EQ_EXPR
1653 ? &COND_EXPR_THEN (*stmt_p)
1654 : &COND_EXPR_ELSE (*stmt_p));
1656 if (stmt
1657 && TREE_CODE (stmt) == MODIFY_EXPR
1658 && TREE_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
1659 && TREE_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
1660 *location = alloc_stmt_list ();
1664 /* Protect GOTOs in the arm of COND_EXPRs from being removed. They
1665 would be re-introduced during lowering. */
1666 data->last_goto = NULL;
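/* Illustrative sketch, not part of the original file: the last set of
   checks above turns

       if (a == 5)
         a = 5;

   into a conditional with an empty THEN clause, and

       if (a)
         ...
       else
         a = 0;

   into one with an empty ELSE clause, since on each of those arms the
   variable is already known to hold the assigned value.  */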
1670 static void
1671 remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
1673 bool save_may_branch, save_may_throw;
1674 bool this_may_branch, this_may_throw;
1676 /* Collect may_branch and may_throw information for the body only. */
1677 save_may_branch = data->may_branch;
1678 save_may_throw = data->may_throw;
1679 data->may_branch = false;
1680 data->may_throw = false;
1681 data->last_goto = NULL;
1683 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1685 this_may_branch = data->may_branch;
1686 this_may_throw = data->may_throw;
1687 data->may_branch |= save_may_branch;
1688 data->may_throw |= save_may_throw;
1689 data->last_goto = NULL;
1691 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1693 /* If the body is empty, then we can emit the FINALLY block without
1694 the enclosing TRY_FINALLY_EXPR. */
1695 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
1697 *stmt_p = TREE_OPERAND (*stmt_p, 1);
1698 data->repeat = true;
1701 /* If the handler is empty, then we can emit the TRY block without
1702 the enclosing TRY_FINALLY_EXPR. */
1703 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1705 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1706 data->repeat = true;
1709 /* If the body neither throws, nor branches, then we can safely
1710 string the TRY and FINALLY blocks together. */
1711 else if (!this_may_branch && !this_may_throw)
1713 tree stmt = *stmt_p;
1714 *stmt_p = TREE_OPERAND (stmt, 0);
1715 append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
1716 data->repeat = true;
1721 static void
1722 remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
1724 bool save_may_throw, this_may_throw;
1725 tree_stmt_iterator i;
1726 tree stmt;
1728 /* Collect may_throw information for the body only. */
1729 save_may_throw = data->may_throw;
1730 data->may_throw = false;
1731 data->last_goto = NULL;
1733 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1735 this_may_throw = data->may_throw;
1736 data->may_throw = save_may_throw;
1738 /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
1739 if (!this_may_throw)
1741 if (warn_notreached)
1742 remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
1743 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1744 data->repeat = true;
1745 return;
1748 /* Process the catch clause specially. We may be able to tell that
1749 no exceptions propagate past this point. */
1751 this_may_throw = true;
1752 i = tsi_start (TREE_OPERAND (*stmt_p, 1));
1753 stmt = tsi_stmt (i);
1754 data->last_goto = NULL;
1756 switch (TREE_CODE (stmt))
1758 case CATCH_EXPR:
1759 for (; !tsi_end_p (i); tsi_next (&i))
1761 stmt = tsi_stmt (i);
1762 /* If we catch all exceptions, then the body does not
1763 propagate exceptions past this point. */
1764 if (CATCH_TYPES (stmt) == NULL)
1765 this_may_throw = false;
1766 data->last_goto = NULL;
1767 remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
1769 break;
1771 case EH_FILTER_EXPR:
1772 if (EH_FILTER_MUST_NOT_THROW (stmt))
1773 this_may_throw = false;
1774 else if (EH_FILTER_TYPES (stmt) == NULL)
1775 this_may_throw = false;
1776 remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
1777 break;
1779 default:
1780 /* Otherwise this is a cleanup. */
1781 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1783 /* If the cleanup is empty, then we can emit the TRY block without
1784 the enclosing TRY_CATCH_EXPR. */
1785 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1787 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1788 data->repeat = true;
1790 break;
1792 data->may_throw |= this_may_throw;
1796 static void
1797 remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
1799 tree block;
1801 /* First remove anything underneath the BIND_EXPR. */
1802 remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);
1804 /* If the BIND_EXPR has no variables, then we can pull everything
1805 up one level and remove the BIND_EXPR, unless this is the toplevel
1806 BIND_EXPR for the current function or an inlined function.
1808 When this situation occurs we will want to apply this
1809 optimization again. */
1810 block = BIND_EXPR_BLOCK (*stmt_p);
1811 if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
1812 && *stmt_p != DECL_SAVED_TREE (current_function_decl)
1813 && (! block
1814 || ! BLOCK_ABSTRACT_ORIGIN (block)
1815 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1816 != FUNCTION_DECL)))
1818 *stmt_p = BIND_EXPR_BODY (*stmt_p);
1819 data->repeat = true;
1824 static void
1825 remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
1827 tree dest = GOTO_DESTINATION (*stmt_p);
1829 data->may_branch = true;
1830 data->last_goto = NULL;
1832 /* Record the last goto expr, so that we can delete it if unnecessary. */
1833 if (TREE_CODE (dest) == LABEL_DECL)
1834 data->last_goto = stmt_p;
1838 static void
1839 remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
1841 tree label = LABEL_EXPR_LABEL (*stmt_p);
1843 data->has_label = true;
1845 /* We do want to jump across non-local label receiver code. */
1846 if (DECL_NONLOCAL (label))
1847 data->last_goto = NULL;
1849 else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
1851 *data->last_goto = build_empty_stmt ();
1852 data->repeat = true;
1855 /* ??? Add something here to delete unused labels. */
1859 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
1860 decl. This allows us to eliminate redundant or useless
1861 calls to "const" functions.
1863 The gimplifier already does the same operation, but we may notice functions
1864 being const and pure once their calls have been gimplified, so we need
1865 to update the flag. */
1867 static void
1868 update_call_expr_flags (tree call)
1870 tree decl = get_callee_fndecl (call);
1871 if (!decl)
1872 return;
1873 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1874 TREE_SIDE_EFFECTS (call) = 0;
1875 if (TREE_NOTHROW (decl))
1876 TREE_NOTHROW (call) = 1;
1880 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1882 void
1883 notice_special_calls (tree t)
1885 int flags = call_expr_flags (t);
1887 if (flags & ECF_MAY_BE_ALLOCA)
1888 current_function_calls_alloca = true;
1889 if (flags & ECF_RETURNS_TWICE)
1890 current_function_calls_setjmp = true;
1894 /* Clear flags set by notice_special_calls. Used by dead code removal
1895 to update the flags. */
1897 void
1898 clear_special_calls (void)
1900 current_function_calls_alloca = false;
1901 current_function_calls_setjmp = false;
1905 static void
1906 remove_useless_stmts_1 (tree *tp, struct rus_data *data)
1908 tree t = *tp, op;
1910 switch (TREE_CODE (t))
1912 case COND_EXPR:
1913 remove_useless_stmts_cond (tp, data);
1914 break;
1916 case TRY_FINALLY_EXPR:
1917 remove_useless_stmts_tf (tp, data);
1918 break;
1920 case TRY_CATCH_EXPR:
1921 remove_useless_stmts_tc (tp, data);
1922 break;
1924 case BIND_EXPR:
1925 remove_useless_stmts_bind (tp, data);
1926 break;
1928 case GOTO_EXPR:
1929 remove_useless_stmts_goto (tp, data);
1930 break;
1932 case LABEL_EXPR:
1933 remove_useless_stmts_label (tp, data);
1934 break;
1936 case RETURN_EXPR:
1937 fold_stmt (tp);
1938 data->last_goto = NULL;
1939 data->may_branch = true;
1940 break;
1942 case CALL_EXPR:
1943 fold_stmt (tp);
1944 data->last_goto = NULL;
1945 notice_special_calls (t);
1946 update_call_expr_flags (t);
1947 if (tree_could_throw_p (t))
1948 data->may_throw = true;
1949 break;
1951 case MODIFY_EXPR:
1952 data->last_goto = NULL;
1953 fold_stmt (tp);
1954 op = get_call_expr_in (t);
1955 if (op)
1957 update_call_expr_flags (op);
1958 notice_special_calls (op);
1960 if (tree_could_throw_p (t))
1961 data->may_throw = true;
1962 break;
1964 case STATEMENT_LIST:
1966 tree_stmt_iterator i = tsi_start (t);
1967 while (!tsi_end_p (i))
1969 t = tsi_stmt (i);
1970 if (IS_EMPTY_STMT (t))
1972 tsi_delink (&i);
1973 continue;
1976 remove_useless_stmts_1 (tsi_stmt_ptr (i), data);
1978 t = tsi_stmt (i);
1979 if (TREE_CODE (t) == STATEMENT_LIST)
1981 tsi_link_before (&i, t, TSI_SAME_STMT);
1982 tsi_delink (&i);
1984 else
1985 tsi_next (&i);
1988 break;
1989 case ASM_EXPR:
1990 fold_stmt (tp);
1991 data->last_goto = NULL;
1992 break;
1994 default:
1995 data->last_goto = NULL;
1996 break;
2000 static void
2001 remove_useless_stmts (void)
2003 struct rus_data data;
2005 clear_special_calls ();
2009 memset (&data, 0, sizeof (data));
2010 remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
2012 while (data.repeat);
2016 struct tree_opt_pass pass_remove_useless_stmts =
2018 "useless", /* name */
2019 NULL, /* gate */
2020 remove_useless_stmts, /* execute */
2021 NULL, /* sub */
2022 NULL, /* next */
2023 0, /* static_pass_number */
2024 0, /* tv_id */
2025 PROP_gimple_any, /* properties_required */
2026 0, /* properties_provided */
2027 0, /* properties_destroyed */
2028 0, /* todo_flags_start */
2029 TODO_dump_func, /* todo_flags_finish */
2030 0 /* letter */
2033 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2035 static void
2036 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2038 tree phi;
2040 /* Since this block is no longer reachable, we can just delete all
2041 of its PHI nodes. */
2042 phi = phi_nodes (bb);
2043 while (phi)
2045 tree next = PHI_CHAIN (phi);
2046 remove_phi_node (phi, NULL_TREE);
2047 phi = next;
2050 /* Remove edges to BB's successors. */
2051 while (EDGE_COUNT (bb->succs) > 0)
2052 remove_edge (EDGE_SUCC (bb, 0));
2056 /* Remove statements of basic block BB. */
2058 static void
2059 remove_bb (basic_block bb)
2061 block_stmt_iterator i;
2062 #ifdef USE_MAPPED_LOCATION
2063 source_location loc = UNKNOWN_LOCATION;
2064 #else
2065 source_locus loc = 0;
2066 #endif
2068 if (dump_file)
2070 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2071 if (dump_flags & TDF_DETAILS)
2073 dump_bb (bb, dump_file, 0);
2074 fprintf (dump_file, "\n");
2078 /* If we remove the header or the latch of a loop, mark the loop for
2079 removal by setting its header and latch to NULL. */
2080 if (current_loops)
2082 struct loop *loop = bb->loop_father;
2084 if (loop->latch == bb
2085 || loop->header == bb)
2087 loop->latch = NULL;
2088 loop->header = NULL;
2090 /* Also clean up the information associated with the loop. Updating
2091 it would waste time. More importantly, it may refer to ssa
2093 names that were defined in other removed basic blocks -- these
2093 ssa names are now removed and invalid. */
2094 free_numbers_of_iterations_estimates_loop (loop);
2098 /* Remove all the instructions in the block. */
2099 for (i = bsi_start (bb); !bsi_end_p (i);)
2101 tree stmt = bsi_stmt (i);
2102 if (TREE_CODE (stmt) == LABEL_EXPR
2103 && (FORCED_LABEL (LABEL_EXPR_LABEL (stmt))
2104 || DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt))))
2106 basic_block new_bb;
2107 block_stmt_iterator new_bsi;
2109 /* A non-reachable non-local label may still be referenced.
2110 But it no longer needs to carry the extra semantics of
2111 non-locality. */
2112 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
2114 DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)) = 0;
2115 FORCED_LABEL (LABEL_EXPR_LABEL (stmt)) = 1;
2118 new_bb = bb->prev_bb;
2119 new_bsi = bsi_start (new_bb);
2120 bsi_remove (&i, false);
2121 bsi_insert_before (&new_bsi, stmt, BSI_NEW_STMT);
2123 else
2125 /* Release SSA definitions if we are in SSA. Note that we
2126 may be called when not in SSA. For example,
2127 final_cleanup calls this function via
2128 cleanup_tree_cfg. */
2129 if (in_ssa_p)
2130 release_defs (stmt);
2132 bsi_remove (&i, true);
2135 /* Don't warn for removed gotos. Gotos are often removed due to
2136 jump threading, thus resulting in bogus warnings. Not great,
2137 since this way we lose warnings for gotos in the original
2138 program that are indeed unreachable. */
2139 if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
2141 #ifdef USE_MAPPED_LOCATION
2142 if (EXPR_HAS_LOCATION (stmt))
2143 loc = EXPR_LOCATION (stmt);
2144 #else
2145 source_locus t;
2146 t = EXPR_LOCUS (stmt);
2147 if (t && LOCATION_LINE (*t) > 0)
2148 loc = t;
2149 #endif
2153 /* If requested, give a warning that the first statement in the
2154 block is unreachable. The loop above walks forward and records
2155 only the first location it finds, so LOC refers to the first
2156 statement in the block that carries a location. */
2157 #ifdef USE_MAPPED_LOCATION
2158 if (loc > BUILTINS_LOCATION)
2159 warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
2160 #else
2161 if (loc)
2162 warning (OPT_Wunreachable_code, "%Hwill never be executed", loc);
2163 #endif
2165 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2169 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2170 predicate VAL, return the edge that will be taken out of the block.
2171 If VAL does not match a unique edge, NULL is returned. */
2173 edge
2174 find_taken_edge (basic_block bb, tree val)
2176 tree stmt;
2178 stmt = last_stmt (bb);
2180 gcc_assert (stmt);
2181 gcc_assert (is_ctrl_stmt (stmt));
2182 gcc_assert (val);
2184 if (! is_gimple_min_invariant (val))
2185 return NULL;
2187 if (TREE_CODE (stmt) == COND_EXPR)
2188 return find_taken_edge_cond_expr (bb, val);
2190 if (TREE_CODE (stmt) == SWITCH_EXPR)
2191 return find_taken_edge_switch_expr (bb, val);
2193 if (computed_goto_p (stmt))
2194 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2196 gcc_unreachable ();
2199 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2200 statement, determine which of the outgoing edges will be taken out of the
2201 block. Return NULL if the taken edge cannot be determined. */
2203 static edge
2204 find_taken_edge_computed_goto (basic_block bb, tree val)
2206 basic_block dest;
2207 edge e = NULL;
2209 dest = label_to_block (val);
2210 if (dest)
2212 e = find_edge (bb, dest);
2213 gcc_assert (e != NULL);
2216 return e;
2219 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2220 statement, determine which of the two edges will be taken out of the
2221 block. Return NULL if either edge may be taken. */
2223 static edge
2224 find_taken_edge_cond_expr (basic_block bb, tree val)
2226 edge true_edge, false_edge;
2228 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2230 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2231 return (zero_p (val) ? false_edge : true_edge);
2234 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2235 statement, determine which edge will be taken out of the block. Return
2236 NULL if any edge may be taken. */
2238 static edge
2239 find_taken_edge_switch_expr (basic_block bb, tree val)
2241 tree switch_expr, taken_case;
2242 basic_block dest_bb;
2243 edge e;
2245 switch_expr = last_stmt (bb);
2246 taken_case = find_case_label_for_value (switch_expr, val);
2247 dest_bb = label_to_block (CASE_LABEL (taken_case));
2249 e = find_edge (bb, dest_bb);
2250 gcc_assert (e);
2251 return e;
2255 /* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
2256 We can make optimal use here of the fact that the case labels are
2257 sorted: We can do a binary search for a case matching VAL. */
2259 static tree
2260 find_case_label_for_value (tree switch_expr, tree val)
2262 tree vec = SWITCH_LABELS (switch_expr);
2263 size_t low, high, n = TREE_VEC_LENGTH (vec);
2264 tree default_case = TREE_VEC_ELT (vec, n - 1);
2266 for (low = -1, high = n - 1; high - low > 1; )
2268 size_t i = (high + low) / 2;
2269 tree t = TREE_VEC_ELT (vec, i);
2270 int cmp;
2272 /* Cache the result of comparing CASE_LOW and val. */
2273 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2275 if (cmp > 0)
2276 high = i;
2277 else
2278 low = i;
2280 if (CASE_HIGH (t) == NULL)
2282 /* A single-valued case label. */
2283 if (cmp == 0)
2284 return t;
2286 else
2288 /* A case range. We can only handle integer ranges. */
2289 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2290 return t;
2294 return default_case;
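/* Illustrative sketch, not part of the original file and not compiled:
   how a caller could use find_taken_edge once a control predicate has
   folded to a constant.  The function and variable names below are
   hypothetical.  */
#if 0
static void
example_prune_unreachable_succs (basic_block bb, tree folded_val)
{
  /* Ask which outgoing edge the constant selects; NULL means the
     destination is not uniquely determined.  */
  edge taken = find_taken_edge (bb, folded_val);
  unsigned ix = 0;

  if (!taken)
    return;

  /* Remove every other outgoing edge, leaving only the taken one.  */
  while (ix < EDGE_COUNT (bb->succs))
    {
      edge e = EDGE_SUCC (bb, ix);
      if (e != taken)
        remove_edge (e);
      else
        ix++;
    }
}
#endif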
2300 /*---------------------------------------------------------------------------
2301 Debugging functions
2302 ---------------------------------------------------------------------------*/
2304 /* Dump tree-specific information of block BB to file OUTF. */
2306 void
2307 tree_dump_bb (basic_block bb, FILE *outf, int indent)
2309 dump_generic_bb (outf, bb, indent, TDF_VOPS);
2313 /* Dump a basic block on stderr. */
2315 void
2316 debug_tree_bb (basic_block bb)
2318 dump_bb (bb, stderr, 0);
2322 /* Dump basic block with index N on stderr. */
2324 basic_block
2325 debug_tree_bb_n (int n)
2327 debug_tree_bb (BASIC_BLOCK (n));
2328 return BASIC_BLOCK (n);
2332 /* Dump the CFG on stderr.
2334 FLAGS are the same used by the tree dumping functions
2335 (see TDF_* in tree.h). */
2337 void
2338 debug_tree_cfg (int flags)
2340 dump_tree_cfg (stderr, flags);
2344 /* Dump the program showing basic block boundaries on the given FILE.
2346 FLAGS are the same used by the tree dumping functions (see TDF_* in
2347 tree.h). */
2349 void
2350 dump_tree_cfg (FILE *file, int flags)
2352 if (flags & TDF_DETAILS)
2354 const char *funcname
2355 = lang_hooks.decl_printable_name (current_function_decl, 2);
2357 fputc ('\n', file);
2358 fprintf (file, ";; Function %s\n\n", funcname);
2359 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2360 n_basic_blocks, n_edges, last_basic_block);
2362 brief_dump_cfg (file);
2363 fprintf (file, "\n");
2366 if (flags & TDF_STATS)
2367 dump_cfg_stats (file);
2369 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2373 /* Dump CFG statistics on FILE. */
2375 void
2376 dump_cfg_stats (FILE *file)
2378 static long max_num_merged_labels = 0;
2379 unsigned long size, total = 0;
2380 long num_edges;
2381 basic_block bb;
2382 const char * const fmt_str = "%-30s%-13s%12s\n";
2383 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2384 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2385 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2386 const char *funcname
2387 = lang_hooks.decl_printable_name (current_function_decl, 2);
2390 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2392 fprintf (file, "---------------------------------------------------------\n");
2393 fprintf (file, fmt_str, "", " Number of ", "Memory");
2394 fprintf (file, fmt_str, "", " instances ", "used ");
2395 fprintf (file, "---------------------------------------------------------\n");
2397 size = n_basic_blocks * sizeof (struct basic_block_def);
2398 total += size;
2399 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2400 SCALE (size), LABEL (size));
2402 num_edges = 0;
2403 FOR_EACH_BB (bb)
2404 num_edges += EDGE_COUNT (bb->succs);
2405 size = num_edges * sizeof (struct edge_def);
2406 total += size;
2407 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2409 fprintf (file, "---------------------------------------------------------\n");
2410 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2411 LABEL (total));
2412 fprintf (file, "---------------------------------------------------------\n");
2413 fprintf (file, "\n");
2415 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2416 max_num_merged_labels = cfg_stats.num_merged_labels;
2418 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2419 cfg_stats.num_merged_labels, max_num_merged_labels);
2421 fprintf (file, "\n");
2425 /* Dump CFG statistics on stderr. Keep extern so that it's always
2426 linked in the final executable. */
2428 void
2429 debug_cfg_stats (void)
2431 dump_cfg_stats (stderr);
2435 /* Dump the flowgraph to a .vcg FILE. */
2437 static void
2438 tree_cfg2vcg (FILE *file)
2440 edge e;
2441 edge_iterator ei;
2442 basic_block bb;
2443 const char *funcname
2444 = lang_hooks.decl_printable_name (current_function_decl, 2);
2446 /* Write the file header. */
2447 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2448 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2449 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2451 /* Write blocks and edges. */
2452 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2454 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2455 e->dest->index);
2457 if (e->flags & EDGE_FAKE)
2458 fprintf (file, " linestyle: dotted priority: 10");
2459 else
2460 fprintf (file, " linestyle: solid priority: 100");
2462 fprintf (file, " }\n");
2464 fputc ('\n', file);
2466 FOR_EACH_BB (bb)
2468 enum tree_code head_code, end_code;
2469 const char *head_name, *end_name;
2470 int head_line = 0;
2471 int end_line = 0;
2472 tree first = first_stmt (bb);
2473 tree last = last_stmt (bb);
2475 if (first)
2477 head_code = TREE_CODE (first);
2478 head_name = tree_code_name[head_code];
2479 head_line = get_lineno (first);
2481 else
2482 head_name = "no-statement";
2484 if (last)
2486 end_code = TREE_CODE (last);
2487 end_name = tree_code_name[end_code];
2488 end_line = get_lineno (last);
2490 else
2491 end_name = "no-statement";
2493 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2494 bb->index, bb->index, head_name, head_line, end_name,
2495 end_line);
2497 FOR_EACH_EDGE (e, ei, bb->succs)
2499 if (e->dest == EXIT_BLOCK_PTR)
2500 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2501 else
2502 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2504 if (e->flags & EDGE_FAKE)
2505 fprintf (file, " priority: 10 linestyle: dotted");
2506 else
2507 fprintf (file, " priority: 100 linestyle: solid");
2509 fprintf (file, " }\n");
2512 if (bb->next_bb != EXIT_BLOCK_PTR)
2513 fputc ('\n', file);
2516 fputs ("}\n\n", file);
2521 /*---------------------------------------------------------------------------
2522 Miscellaneous helpers
2523 ---------------------------------------------------------------------------*/
2525 /* Return true if T represents a stmt that always transfers control. */
2527 bool
2528 is_ctrl_stmt (tree t)
2530 return (TREE_CODE (t) == COND_EXPR
2531 || TREE_CODE (t) == SWITCH_EXPR
2532 || TREE_CODE (t) == GOTO_EXPR
2533 || TREE_CODE (t) == RETURN_EXPR
2534 || TREE_CODE (t) == RESX_EXPR);
2538 /* Return true if T is a statement that may alter the flow of control
2539 (e.g., a call to a non-returning function). */
2541 bool
2542 is_ctrl_altering_stmt (tree t)
2544 tree call;
2546 gcc_assert (t);
2547 call = get_call_expr_in (t);
2548 if (call)
2550 /* A non-pure/const CALL_EXPR alters flow control if the current
2551 function has nonlocal labels. */
2552 if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
2553 return true;
2555 /* A CALL_EXPR also alters control flow if it does not return. */
2556 if (call_expr_flags (call) & ECF_NORETURN)
2557 return true;
2560 /* OpenMP directives alter control flow. */
2561 if (flag_openmp && OMP_DIRECTIVE_P (t))
2562 return true;
2564 /* If a statement can throw, it alters control flow. */
2565 return tree_can_throw_internal (t);
2569 /* Return true if T is a computed goto. */
2571 bool
2572 computed_goto_p (tree t)
2574 return (TREE_CODE (t) == GOTO_EXPR
2575 && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
2579 /* Checks whether EXPR is a simple local goto. */
2581 bool
2582 simple_goto_p (tree expr)
2584 return (TREE_CODE (expr) == GOTO_EXPR
2585 && TREE_CODE (GOTO_DESTINATION (expr)) == LABEL_DECL);
2589 /* Return true if T should start a new basic block. PREV_T is the
2590 statement preceding T. It is used when T is a label or a case label.
2591 Labels should only start a new basic block if their previous statement
2592 wasn't a label. Otherwise, a sequence of labels would generate
2593 unnecessary basic blocks that only contain a single label. */
2595 static inline bool
2596 stmt_starts_bb_p (tree t, tree prev_t)
2598 if (t == NULL_TREE)
2599 return false;
2601 /* LABEL_EXPRs start a new basic block only if the preceding
2602 statement wasn't a label of the same type. This prevents the
2603 creation of consecutive blocks that have nothing but a single
2604 label. */
2605 if (TREE_CODE (t) == LABEL_EXPR)
2607 /* Nonlocal and computed GOTO targets always start a new block. */
2608 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
2609 || FORCED_LABEL (LABEL_EXPR_LABEL (t)))
2610 return true;
2612 if (prev_t && TREE_CODE (prev_t) == LABEL_EXPR)
2614 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
2615 return true;
2617 cfg_stats.num_merged_labels++;
2618 return false;
2620 else
2621 return true;
2624 return false;
2628 /* Return true if T should end a basic block. */
2630 bool
2631 stmt_ends_bb_p (tree t)
2633 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
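/* Illustrative sketch, not compiled: how a scan over a flattened
   STATEMENT_LIST could use the two predicates above to count basic
   block boundaries.  This is a simplified stand-in for the real block
   builder, and the function name is hypothetical.  */
#if 0
static int
example_count_block_starts (tree stmt_list)
{
  tree_stmt_iterator i;
  tree prev = NULL_TREE;
  int n_blocks = 0;

  for (i = tsi_start (stmt_list); !tsi_end_p (i); tsi_next (&i))
    {
      tree stmt = tsi_stmt (i);

      /* A statement opens a new block if it is the first one, if it
         must start a block itself, or if the previous statement had
         to end the current block.  */
      if (!prev
          || stmt_starts_bb_p (stmt, prev)
          || stmt_ends_bb_p (prev))
        n_blocks++;

      prev = stmt;
    }

  return n_blocks;
}
#endif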
2637 /* Add gotos that used to be represented implicitly in the CFG. */
2639 void
2640 disband_implicit_edges (void)
2642 basic_block bb;
2643 block_stmt_iterator last;
2644 edge e;
2645 edge_iterator ei;
2646 tree stmt, label;
2648 FOR_EACH_BB (bb)
2650 last = bsi_last (bb);
2651 stmt = last_stmt (bb);
2653 if (stmt && TREE_CODE (stmt) == COND_EXPR)
2655 /* Remove superfluous gotos from COND_EXPR branches. Moved
2656 here from cfg_remove_useless_stmts since it violates the
2657 invariants for tree-CFG correspondence and thus fits better
2658 here, where we do it anyway. */
2659 e = find_edge (bb, bb->next_bb);
2660 if (e)
2662 if (e->flags & EDGE_TRUE_VALUE)
2663 COND_EXPR_THEN (stmt) = build_empty_stmt ();
2664 else if (e->flags & EDGE_FALSE_VALUE)
2665 COND_EXPR_ELSE (stmt) = build_empty_stmt ();
2666 else
2667 gcc_unreachable ();
2668 e->flags |= EDGE_FALLTHRU;
2671 continue;
2674 if (stmt && TREE_CODE (stmt) == RETURN_EXPR)
2676 /* Remove the RETURN_EXPR if we may fall through to the exit
2677 instead. */
2678 gcc_assert (single_succ_p (bb));
2679 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
2681 if (bb->next_bb == EXIT_BLOCK_PTR
2682 && !TREE_OPERAND (stmt, 0))
2684 bsi_remove (&last, true);
2685 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
2687 continue;
2690 /* There can be no fallthru edge if the last statement is a control
2691 one. */
2692 if (stmt && is_ctrl_stmt (stmt))
2693 continue;
2695 /* Find a fallthru edge and emit the goto if necessary. */
2696 FOR_EACH_EDGE (e, ei, bb->succs)
2697 if (e->flags & EDGE_FALLTHRU)
2698 break;
2700 if (!e || e->dest == bb->next_bb)
2701 continue;
2703 gcc_assert (e->dest != EXIT_BLOCK_PTR);
2704 label = tree_block_label (e->dest);
2706 stmt = build1 (GOTO_EXPR, void_type_node, label);
2707 #ifdef USE_MAPPED_LOCATION
2708 SET_EXPR_LOCATION (stmt, e->goto_locus);
2709 #else
2710 SET_EXPR_LOCUS (stmt, e->goto_locus);
2711 #endif
2712 bsi_insert_after (&last, stmt, BSI_NEW_STMT);
2713 e->flags &= ~EDGE_FALLTHRU;
2717 /* Remove block annotations and other datastructures. */
2719 void
2720 delete_tree_cfg_annotations (void)
2722 label_to_block_map = NULL;
2726 /* Return the first statement in basic block BB. */
2728 tree
2729 first_stmt (basic_block bb)
2731 block_stmt_iterator i = bsi_start (bb);
2732 return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
2736 /* Return the last statement in basic block BB. */
2738 tree
2739 last_stmt (basic_block bb)
2741 block_stmt_iterator b = bsi_last (bb);
2742 return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
2746 /* Return a pointer to the last statement in block BB. */
2748 tree *
2749 last_stmt_ptr (basic_block bb)
2751 block_stmt_iterator last = bsi_last (bb);
2752 return !bsi_end_p (last) ? bsi_stmt_ptr (last) : NULL;
2756 /* Return the last statement of an otherwise empty block. Return NULL
2757 if the block is totally empty, or if it contains more than one
2758 statement. */
2760 tree
2761 last_and_only_stmt (basic_block bb)
2763 block_stmt_iterator i = bsi_last (bb);
2764 tree last, prev;
2766 if (bsi_end_p (i))
2767 return NULL_TREE;
2769 last = bsi_stmt (i);
2770 bsi_prev (&i);
2771 if (bsi_end_p (i))
2772 return last;
2774 /* Empty statements should no longer appear in the instruction stream.
2775 Everything that might have appeared before should be deleted by
2776 remove_useless_stmts, and the optimizers should just bsi_remove
2777 instead of smashing with build_empty_stmt.
2779 Thus the only thing that should appear here in a block containing
2780 one executable statement is a label. */
2781 prev = bsi_stmt (i);
2782 if (TREE_CODE (prev) == LABEL_EXPR)
2783 return last;
2784 else
2785 return NULL_TREE;
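/* Illustrative sketch, not compiled: last_and_only_stmt lets a caller
   ask whether a block does nothing but transfer control.  The function
   name is hypothetical.  */
#if 0
static bool
example_block_is_bare_goto (basic_block bb)
{
  tree stmt = last_and_only_stmt (bb);

  /* NULL means the block is empty or contains more than one statement
     besides a leading label.  */
  return stmt != NULL_TREE && TREE_CODE (stmt) == GOTO_EXPR;
}
#endif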
2789 /* Mark BB as the basic block holding statement T. */
2791 void
2792 set_bb_for_stmt (tree t, basic_block bb)
2794 if (TREE_CODE (t) == PHI_NODE)
2795 PHI_BB (t) = bb;
2796 else if (TREE_CODE (t) == STATEMENT_LIST)
2798 tree_stmt_iterator i;
2799 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2800 set_bb_for_stmt (tsi_stmt (i), bb);
2802 else
2804 stmt_ann_t ann = get_stmt_ann (t);
2805 ann->bb = bb;
2807 /* If the statement is a label, add the label to block-to-labels map
2808 so that we can speed up edge creation for GOTO_EXPRs. */
2809 if (TREE_CODE (t) == LABEL_EXPR)
2811 int uid;
2813 t = LABEL_EXPR_LABEL (t);
2814 uid = LABEL_DECL_UID (t);
2815 if (uid == -1)
2817 unsigned old_len = VEC_length (basic_block, label_to_block_map);
2818 LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
2819 if (old_len <= (unsigned) uid)
2821 basic_block *addr;
2822 unsigned new_len = 3 * uid / 2;
2824 VEC_safe_grow (basic_block, gc, label_to_block_map,
2825 new_len);
2826 addr = VEC_address (basic_block, label_to_block_map);
2827 memset (&addr[old_len],
2828 0, sizeof (basic_block) * (new_len - old_len));
2831 else
2832 /* We're moving an existing label. Make sure that we've
2833 removed it from the old block. */
2834 gcc_assert (!bb
2835 || !VEC_index (basic_block, label_to_block_map, uid));
2836 VEC_replace (basic_block, label_to_block_map, uid, bb);
2841 /* Finds iterator for STMT. */
2843 extern block_stmt_iterator
2844 bsi_for_stmt (tree stmt)
2846 block_stmt_iterator bsi;
2848 for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
2849 if (bsi_stmt (bsi) == stmt)
2850 return bsi;
2852 gcc_unreachable ();
2855 /* Mark statement T as modified, and update it. */
2856 static inline void
2857 update_modified_stmts (tree t)
2859 if (TREE_CODE (t) == STATEMENT_LIST)
2861 tree_stmt_iterator i;
2862 tree stmt;
2863 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2865 stmt = tsi_stmt (i);
2866 update_stmt_if_modified (stmt);
2869 else
2870 update_stmt_if_modified (t);
2873 /* Insert statement (or statement list) T before the statement
2874 pointed-to by iterator I. M specifies how to update iterator I
2875 after insertion (see enum bsi_iterator_update). */
2877 void
2878 bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2880 set_bb_for_stmt (t, i->bb);
2881 update_modified_stmts (t);
2882 tsi_link_before (&i->tsi, t, m);
2886 /* Insert statement (or statement list) T after the statement
2887 pointed-to by iterator I. M specifies how to update iterator I
2888 after insertion (see enum bsi_iterator_update). */
2890 void
2891 bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2893 set_bb_for_stmt (t, i->bb);
2894 update_modified_stmts (t);
2895 tsi_link_after (&i->tsi, t, m);
2899 /* Remove the statement pointed to by iterator I. The iterator is updated
2900 to the next statement.
2902 When REMOVE_EH_INFO is true we remove the statement pointed to by
2903 iterator I from the EH tables. Otherwise we do not modify the EH
2904 tables.
2906 Generally, REMOVE_EH_INFO should be true when the statement is going to
2907 be removed from the IL and not reinserted elsewhere. */
2909 void
2910 bsi_remove (block_stmt_iterator *i, bool remove_eh_info)
2912 tree t = bsi_stmt (*i);
2913 set_bb_for_stmt (t, NULL);
2914 delink_stmt_imm_use (t);
2915 tsi_delink (&i->tsi);
2916 mark_stmt_modified (t);
2917 if (remove_eh_info)
2918 remove_stmt_from_eh_region (t);
2922 /* Move the statement at FROM so it comes right after the statement at TO. */
2924 void
2925 bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
2927 tree stmt = bsi_stmt (*from);
2928 bsi_remove (from, false);
2929 bsi_insert_after (to, stmt, BSI_SAME_STMT);
2933 /* Move the statement at FROM so it comes right before the statement at TO. */
2935 void
2936 bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
2938 tree stmt = bsi_stmt (*from);
2939 bsi_remove (from, false);
2940 bsi_insert_before (to, stmt, BSI_SAME_STMT);
2944 /* Move the statement at FROM to the end of basic block BB. */
2946 void
2947 bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
2949 block_stmt_iterator last = bsi_last (bb);
2951 /* Have to check bsi_end_p because it could be an empty block. */
2952 if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
2953 bsi_move_before (from, &last);
2954 else
2955 bsi_move_after (from, &last);
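/* Illustrative sketch, not compiled: moving a statement to the end of
   another block with the iterators above.  The function name is
   hypothetical.  */
#if 0
static void
example_sink_to_block_end (tree stmt, basic_block dest_bb)
{
  block_stmt_iterator from = bsi_for_stmt (stmt);

  /* bsi_move_to_bb_end places STMT before any control statement that
     ends DEST_BB, otherwise at the very end of the block.  */
  bsi_move_to_bb_end (&from, dest_bb);
}
#endif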
2959 /* Replace the contents of the statement pointed to by iterator BSI
2960 with STMT. If UPDATE_EH_INFO is true, the exception handling
2961 information of the original statement is moved to the new statement. */
2964 void
2965 bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool update_eh_info)
2967 int eh_region;
2968 tree orig_stmt = bsi_stmt (*bsi);
2970 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
2971 set_bb_for_stmt (stmt, bsi->bb);
2973 /* Preserve EH region information from the original statement, if
2974 requested by the caller. */
2975 if (update_eh_info)
2977 eh_region = lookup_stmt_eh_region (orig_stmt);
2978 if (eh_region >= 0)
2980 remove_stmt_from_eh_region (orig_stmt);
2981 add_stmt_to_eh_region (stmt, eh_region);
2985 delink_stmt_imm_use (orig_stmt);
2986 *bsi_stmt_ptr (*bsi) = stmt;
2987 mark_stmt_modified (stmt);
2988 update_modified_stmts (stmt);
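/* Illustrative sketch, not compiled: locating a statement's iterator
   and replacing the statement in place while preserving its EH region.
   OLD_STMT and NEW_STMT are hypothetical.  */
#if 0
static void
example_replace_stmt (tree old_stmt, tree new_stmt)
{
  block_stmt_iterator bsi = bsi_for_stmt (old_stmt);

  /* The third argument asks bsi_replace to move any EH region
     information from OLD_STMT to NEW_STMT.  */
  bsi_replace (&bsi, new_stmt, true);
}
#endif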
2992 /* Insert the statement pointed-to by BSI into edge E. Every attempt
2993 is made to place the statement in an existing basic block, but
2994 sometimes that isn't possible. When it isn't possible, the edge is
2995 split and the statement is added to the new block.
2997 In all cases, the returned *BSI points to the correct location. The
2998 return value is true if insertion should be done after the location,
2999 or false if it should be done before the location. If a new basic block
3000 has to be created, it is stored in *NEW_BB. */
3002 static bool
3003 tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
3004 basic_block *new_bb)
3006 basic_block dest, src;
3007 tree tmp;
3009 dest = e->dest;
3010 restart:
3012 /* If the destination has one predecessor which has no PHI nodes,
3013 insert there. Except for the exit block.
3015 The requirement for no PHI nodes could be relaxed. Basically we
3016 would have to examine the PHIs to prove that none of them used
3017 the value set by the statement we want to insert on E. That
3018 hardly seems worth the effort. */
3019 if (single_pred_p (dest)
3020 && ! phi_nodes (dest)
3021 && dest != EXIT_BLOCK_PTR)
3023 *bsi = bsi_start (dest);
3024 if (bsi_end_p (*bsi))
3025 return true;
3027 /* Make sure we insert after any leading labels. */
3028 tmp = bsi_stmt (*bsi);
3029 while (TREE_CODE (tmp) == LABEL_EXPR)
3031 bsi_next (bsi);
3032 if (bsi_end_p (*bsi))
3033 break;
3034 tmp = bsi_stmt (*bsi);
3037 if (bsi_end_p (*bsi))
3039 *bsi = bsi_last (dest);
3040 return true;
3042 else
3043 return false;
3046 /* If the source has one successor, the edge is not abnormal and
3047 the last statement does not end a basic block, insert there.
3048 Except for the entry block. */
3049 src = e->src;
3050 if ((e->flags & EDGE_ABNORMAL) == 0
3051 && single_succ_p (src)
3052 && src != ENTRY_BLOCK_PTR)
3054 *bsi = bsi_last (src);
3055 if (bsi_end_p (*bsi))
3056 return true;
3058 tmp = bsi_stmt (*bsi);
3059 if (!stmt_ends_bb_p (tmp))
3060 return true;
3062 /* Insert code just before returning the value. We may need to decompose
3063 the return in case it contains a non-trivial operand. */
3064 if (TREE_CODE (tmp) == RETURN_EXPR)
3066 tree op = TREE_OPERAND (tmp, 0);
3067 if (op && !is_gimple_val (op))
3069 gcc_assert (TREE_CODE (op) == MODIFY_EXPR);
3070 bsi_insert_before (bsi, op, BSI_NEW_STMT);
3071 TREE_OPERAND (tmp, 0) = TREE_OPERAND (op, 0);
3073 bsi_prev (bsi);
3074 return true;
3078 /* Otherwise, create a new basic block, and split this edge. */
3079 dest = split_edge (e);
3080 if (new_bb)
3081 *new_bb = dest;
3082 e = single_pred_edge (dest);
3083 goto restart;
3087 /* This routine will commit all pending edge insertions, creating any new
3088 basic blocks which are necessary. */
3090 void
3091 bsi_commit_edge_inserts (void)
3093 basic_block bb;
3094 edge e;
3095 edge_iterator ei;
3097 bsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR), NULL);
3099 FOR_EACH_BB (bb)
3100 FOR_EACH_EDGE (e, ei, bb->succs)
3101 bsi_commit_one_edge_insert (e, NULL);
3105 /* Commit insertions pending at edge E. If a new block is created, set NEW_BB
3106 to this block, otherwise set it to NULL. */
3108 void
3109 bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
3111 if (new_bb)
3112 *new_bb = NULL;
3113 if (PENDING_STMT (e))
3115 block_stmt_iterator bsi;
3116 tree stmt = PENDING_STMT (e);
3118 PENDING_STMT (e) = NULL_TREE;
3120 if (tree_find_edge_insert_loc (e, &bsi, new_bb))
3121 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3122 else
3123 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3128 /* Add STMT to the pending list of edge E. No actual insertion is
3129 made until a call to bsi_commit_edge_inserts () is made. */
3131 void
3132 bsi_insert_on_edge (edge e, tree stmt)
3134 append_to_statement_list (stmt, &PENDING_STMT (e));
3137 /* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts. If a new
3138 block has to be created, it is returned. */
3140 basic_block
3141 bsi_insert_on_edge_immediate (edge e, tree stmt)
3143 block_stmt_iterator bsi;
3144 basic_block new_bb = NULL;
3146 gcc_assert (!PENDING_STMT (e));
3148 if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
3149 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3150 else
3151 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3153 return new_bb;
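/* Illustrative sketch, not compiled: the two ways a pass can place a
   statement on an edge.  The edges and statements below are
   hypothetical.  */
#if 0
static void
example_edge_insertion (edge e1, edge e2, tree stmt1, tree stmt2)
{
  basic_block new_bb;

  /* Deferred: queue STMT1 on E1.  Nothing changes in the IL until
     bsi_commit_edge_inserts is called, which may split edges and
     create new blocks as needed.  */
  bsi_insert_on_edge (e1, stmt1);
  bsi_commit_edge_inserts ();

  /* Immediate: insert STMT2 right away.  NEW_BB is NULL when STMT2
     fit in an existing block, otherwise it is the block created by
     splitting E2.  */
  new_bb = bsi_insert_on_edge_immediate (e2, stmt2);
  if (new_bb)
    gcc_assert (single_succ_p (new_bb));
}
#endif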
3156 /*---------------------------------------------------------------------------
3157 Tree specific functions for CFG manipulation
3158 ---------------------------------------------------------------------------*/
3160 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
3162 static void
3163 reinstall_phi_args (edge new_edge, edge old_edge)
3165 tree var, phi;
3167 if (!PENDING_STMT (old_edge))
3168 return;
3170 for (var = PENDING_STMT (old_edge), phi = phi_nodes (new_edge->dest);
3171 var && phi;
3172 var = TREE_CHAIN (var), phi = PHI_CHAIN (phi))
3174 tree result = TREE_PURPOSE (var);
3175 tree arg = TREE_VALUE (var);
3177 gcc_assert (result == PHI_RESULT (phi));
3179 add_phi_arg (phi, arg, new_edge);
3182 PENDING_STMT (old_edge) = NULL;
3185 /* Returns the basic block after which the new basic block created
3186 by splitting edge EDGE_IN should be placed. Tries to keep the new block
3187 near its "logical" location. This is of most help to humans looking
3188 at debugging dumps. */
3190 static basic_block
3191 split_edge_bb_loc (edge edge_in)
3193 basic_block dest = edge_in->dest;
3195 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
3196 return edge_in->src;
3197 else
3198 return dest->prev_bb;
3201 /* Split a (typically critical) edge EDGE_IN. Return the new block.
3202 Abort on abnormal edges. */
3204 static basic_block
3205 tree_split_edge (edge edge_in)
3207 basic_block new_bb, after_bb, dest, src;
3208 edge new_edge, e;
3210 /* Abnormal edges cannot be split. */
3211 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
3213 src = edge_in->src;
3214 dest = edge_in->dest;
3216 after_bb = split_edge_bb_loc (edge_in);
3218 new_bb = create_empty_bb (after_bb);
3219 new_bb->frequency = EDGE_FREQUENCY (edge_in);
3220 new_bb->count = edge_in->count;
3221 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
3222 new_edge->probability = REG_BR_PROB_BASE;
3223 new_edge->count = edge_in->count;
3225 e = redirect_edge_and_branch (edge_in, new_bb);
3226 gcc_assert (e);
3227 reinstall_phi_args (new_edge, e);
3229 return new_bb;
3233 /* Return true when BB has label LABEL in it. */
3235 static bool
3236 has_label_p (basic_block bb, tree label)
3238 block_stmt_iterator bsi;
3240 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3242 tree stmt = bsi_stmt (bsi);
3244 if (TREE_CODE (stmt) != LABEL_EXPR)
3245 return false;
3246 if (LABEL_EXPR_LABEL (stmt) == label)
3247 return true;
3249 return false;
3253 /* Callback for walk_tree, check that all elements with address taken are
3254 properly noticed as such. DATA is nonnull if the walk started
3255 inside a PHI node. */
3257 static tree
3258 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3260 tree t = *tp, x;
3261 bool in_phi = (data != NULL);
3263 if (TYPE_P (t))
3264 *walk_subtrees = 0;
3266 /* Check operand N for being valid GIMPLE and give error MSG if not. */
3267 #define CHECK_OP(N, MSG) \
3268 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
3269 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
3271 switch (TREE_CODE (t))
3273 case SSA_NAME:
3274 if (SSA_NAME_IN_FREE_LIST (t))
3276 error ("SSA name in freelist but still referenced");
3277 return *tp;
3279 break;
3281 case ASSERT_EXPR:
3282 x = fold (ASSERT_EXPR_COND (t));
3283 if (x == boolean_false_node)
3285 error ("ASSERT_EXPR with an always-false condition");
3286 return *tp;
3288 break;
3290 case MODIFY_EXPR:
3291 x = TREE_OPERAND (t, 0);
3292 if (TREE_CODE (x) == BIT_FIELD_REF
3293 && is_gimple_reg (TREE_OPERAND (x, 0)))
3295 error ("GIMPLE register modified with BIT_FIELD_REF");
3296 return t;
3298 break;
3300 case ADDR_EXPR:
3302 bool old_invariant;
3303 bool old_constant;
3304 bool old_side_effects;
3305 bool new_invariant;
3306 bool new_constant;
3307 bool new_side_effects;
3309 /* ??? tree-ssa-alias.c may have overlooked dead PHI nodes, missing
3310 dead PHIs that take the address of something. But if the PHI
3311 result is dead, the fact that it takes the address of anything
3312 is irrelevant. Because we cannot tell from here if a PHI result
3313 is dead, we just skip this check for PHIs altogether. This means
3314 we may be missing "valid" checks, but what can you do?
3315 This was PR19217. */
3316 if (in_phi)
3317 break;
3319 old_invariant = TREE_INVARIANT (t);
3320 old_constant = TREE_CONSTANT (t);
3321 old_side_effects = TREE_SIDE_EFFECTS (t);
3323 recompute_tree_invariant_for_addr_expr (t);
3324 new_invariant = TREE_INVARIANT (t);
3325 new_side_effects = TREE_SIDE_EFFECTS (t);
3326 new_constant = TREE_CONSTANT (t);
3328 if (old_invariant != new_invariant)
3330 error ("invariant not recomputed when ADDR_EXPR changed");
3331 return t;
3334 if (old_constant != new_constant)
3336 error ("constant not recomputed when ADDR_EXPR changed");
3337 return t;
3339 if (old_side_effects != new_side_effects)
3341 error ("side effects not recomputed when ADDR_EXPR changed");
3342 return t;
3345 /* Skip any references (they will be checked when we recurse down the
3346 tree) and ensure that any variable used as a prefix is marked
3347 addressable. */
3348 for (x = TREE_OPERAND (t, 0);
3349 handled_component_p (x);
3350 x = TREE_OPERAND (x, 0))
3353 if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
3354 return NULL;
3355 if (!TREE_ADDRESSABLE (x))
3357 error ("address taken, but ADDRESSABLE bit not set");
3358 return x;
3360 break;
3363 case COND_EXPR:
3364 x = COND_EXPR_COND (t);
3365 if (TREE_CODE (TREE_TYPE (x)) != BOOLEAN_TYPE)
3367 error ("non-boolean used in condition");
3368 return x;
3370 if (!is_gimple_condexpr (x))
3372 error ("invalid conditional operand");
3373 return x;
3375 break;
3377 case NOP_EXPR:
3378 case CONVERT_EXPR:
3379 case FIX_TRUNC_EXPR:
3380 case FIX_CEIL_EXPR:
3381 case FIX_FLOOR_EXPR:
3382 case FIX_ROUND_EXPR:
3383 case FLOAT_EXPR:
3384 case NEGATE_EXPR:
3385 case ABS_EXPR:
3386 case BIT_NOT_EXPR:
3387 case NON_LVALUE_EXPR:
3388 case TRUTH_NOT_EXPR:
3389 CHECK_OP (0, "invalid operand to unary operator");
3390 break;
3392 case REALPART_EXPR:
3393 case IMAGPART_EXPR:
3394 case COMPONENT_REF:
3395 case ARRAY_REF:
3396 case ARRAY_RANGE_REF:
3397 case BIT_FIELD_REF:
3398 case VIEW_CONVERT_EXPR:
3399 /* We have a nest of references. Verify that each of the operands
3400 that determine where to reference is either a constant or a variable,
3401 verify that the base is valid, and then show we've already checked
3402 the subtrees. */
3403 while (handled_component_p (t))
3405 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3406 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3407 else if (TREE_CODE (t) == ARRAY_REF
3408 || TREE_CODE (t) == ARRAY_RANGE_REF)
3410 CHECK_OP (1, "invalid array index");
3411 if (TREE_OPERAND (t, 2))
3412 CHECK_OP (2, "invalid array lower bound");
3413 if (TREE_OPERAND (t, 3))
3414 CHECK_OP (3, "invalid array stride");
3416 else if (TREE_CODE (t) == BIT_FIELD_REF)
3418 CHECK_OP (1, "invalid operand to BIT_FIELD_REF");
3419 CHECK_OP (2, "invalid operand to BIT_FIELD_REF");
3422 t = TREE_OPERAND (t, 0);
3425 if (!CONSTANT_CLASS_P (t) && !is_gimple_lvalue (t))
3427 error ("invalid reference prefix");
3428 return t;
3430 *walk_subtrees = 0;
3431 break;
3433 case LT_EXPR:
3434 case LE_EXPR:
3435 case GT_EXPR:
3436 case GE_EXPR:
3437 case EQ_EXPR:
3438 case NE_EXPR:
3439 case UNORDERED_EXPR:
3440 case ORDERED_EXPR:
3441 case UNLT_EXPR:
3442 case UNLE_EXPR:
3443 case UNGT_EXPR:
3444 case UNGE_EXPR:
3445 case UNEQ_EXPR:
3446 case LTGT_EXPR:
3447 case PLUS_EXPR:
3448 case MINUS_EXPR:
3449 case MULT_EXPR:
3450 case TRUNC_DIV_EXPR:
3451 case CEIL_DIV_EXPR:
3452 case FLOOR_DIV_EXPR:
3453 case ROUND_DIV_EXPR:
3454 case TRUNC_MOD_EXPR:
3455 case CEIL_MOD_EXPR:
3456 case FLOOR_MOD_EXPR:
3457 case ROUND_MOD_EXPR:
3458 case RDIV_EXPR:
3459 case EXACT_DIV_EXPR:
3460 case MIN_EXPR:
3461 case MAX_EXPR:
3462 case LSHIFT_EXPR:
3463 case RSHIFT_EXPR:
3464 case LROTATE_EXPR:
3465 case RROTATE_EXPR:
3466 case BIT_IOR_EXPR:
3467 case BIT_XOR_EXPR:
3468 case BIT_AND_EXPR:
3469 CHECK_OP (0, "invalid operand to binary operator");
3470 CHECK_OP (1, "invalid operand to binary operator");
3471 break;
3473 default:
3474 break;
3476 return NULL;
3478 #undef CHECK_OP
3482 /* Verify STMT, return true if STMT is not in GIMPLE form.
3483 TODO: Implement type checking. */
3485 static bool
3486 verify_stmt (tree stmt, bool last_in_block)
3488 tree addr;
3490 if (OMP_DIRECTIVE_P (stmt))
3492 /* OpenMP directives are validated by the FE and never operated
3493 on by the optimizers. Furthermore, OMP_FOR may contain
3494 non-gimple expressions when the main index variable has had
3495 its address taken. This does not affect the loop itself
3496 because the header of an OMP_FOR is merely used to determine
3497 how to set up the parallel iteration. */
3498 return false;
3501 if (!is_gimple_stmt (stmt))
3503 error ("is not a valid GIMPLE statement");
3504 goto fail;
3507 addr = walk_tree (&stmt, verify_expr, NULL, NULL);
3508 if (addr)
3510 debug_generic_stmt (addr);
3511 return true;
3514 /* If the statement is marked as part of an EH region, then it is
3515 expected that the statement could throw. Verify that when we
3516 have optimizations that simplify statements such that we prove
3517 that they cannot throw, that we update other data structures
3518 to match. */
3519 if (lookup_stmt_eh_region (stmt) >= 0)
3521 if (!tree_could_throw_p (stmt))
3523 error ("statement marked for throw, but doesn%'t");
3524 goto fail;
3526 if (!last_in_block && tree_can_throw_internal (stmt))
3528 error ("statement marked for throw in middle of block");
3529 goto fail;
3533 return false;
3535 fail:
3536 debug_generic_stmt (stmt);
3537 return true;
3541 /* Return true when the T can be shared. */
3543 static bool
3544 tree_node_can_be_shared (tree t)
3546 if (IS_TYPE_OR_DECL_P (t)
3547 || is_gimple_min_invariant (t)
3548 || TREE_CODE (t) == SSA_NAME
3549 || t == error_mark_node
3550 || TREE_CODE (t) == IDENTIFIER_NODE)
3551 return true;
3553 if (TREE_CODE (t) == CASE_LABEL_EXPR)
3554 return true;
3556 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3557 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
3558 || TREE_CODE (t) == COMPONENT_REF
3559 || TREE_CODE (t) == REALPART_EXPR
3560 || TREE_CODE (t) == IMAGPART_EXPR)
3561 t = TREE_OPERAND (t, 0);
3563 if (DECL_P (t))
3564 return true;
3566 return false;
3570 /* Called via walk_trees. Verify tree sharing. */
3572 static tree
3573 verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
3575 htab_t htab = (htab_t) data;
3576 void **slot;
3578 if (tree_node_can_be_shared (*tp))
3580 *walk_subtrees = false;
3581 return NULL;
3584 slot = htab_find_slot (htab, *tp, INSERT);
3585 if (*slot)
3586 return (tree) *slot;
3587 *slot = *tp;
3589 return NULL;
3593 /* Verify the GIMPLE statement chain. */
3595 void
3596 verify_stmts (void)
3598 basic_block bb;
3599 block_stmt_iterator bsi;
3600 bool err = false;
3601 htab_t htab;
3602 tree addr;
3604 timevar_push (TV_TREE_STMT_VERIFY);
3605 htab = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
3607 FOR_EACH_BB (bb)
3609 tree phi;
3610 int i;
3612 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
3614 int phi_num_args = PHI_NUM_ARGS (phi);
3616 if (bb_for_stmt (phi) != bb)
3618 error ("bb_for_stmt (phi) is set to a wrong basic block");
3619 err |= true;
3622 for (i = 0; i < phi_num_args; i++)
3624 tree t = PHI_ARG_DEF (phi, i);
3625 tree addr;
3627 /* Addressable variables do have SSA_NAMEs but they
3628 are not considered gimple values. */
3629 if (TREE_CODE (t) != SSA_NAME
3630 && TREE_CODE (t) != FUNCTION_DECL
3631 && !is_gimple_val (t))
3633 error ("PHI def is not a GIMPLE value");
3634 debug_generic_stmt (phi);
3635 debug_generic_stmt (t);
3636 err |= true;
3639 addr = walk_tree (&t, verify_expr, (void *) 1, NULL);
3640 if (addr)
3642 debug_generic_stmt (addr);
3643 err |= true;
3646 addr = walk_tree (&t, verify_node_sharing, htab, NULL);
3647 if (addr)
3649 error ("incorrect sharing of tree nodes");
3650 debug_generic_stmt (phi);
3651 debug_generic_stmt (addr);
3652 err |= true;
3657 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
3659 tree stmt = bsi_stmt (bsi);
3661 if (bb_for_stmt (stmt) != bb)
3663 error ("bb_for_stmt (stmt) is set to a wrong basic block");
3664 err |= true;
3667 bsi_next (&bsi);
3668 err |= verify_stmt (stmt, bsi_end_p (bsi));
3669 addr = walk_tree (&stmt, verify_node_sharing, htab, NULL);
3670 if (addr)
3672 error ("incorrect sharing of tree nodes");
3673 debug_generic_stmt (stmt);
3674 debug_generic_stmt (addr);
3675 err |= true;
3680 if (err)
3681 internal_error ("verify_stmts failed");
3683 htab_delete (htab);
3684 timevar_pop (TV_TREE_STMT_VERIFY);
3688 /* Verifies that the flow information is OK. */
3690 static int
3691 tree_verify_flow_info (void)
3693 int err = 0;
3694 basic_block bb;
3695 block_stmt_iterator bsi;
3696 tree stmt;
3697 edge e;
3698 edge_iterator ei;
3700 if (ENTRY_BLOCK_PTR->stmt_list)
3702 error ("ENTRY_BLOCK has a statement list associated with it");
3703 err = 1;
3706 if (EXIT_BLOCK_PTR->stmt_list)
3708 error ("EXIT_BLOCK has a statement list associated with it");
3709 err = 1;
3712 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
3713 if (e->flags & EDGE_FALLTHRU)
3715 error ("fallthru to exit from bb %d", e->src->index);
3716 err = 1;
3719 FOR_EACH_BB (bb)
3721 bool found_ctrl_stmt = false;
3723 stmt = NULL_TREE;
3725 /* Skip labels on the start of basic block. */
3726 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3728 tree prev_stmt = stmt;
3730 stmt = bsi_stmt (bsi);
3732 if (TREE_CODE (stmt) != LABEL_EXPR)
3733 break;
3735 if (prev_stmt && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
3737 error ("nonlocal label ");
3738 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3739 fprintf (stderr, " is not first in a sequence of labels in bb %d",
3740 bb->index);
3741 err = 1;
3744 if (label_to_block (LABEL_EXPR_LABEL (stmt)) != bb)
3746 error ("label ");
3747 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3748 fprintf (stderr, " to block does not match in bb %d",
3749 bb->index);
3750 err = 1;
3753 if (decl_function_context (LABEL_EXPR_LABEL (stmt))
3754 != current_function_decl)
3756 error ("label ");
3757 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3758 fprintf (stderr, " has incorrect context in bb %d",
3759 bb->index);
3760 err = 1;
3764 /* Verify that body of basic block BB is free of control flow. */
3765 for (; !bsi_end_p (bsi); bsi_next (&bsi))
3767 tree stmt = bsi_stmt (bsi);
3769 if (found_ctrl_stmt)
3771 error ("control flow in the middle of basic block %d",
3772 bb->index);
3773 err = 1;
3776 if (stmt_ends_bb_p (stmt))
3777 found_ctrl_stmt = true;
3779 if (TREE_CODE (stmt) == LABEL_EXPR)
3781 error ("label ");
3782 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3783 fprintf (stderr, " in the middle of basic block %d", bb->index);
3784 err = 1;
3788 bsi = bsi_last (bb);
3789 if (bsi_end_p (bsi))
3790 continue;
3792 stmt = bsi_stmt (bsi);
3794 err |= verify_eh_edges (stmt);
3796 if (is_ctrl_stmt (stmt))
3798 FOR_EACH_EDGE (e, ei, bb->succs)
3799 if (e->flags & EDGE_FALLTHRU)
3801 error ("fallthru edge after a control statement in bb %d",
3802 bb->index);
3803 err = 1;
3807 switch (TREE_CODE (stmt))
3809 case COND_EXPR:
3811 edge true_edge;
3812 edge false_edge;
3813 if (TREE_CODE (COND_EXPR_THEN (stmt)) != GOTO_EXPR
3814 || TREE_CODE (COND_EXPR_ELSE (stmt)) != GOTO_EXPR)
3816 error ("structured COND_EXPR at the end of bb %d", bb->index);
3817 err = 1;
3820 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
3822 if (!true_edge || !false_edge
3823 || !(true_edge->flags & EDGE_TRUE_VALUE)
3824 || !(false_edge->flags & EDGE_FALSE_VALUE)
3825 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3826 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3827 || EDGE_COUNT (bb->succs) >= 3)
3829 error ("wrong outgoing edge flags at end of bb %d",
3830 bb->index);
3831 err = 1;
3834 if (!has_label_p (true_edge->dest,
3835 GOTO_DESTINATION (COND_EXPR_THEN (stmt))))
3837 error ("%<then%> label does not match edge at end of bb %d",
3838 bb->index);
3839 err = 1;
3842 if (!has_label_p (false_edge->dest,
3843 GOTO_DESTINATION (COND_EXPR_ELSE (stmt))))
3845 error ("%<else%> label does not match edge at end of bb %d",
3846 bb->index);
3847 err = 1;
3850 break;
3852 case GOTO_EXPR:
3853 if (simple_goto_p (stmt))
3855 error ("explicit goto at end of bb %d", bb->index);
3856 err = 1;
3858 else
3860 /* FIXME. We should double check that the labels in the
3861 destination blocks have their address taken. */
3862 FOR_EACH_EDGE (e, ei, bb->succs)
3863 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
3864 | EDGE_FALSE_VALUE))
3865 || !(e->flags & EDGE_ABNORMAL))
3867 error ("wrong outgoing edge flags at end of bb %d",
3868 bb->index);
3869 err = 1;
3872 break;
3874 case RETURN_EXPR:
3875 if (!single_succ_p (bb)
3876 || (single_succ_edge (bb)->flags
3877 & (EDGE_FALLTHRU | EDGE_ABNORMAL
3878 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3880 error ("wrong outgoing edge flags at end of bb %d", bb->index);
3881 err = 1;
3883 if (single_succ (bb) != EXIT_BLOCK_PTR)
3885 error ("return edge does not point to exit in bb %d",
3886 bb->index);
3887 err = 1;
3889 break;
3891 case SWITCH_EXPR:
3893 tree prev;
3894 edge e;
3895 size_t i, n;
3896 tree vec;
3898 vec = SWITCH_LABELS (stmt);
3899 n = TREE_VEC_LENGTH (vec);
3901 /* Mark all the destination basic blocks. */
3902 for (i = 0; i < n; ++i)
3904 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3905 basic_block label_bb = label_to_block (lab);
3907 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
3908 label_bb->aux = (void *)1;
3911 /* Verify that the case labels are sorted. */
3912 prev = TREE_VEC_ELT (vec, 0);
3913 for (i = 1; i < n - 1; ++i)
3915 tree c = TREE_VEC_ELT (vec, i);
3916 if (! CASE_LOW (c))
3918 error ("found default case not at end of case vector");
3919 err = 1;
3920 continue;
3922 if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
3924 error ("case labels not sorted: ");
3925 print_generic_expr (stderr, prev, 0);
3926 fprintf (stderr," is greater than ");
3927 print_generic_expr (stderr, c, 0);
3928 fprintf (stderr," but comes before it.\n");
3929 err = 1;
3931 prev = c;
3933 if (CASE_LOW (TREE_VEC_ELT (vec, n - 1)))
3935 error ("no default case found at end of case vector");
3936 err = 1;
3939 FOR_EACH_EDGE (e, ei, bb->succs)
3941 if (!e->dest->aux)
3943 error ("extra outgoing edge %d->%d",
3944 bb->index, e->dest->index);
3945 err = 1;
3947 e->dest->aux = (void *)2;
3948 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
3949 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3951 error ("wrong outgoing edge flags at end of bb %d",
3952 bb->index);
3953 err = 1;
3957 /* Check that we have all of them. */
3958 for (i = 0; i < n; ++i)
3960 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3961 basic_block label_bb = label_to_block (lab);
3963 if (label_bb->aux != (void *)2)
3965 error ("missing edge %i->%i",
3966 bb->index, label_bb->index);
3967 err = 1;
3971 FOR_EACH_EDGE (e, ei, bb->succs)
3972 e->dest->aux = (void *)0;
3975 default: ;
3979 if (dom_computed[CDI_DOMINATORS] >= DOM_NO_FAST_QUERY)
3980 verify_dominators (CDI_DOMINATORS);
3982 return err;
3986 /* Updates phi nodes after creating a forwarder block joined
3987 by edge FALLTHRU. */
3989 static void
3990 tree_make_forwarder_block (edge fallthru)
3992 edge e;
3993 edge_iterator ei;
3994 basic_block dummy, bb;
3995 tree phi, new_phi, var;
3997 dummy = fallthru->src;
3998 bb = fallthru->dest;
4000 if (single_pred_p (bb))
4001 return;
4003 /* If we redirected a branch we must create new phi nodes at the
4004 start of BB. */
4005 for (phi = phi_nodes (dummy); phi; phi = PHI_CHAIN (phi))
4007 var = PHI_RESULT (phi);
4008 new_phi = create_phi_node (var, bb);
4009 SSA_NAME_DEF_STMT (var) = new_phi;
4010 SET_PHI_RESULT (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
4011 add_phi_arg (new_phi, PHI_RESULT (phi), fallthru);
4014 /* Ensure that the PHI node chain is in the same order. */
4015 set_phi_nodes (bb, phi_reverse (phi_nodes (bb)));
4017 /* Add the arguments we have stored on edges. */
4018 FOR_EACH_EDGE (e, ei, bb->preds)
4020 if (e == fallthru)
4021 continue;
4023 flush_pending_stmts (e);
4028 /* Return a non-special label at the head of basic block BB.
4029 Create one if it doesn't exist. */
4031 tree
4032 tree_block_label (basic_block bb)
4034 block_stmt_iterator i, s = bsi_start (bb);
4035 bool first = true;
4036 tree label, stmt;
4038 for (i = s; !bsi_end_p (i); first = false, bsi_next (&i))
4040 stmt = bsi_stmt (i);
4041 if (TREE_CODE (stmt) != LABEL_EXPR)
4042 break;
4043 label = LABEL_EXPR_LABEL (stmt);
4044 if (!DECL_NONLOCAL (label))
4046 if (!first)
4047 bsi_move_before (&i, &s);
4048 return label;
4052 label = create_artificial_label ();
4053 stmt = build1 (LABEL_EXPR, void_type_node, label);
4054 bsi_insert_before (&s, stmt, BSI_NEW_STMT);
4055 return label;
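/* Illustrative sketch, not compiled: emitting an explicit goto to a
   block, mirroring what disband_implicit_edges does above.  BB and
   TARGET are hypothetical.  */
#if 0
static void
example_emit_goto (basic_block bb, basic_block target)
{
  block_stmt_iterator bsi = bsi_last (bb);
  tree label = tree_block_label (target);   /* Created if missing.  */
  tree stmt = build1 (GOTO_EXPR, void_type_node, label);

  bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
}
#endif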
4059 /* Attempt to perform edge redirection by replacing a possibly complex
4060 jump instruction by a goto or by removing the jump completely.
4061 This can apply only if all edges now point to the same block. The
4062 parameters and return values are equivalent to
4063 redirect_edge_and_branch. */
4065 static edge
4066 tree_try_redirect_by_replacing_jump (edge e, basic_block target)
4068 basic_block src = e->src;
4069 block_stmt_iterator b;
4070 tree stmt;
4072 /* We can replace or remove a complex jump only when we have exactly
4073 two edges. */
4074 if (EDGE_COUNT (src->succs) != 2
4075 /* Verify that all targets will be TARGET. Specifically, the
4076 edge that is not E must also go to TARGET. */
4077 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4078 return NULL;
4080 b = bsi_last (src);
4081 if (bsi_end_p (b))
4082 return NULL;
4083 stmt = bsi_stmt (b);
4085 if (TREE_CODE (stmt) == COND_EXPR
4086 || TREE_CODE (stmt) == SWITCH_EXPR)
4088 bsi_remove (&b, true);
4089 e = ssa_redirect_edge (e, target);
4090 e->flags = EDGE_FALLTHRU;
4091 return e;
4094 return NULL;
4098 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4099 edge representing the redirected branch. */
4101 static edge
4102 tree_redirect_edge_and_branch (edge e, basic_block dest)
4104 basic_block bb = e->src;
4105 block_stmt_iterator bsi;
4106 edge ret;
4107 tree label, stmt;
4109 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4110 return NULL;
4112 if (e->src != ENTRY_BLOCK_PTR
4113 && (ret = tree_try_redirect_by_replacing_jump (e, dest)))
4114 return ret;
4116 if (e->dest == dest)
4117 return NULL;
4119 label = tree_block_label (dest);
4121 bsi = bsi_last (bb);
4122 stmt = bsi_end_p (bsi) ? NULL : bsi_stmt (bsi);
4124 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
4126 case COND_EXPR:
4127 stmt = (e->flags & EDGE_TRUE_VALUE
4128 ? COND_EXPR_THEN (stmt)
4129 : COND_EXPR_ELSE (stmt));
4130 GOTO_DESTINATION (stmt) = label;
4131 break;
4133 case GOTO_EXPR:
4134 /* No non-abnormal edges should lead from a non-simple goto, and
4135 simple ones should be represented implicitly. */
4136 gcc_unreachable ();
4138 case SWITCH_EXPR:
4140 tree cases = get_cases_for_edge (e, stmt);
4142 /* If we have a list of cases associated with E, then use it
4143 as it's a lot faster than walking the entire case vector. */
4144 if (cases)
4146 edge e2 = find_edge (e->src, dest);
4147 tree last, first;
4149 first = cases;
4150 while (cases)
4152 last = cases;
4153 CASE_LABEL (cases) = label;
4154 cases = TREE_CHAIN (cases);
4157 /* If there was already an edge in the CFG, then we need
4158 to move all the cases associated with E to E2. */
4159 if (e2)
4161 tree cases2 = get_cases_for_edge (e2, stmt);
4163 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4164 TREE_CHAIN (cases2) = first;
4167 else
4169 tree vec = SWITCH_LABELS (stmt);
4170 size_t i, n = TREE_VEC_LENGTH (vec);
4172 for (i = 0; i < n; i++)
4174 tree elt = TREE_VEC_ELT (vec, i);
4176 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4177 CASE_LABEL (elt) = label;
4181 break;
4184 case RETURN_EXPR:
4185 bsi_remove (&bsi, true);
4186 e->flags |= EDGE_FALLTHRU;
4187 break;
4189 default:
4190 /* Otherwise it must be a fallthru edge, and we don't need to
4191 do anything besides redirecting it. */
4192 gcc_assert (e->flags & EDGE_FALLTHRU);
4193 break;
4196 /* Update/insert PHI nodes as necessary. */
4198 /* Now update the edges in the CFG. */
4199 e = ssa_redirect_edge (e, dest);
4201 return e;
4205 /* Simple wrapper, as we can always redirect fallthru edges. */
4207 static basic_block
4208 tree_redirect_edge_and_branch_force (edge e, basic_block dest)
4210 e = tree_redirect_edge_and_branch (e, dest);
4211 gcc_assert (e);
4213 return NULL;
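/* Illustrative sketch, not compiled: redirecting an edge through the
   generic hook, as tree_split_edge does above.  E and NEW_DEST are
   hypothetical.  */
#if 0
static bool
example_redirect (edge e, basic_block new_dest)
{
  /* The hook returns NULL when the redirection is refused, e.g. for
     abnormal or EH edges.  */
  return redirect_edge_and_branch (e, new_dest) != NULL;
}
#endif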
4217 /* Splits basic block BB after statement STMT (but at least after the
4218 labels). If STMT is NULL, BB is split just after the labels. */
4220 static basic_block
4221 tree_split_block (basic_block bb, void *stmt)
4223 block_stmt_iterator bsi, bsi_tgt;
4224 tree act;
4225 basic_block new_bb;
4226 edge e;
4227 edge_iterator ei;
4229 new_bb = create_empty_bb (bb);
4231 /* Redirect the outgoing edges. */
4232 new_bb->succs = bb->succs;
4233 bb->succs = NULL;
4234 FOR_EACH_EDGE (e, ei, new_bb->succs)
4235 e->src = new_bb;
4237 if (stmt && TREE_CODE ((tree) stmt) == LABEL_EXPR)
4238 stmt = NULL;
4240 /* Move everything from BSI to the new basic block. */
4241 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4243 act = bsi_stmt (bsi);
4244 if (TREE_CODE (act) == LABEL_EXPR)
4245 continue;
4247 if (!stmt)
4248 break;
4250 if (stmt == act)
4252 bsi_next (&bsi);
4253 break;
4257 bsi_tgt = bsi_start (new_bb);
4258 while (!bsi_end_p (bsi))
4260 act = bsi_stmt (bsi);
4261 bsi_remove (&bsi, false);
4262 bsi_insert_after (&bsi_tgt, act, BSI_NEW_STMT);
4265 return new_bb;
4269 /* Moves basic block BB after block AFTER. */
4271 static bool
4272 tree_move_block_after (basic_block bb, basic_block after)
4274 if (bb->prev_bb == after)
4275 return true;
4277 unlink_block (bb);
4278 link_block (bb, after);
4280 return true;
4284 /* Return true if basic_block can be duplicated. */
4286 static bool
4287 tree_can_duplicate_bb_p (basic_block bb ATTRIBUTE_UNUSED)
4289 return true;
4293 /* Create a duplicate of the basic block BB. NOTE: This does not
4294 preserve SSA form. */
4296 static basic_block
4297 tree_duplicate_bb (basic_block bb)
4299 basic_block new_bb;
4300 block_stmt_iterator bsi, bsi_tgt;
4301 tree phi;
4303 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
4305 /* Copy the PHI nodes. We ignore PHI node arguments here because
4306 the incoming edges have not been set up yet. */
4307 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4309 tree copy = create_phi_node (PHI_RESULT (phi), new_bb);
4310 create_new_def_for (PHI_RESULT (copy), copy, PHI_RESULT_PTR (copy));
4313 /* Keep the chain of PHI nodes in the same order so that they can be
4314 updated by ssa_redirect_edge. */
4315 set_phi_nodes (new_bb, phi_reverse (phi_nodes (new_bb)));
4317 bsi_tgt = bsi_start (new_bb);
4318 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4320 def_operand_p def_p;
4321 ssa_op_iter op_iter;
4322 tree stmt, copy;
4323 int region;
4325 stmt = bsi_stmt (bsi);
4326 if (TREE_CODE (stmt) == LABEL_EXPR)
4327 continue;
4329 /* Create a new copy of STMT and duplicate STMT's virtual
4330 operands. */
4331 copy = unshare_expr (stmt);
4332 bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
4333 copy_virtual_operands (copy, stmt);
4334 region = lookup_stmt_eh_region (stmt);
4335 if (region >= 0)
4336 add_stmt_to_eh_region (copy, region);
4338 /* Create new names for all the definitions created by COPY and
4339 add replacement mappings for each new name. */
4340 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
4341 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
4344 return new_bb;
4348 /* Basic block BB_COPY was created by code duplication. Add phi node
4349 arguments for edges going out of BB_COPY. The blocks that were
4350 duplicated have BB_DUPLICATED set. */
4352 void
4353 add_phi_args_after_copy_bb (basic_block bb_copy)
4355 basic_block bb, dest;
4356 edge e, e_copy;
4357 edge_iterator ei;
4358 tree phi, phi_copy, phi_next, def;
4360 bb = get_bb_original (bb_copy);
4362 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
4364 if (!phi_nodes (e_copy->dest))
4365 continue;
4367 if (e_copy->dest->flags & BB_DUPLICATED)
4368 dest = get_bb_original (e_copy->dest);
4369 else
4370 dest = e_copy->dest;
4372 e = find_edge (bb, dest);
4373 if (!e)
4375 /* During loop unrolling the target of the latch edge is copied.
4376 In this case we are not looking for an edge to DEST, but for
4377 one to the duplicated block whose original was DEST. */
4378 FOR_EACH_EDGE (e, ei, bb->succs)
4379 if ((e->dest->flags & BB_DUPLICATED)
4380 && get_bb_original (e->dest) == dest)
4381 break;
4383 gcc_assert (e != NULL);
4386 for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
4387 phi;
4388 phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
4390 phi_next = PHI_CHAIN (phi);
4391 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4392 add_phi_arg (phi_copy, def, e_copy);
4397 /* Blocks in REGION_COPY array of length N_REGION were created by
4398 duplication of basic blocks. Add phi node arguments for edges
4399 going from these blocks. */
4401 void
4402 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region)
4404 unsigned i;
4406 for (i = 0; i < n_region; i++)
4407 region_copy[i]->flags |= BB_DUPLICATED;
4409 for (i = 0; i < n_region; i++)
4410 add_phi_args_after_copy_bb (region_copy[i]);
4412 for (i = 0; i < n_region; i++)
4413 region_copy[i]->flags &= ~BB_DUPLICATED;
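/* The two routines above are intended to follow a call to copy_bbs.  A
   minimal sketch of the usual sequence, where REGION, N_REGION,
   REGION_COPY, EXIT, EXIT_COPY, LOOP and AFTER stand for data the caller
   has computed (see tree_duplicate_sese_region below for a complete,
   real use):

     initialize_original_copy_tables ();
     copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy,
               loop, after);
     add_phi_args_after_copy (region_copy, n_region);
     update_ssa (TODO_update_ssa);
     free_original_copy_tables ();  */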
4416 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
4417 important exit edge EXIT. By important we mean that no SSA name defined
4418 inside region is live over the other exit edges of the region. All entry
4419 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
4420 to the duplicate of the region. SSA form, dominance and loop information
4421 is updated. The new basic blocks are stored to REGION_COPY in the same
4422 order as they appear in REGION, provided that REGION_COPY is not NULL.
4423 The function returns false if it is unable to copy the region,
4424 true otherwise. */
4426 bool
4427 tree_duplicate_sese_region (edge entry, edge exit,
4428 basic_block *region, unsigned n_region,
4429 basic_block *region_copy)
4431 unsigned i, n_doms;
4432 bool free_region_copy = false, copying_header = false;
4433 struct loop *loop = entry->dest->loop_father;
4434 edge exit_copy;
4435 basic_block *doms;
4436 edge redirected;
4437 int total_freq = 0, entry_freq = 0;
4438 gcov_type total_count = 0, entry_count = 0;
4440 if (!can_copy_bbs_p (region, n_region))
4441 return false;
4443 /* Some sanity checking. Note that we do not check for all possible
4444 misuses of the function, i.e. if you ask to copy something weird,
4445 it will work, but the state of structures probably will not be
4446 correct. */
4447 for (i = 0; i < n_region; i++)
4449 /* We do not handle subloops, i.e. all the blocks must belong to the
4450 same loop. */
4451 if (region[i]->loop_father != loop)
4452 return false;
4454 if (region[i] != entry->dest
4455 && region[i] == loop->header)
4456 return false;
4459 loop->copy = loop;
4461 /* In case the function is used for loop header copying (which is the primary
4462 use), ensure that EXIT and its copy will be the new latch and entry edges.
4463 if (loop->header == entry->dest)
4465 copying_header = true;
4466 loop->copy = loop->outer;
4468 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
4469 return false;
4471 for (i = 0; i < n_region; i++)
4472 if (region[i] != exit->src
4473 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
4474 return false;
4477 if (!region_copy)
4479 region_copy = XNEWVEC (basic_block, n_region);
4480 free_region_copy = true;
4483 gcc_assert (!need_ssa_update_p ());
4485 /* Record blocks outside the region that are dominated by something
4486 inside. */
4487 doms = XNEWVEC (basic_block, n_basic_blocks);
4488 initialize_original_copy_tables ();
4490 n_doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region, doms);
4492 if (entry->dest->count)
4494 total_count = entry->dest->count;
4495 entry_count = entry->count;
4496 /* Fix up corner cases, to avoid division by zero or creation of negative
4497 frequencies. */
4498 if (entry_count > total_count)
4499 entry_count = total_count;
4501 else
4503 total_freq = entry->dest->frequency;
4504 entry_freq = EDGE_FREQUENCY (entry);
4505 /* Fix up corner cases, to avoid division by zero or creation of negative
4506 frequencies. */
4507 if (total_freq == 0)
4508 total_freq = 1;
4509 else if (entry_freq > total_freq)
4510 entry_freq = total_freq;
4513 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
4514 split_edge_bb_loc (entry));
4515 if (total_count)
4517 scale_bbs_frequencies_gcov_type (region, n_region,
4518 total_count - entry_count,
4519 total_count);
4520 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
4521 total_count);
4523 else
4525 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
4526 total_freq);
4527 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
4530 if (copying_header)
4532 loop->header = exit->dest;
4533 loop->latch = exit->src;
4536 /* Redirect the entry and add the phi node arguments. */
4537 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
4538 gcc_assert (redirected != NULL);
4539 flush_pending_stmts (entry);
4541 /* Concerning updating of dominators: We must recount dominators
4542 for entry block and its copy. Anything that is outside of the
4543 region, but was dominated by something inside needs recounting as
4544 well. */
4545 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
4546 doms[n_doms++] = get_bb_original (entry->dest);
4547 iterate_fix_dominators (CDI_DOMINATORS, doms, n_doms);
4548 free (doms);
4550 /* Add the other PHI node arguments. */
4551 add_phi_args_after_copy (region_copy, n_region);
4553 /* Update the SSA web. */
4554 update_ssa (TODO_update_ssa);
4556 if (free_region_copy)
4557 free (region_copy);
4559 free_original_copy_tables ();
4560 return true;
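/* As an illustration, a loop-header-copying style caller might use the
   function above roughly as follows (a sketch only; LOOP, EXIT, BBS and
   N_BBS stand for data such a pass would compute and are not defined
   here).  ENTRY is the preheader edge and EXIT the exit edge of the
   header being copied; on success, SSA form, dominators and loop
   structures are already up to date when the call returns:

     entry = loop_preheader_edge (loop);
     if (tree_duplicate_sese_region (entry, exit, bbs, n_bbs, NULL))
       ...  */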
4564 DEF_VEC_P(basic_block);
4565 DEF_VEC_ALLOC_P(basic_block,heap);
4568 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
4569 adding blocks when the dominator traversal reaches EXIT. This
4570 function silently assumes that ENTRY strictly dominates EXIT. */
4572 static void
4573 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
4574 VEC(basic_block,heap) **bbs_p)
4576 basic_block son;
4578 for (son = first_dom_son (CDI_DOMINATORS, entry);
4579 son;
4580 son = next_dom_son (CDI_DOMINATORS, son))
4582 VEC_safe_push (basic_block, heap, *bbs_p, son);
4583 if (son != exit)
4584 gather_blocks_in_sese_region (son, exit, bbs_p);
4589 struct move_stmt_d
4591 tree block;
4592 tree from_context;
4593 tree to_context;
4594 bitmap vars_to_remove;
4595 bool remap_decls_p;
4598 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
4599 contained in *TP and change the DECL_CONTEXT of every local
4600 variable referenced in *TP. */
4602 static tree
4603 move_stmt_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4605 struct move_stmt_d *p = (struct move_stmt_d *) data;
4607 if (p->block && IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (TREE_CODE (*tp))))
4608 TREE_BLOCK (*tp) = p->block;
4610 if (OMP_DIRECTIVE_P (*tp))
4612 /* Do not remap variables inside OMP directives. Variables
4613 referenced in clauses and directive header belong to the
4614 parent function and should not be moved into the child
4615 function. */
4616 p->remap_decls_p = false;
4619 if (p->remap_decls_p
4620 && DECL_P (*tp)
4621 && DECL_CONTEXT (*tp) == p->from_context)
4623 DECL_CONTEXT (*tp) = p->to_context;
4625 if (TREE_CODE (*tp) == VAR_DECL)
4627 struct function *f = DECL_STRUCT_FUNCTION (p->to_context);
4628 f->unexpanded_var_list = tree_cons (0, *tp, f->unexpanded_var_list);
4630 /* Mark *TP to be removed from the original function,
4631 otherwise it will be given a DECL_RTL when the original
4632 function is expanded. */
4633 bitmap_set_bit (p->vars_to_remove, DECL_UID (*tp));
4637 return NULL_TREE;
4641 /* Move basic block BB from function CFUN to function DEST_FN. The
4642 block is moved out of the original linked list and placed after
4643 block AFTER in the new list. Also, the block is removed from the
4644 original array of blocks and placed in DEST_FN's array of blocks.
4645 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
4646 updated to reflect the moved edges.
4648 On exit, local variables that need to be removed from
4649 CFUN->UNEXPANDED_VAR_LIST will have been added to VARS_TO_REMOVE. */
4651 static void
4652 move_block_to_fn (struct function *dest_cfun, basic_block bb,
4653 basic_block after, bool update_edge_count_p,
4654 bitmap vars_to_remove)
4656 struct control_flow_graph *cfg;
4657 edge_iterator ei;
4658 edge e;
4659 block_stmt_iterator si;
4660 struct move_stmt_d d;
4661 unsigned old_len, new_len;
4662 basic_block *addr;
4664 /* Link BB to the new linked list. */
4665 move_block_after (bb, after);
4667 /* Update the edge count in the corresponding flowgraphs. */
4668 if (update_edge_count_p)
4669 FOR_EACH_EDGE (e, ei, bb->succs)
4671 cfun->cfg->x_n_edges--;
4672 dest_cfun->cfg->x_n_edges++;
4675 /* Remove BB from the original basic block array. */
4676 VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
4677 cfun->cfg->x_n_basic_blocks--;
4679 /* Grow DEST_CFUN's basic block array if needed. */
4680 cfg = dest_cfun->cfg;
4681 cfg->x_n_basic_blocks++;
4682 if (bb->index > cfg->x_last_basic_block)
4683 cfg->x_last_basic_block = bb->index;
4685 old_len = VEC_length (basic_block, cfg->x_basic_block_info);
4686 if ((unsigned) cfg->x_last_basic_block >= old_len)
4688 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
4689 VEC_safe_grow (basic_block, gc, cfg->x_basic_block_info, new_len);
4690 addr = VEC_address (basic_block, cfg->x_basic_block_info);
4691 memset (&addr[old_len], 0, sizeof (basic_block) * (new_len - old_len));
4694 VEC_replace (basic_block, cfg->x_basic_block_info,
4695 cfg->x_last_basic_block, bb);
4697 /* The statements in BB need to be associated with a new TREE_BLOCK.
4698 Labels need to be associated with a new label-to-block map. */
4699 memset (&d, 0, sizeof (d));
4700 d.vars_to_remove = vars_to_remove;
4702 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
4704 tree stmt = bsi_stmt (si);
4706 d.from_context = cfun->decl;
4707 d.to_context = dest_cfun->decl;
4708 d.remap_decls_p = true;
4709 if (TREE_BLOCK (stmt))
4710 d.block = DECL_INITIAL (dest_cfun->decl);
4712 walk_tree (&stmt, move_stmt_r, &d, NULL);
4714 if (TREE_CODE (stmt) == LABEL_EXPR)
4716 tree label = LABEL_EXPR_LABEL (stmt);
4717 int uid = LABEL_DECL_UID (label);
4719 gcc_assert (uid > -1);
4721 old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
4722 if (old_len <= (unsigned) uid)
4724 new_len = 3 * uid / 2;
4725 VEC_safe_grow (basic_block, gc, cfg->x_label_to_block_map,
4726 new_len);
4727 addr = VEC_address (basic_block, cfg->x_label_to_block_map);
4728 memset (&addr[old_len], 0,
4729 sizeof (basic_block) * (new_len - old_len));
4732 VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
4733 VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
4735 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
4737 if (uid >= dest_cfun->last_label_uid)
4738 dest_cfun->last_label_uid = uid + 1;
4744 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
4745 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
4746 single basic block in the original CFG and the new basic block is
4747 returned. DEST_CFUN must not have a CFG yet.
4749 Note that the region need not be a pure SESE region. Blocks inside
4750 the region may contain calls to abort/exit. The only restriction
4751 is that ENTRY_BB should be the only entry point and it must
4752 dominate EXIT_BB.
4754 All local variables referenced in the region are assumed to be in
4755 the corresponding BLOCK_VARS and unexpanded variable lists
4756 associated with DEST_CFUN. */
4758 basic_block
4759 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
4760 basic_block exit_bb)
4762 VEC(basic_block,heap) *bbs;
4763 basic_block after, bb, *entry_pred, *exit_succ;
4764 struct function *saved_cfun;
4765 int *entry_flag, *exit_flag;
4766 unsigned i, num_entry_edges, num_exit_edges;
4767 edge e;
4768 edge_iterator ei;
4769 bitmap vars_to_remove;
4771 saved_cfun = cfun;
4773 /* Collect all the blocks in the region. Manually add ENTRY_BB
4774 because it won't be added by dfs_enumerate_from. */
4775 calculate_dominance_info (CDI_DOMINATORS);
4777 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
4778 region. */
4779 gcc_assert (entry_bb != exit_bb
4780 && dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb));
4782 bbs = NULL;
4783 VEC_safe_push (basic_block, heap, bbs, entry_bb);
4784 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
4786 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
4787 the predecessor edges to ENTRY_BB and the successor edges to
4788 EXIT_BB so that we can re-attach them to the new basic block that
4789 will replace the region. */
4790 num_entry_edges = EDGE_COUNT (entry_bb->preds);
4791 entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
4792 entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
4793 i = 0;
4794 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
4796 entry_flag[i] = e->flags;
4797 entry_pred[i++] = e->src;
4798 remove_edge (e);
4801 num_exit_edges = EDGE_COUNT (exit_bb->succs);
4802 exit_succ = (basic_block *) xcalloc (num_exit_edges, sizeof (basic_block));
4803 exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
4804 i = 0;
4805 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
4807 exit_flag[i] = e->flags;
4808 exit_succ[i++] = e->dest;
4809 remove_edge (e);
4812 /* Switch context to the child function to initialize DEST_FN's CFG. */
4813 gcc_assert (dest_cfun->cfg == NULL);
4814 cfun = dest_cfun;
4815 init_empty_tree_cfg ();
4816 cfun = saved_cfun;
4818 /* Move blocks from BBS into DEST_CFUN. */
4819 gcc_assert (VEC_length (basic_block, bbs) >= 2);
4820 after = dest_cfun->cfg->x_entry_block_ptr;
4821 vars_to_remove = BITMAP_ALLOC (NULL);
4822 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
4824 /* No need to update edge counts on the last block. They have
4825 already been updated earlier when we detached the region from
4826 the original CFG. */
4827 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, vars_to_remove);
4828 after = bb;
4831 /* Remove the variables marked in VARS_TO_REMOVE from
4832 CFUN->UNEXPANDED_VAR_LIST. Otherwise, they will be given a
4833 DECL_RTL in the context of CFUN. */
4834 if (!bitmap_empty_p (vars_to_remove))
4836 tree *p;
4838 for (p = &cfun->unexpanded_var_list; *p; )
4840 tree var = TREE_VALUE (*p);
4841 if (bitmap_bit_p (vars_to_remove, DECL_UID (var)))
4843 *p = TREE_CHAIN (*p);
4844 continue;
4847 p = &TREE_CHAIN (*p);
4851 BITMAP_FREE (vars_to_remove);
4853 /* Rewire the entry and exit blocks. The block at the head of the
4854 region (ENTRY_BB) becomes the successor of DEST_FN's ENTRY_BLOCK_PTR
4855 in the child function. Similarly, EXIT_BB becomes the predecessor of
4856 DEST_FN's EXIT_BLOCK_PTR. We need to switch CFUN between DEST_CFUN
4857 and SAVED_CFUN so that the various CFG manipulation functions get to
4858 the right CFG.
4860 FIXME, this is silly. The CFG ought to become a parameter to
4861 these helpers. */
4862 cfun = dest_cfun;
4863 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
4864 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
4865 cfun = saved_cfun;
4867 /* Back in the original function, the SESE region has disappeared,
4868 create a new basic block in its place. */
4869 bb = create_empty_bb (entry_pred[0]);
4870 for (i = 0; i < num_entry_edges; i++)
4871 make_edge (entry_pred[i], bb, entry_flag[i]);
4873 for (i = 0; i < num_exit_edges; i++)
4874 make_edge (bb, exit_succ[i], exit_flag[i]);
4876 free (exit_flag);
4877 free (entry_flag);
4878 free (entry_pred);
4879 free (exit_succ);
4880 free_dominance_info (CDI_DOMINATORS);
4881 free_dominance_info (CDI_POST_DOMINATORS);
4882 VEC_free (basic_block, heap, bbs);
4884 return bb;
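/* Illustrative use, in the spirit of the OpenMP expansion code that this
   helper serves (a sketch; CHILD_FN, ENTRY_BB and EXIT_BB stand for
   values the caller would have computed):

     child_cfun = DECL_STRUCT_FUNCTION (child_fn);
     new_bb = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb);

   On return, NEW_BB replaces the whole region in the current function
   while CHILD_CFUN owns the moved blocks.  */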
4888 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h) */
4890 void
4891 dump_function_to_file (tree fn, FILE *file, int flags)
4893 tree arg, vars, var;
4894 bool ignore_topmost_bind = false, any_var = false;
4895 basic_block bb;
4896 tree chain;
4897 struct function *saved_cfun;
4899 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
4901 arg = DECL_ARGUMENTS (fn);
4902 while (arg)
4904 print_generic_expr (file, arg, dump_flags);
4905 if (TREE_CHAIN (arg))
4906 fprintf (file, ", ");
4907 arg = TREE_CHAIN (arg);
4909 fprintf (file, ")\n");
4911 if (flags & TDF_DETAILS)
4912 dump_eh_tree (file, DECL_STRUCT_FUNCTION (fn));
4913 if (flags & TDF_RAW)
4915 dump_node (fn, TDF_SLIM | flags, file);
4916 return;
4919 /* Switch CFUN to point to FN. */
4920 saved_cfun = cfun;
4921 cfun = DECL_STRUCT_FUNCTION (fn);
4923 /* When GIMPLE is lowered, the variables are no longer available in
4924 BIND_EXPRs, so display them separately. */
4925 if (cfun && cfun->decl == fn && cfun->unexpanded_var_list)
4927 ignore_topmost_bind = true;
4929 fprintf (file, "{\n");
4930 for (vars = cfun->unexpanded_var_list; vars; vars = TREE_CHAIN (vars))
4932 var = TREE_VALUE (vars);
4934 print_generic_decl (file, var, flags);
4935 fprintf (file, "\n");
4937 any_var = true;
4941 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
4943 /* Make a CFG based dump. */
4944 check_bb_profile (ENTRY_BLOCK_PTR, file);
4945 if (!ignore_topmost_bind)
4946 fprintf (file, "{\n");
4948 if (any_var && n_basic_blocks)
4949 fprintf (file, "\n");
4951 FOR_EACH_BB (bb)
4952 dump_generic_bb (file, bb, 2, flags);
4954 fprintf (file, "}\n");
4955 check_bb_profile (EXIT_BLOCK_PTR, file);
4957 else
4959 int indent;
4961 /* Make a tree based dump. */
4962 chain = DECL_SAVED_TREE (fn);
4964 if (chain && TREE_CODE (chain) == BIND_EXPR)
4966 if (ignore_topmost_bind)
4968 chain = BIND_EXPR_BODY (chain);
4969 indent = 2;
4971 else
4972 indent = 0;
4974 else
4976 if (!ignore_topmost_bind)
4977 fprintf (file, "{\n");
4978 indent = 2;
4981 if (any_var)
4982 fprintf (file, "\n");
4984 print_generic_stmt_indented (file, chain, flags, indent);
4985 if (ignore_topmost_bind)
4986 fprintf (file, "}\n");
4989 fprintf (file, "\n\n");
4991 /* Restore CFUN. */
4992 cfun = saved_cfun;
4996 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
4998 void
4999 debug_function (tree fn, int flags)
5001 dump_function_to_file (fn, stderr, flags);
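/* Both entry points are handy from the debugger, for example:

     (gdb) call debug_function (current_function_decl, TDF_DETAILS)

   (illustrative; any FUNCTION_DECL and any combination of the TDF_*
   flags mentioned above can be passed).  */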
5005 /* Pretty print of the loops intermediate representation. */
5006 static void print_loop (FILE *, struct loop *, int);
5007 static void print_pred_bbs (FILE *, basic_block bb);
5008 static void print_succ_bbs (FILE *, basic_block bb);
5011 /* Print on FILE the indexes for the predecessors of basic_block BB. */
5013 static void
5014 print_pred_bbs (FILE *file, basic_block bb)
5016 edge e;
5017 edge_iterator ei;
5019 FOR_EACH_EDGE (e, ei, bb->preds)
5020 fprintf (file, "bb_%d ", e->src->index);
5024 /* Print on FILE the indexes for the successors of basic_block BB. */
5026 static void
5027 print_succ_bbs (FILE *file, basic_block bb)
5029 edge e;
5030 edge_iterator ei;
5032 FOR_EACH_EDGE (e, ei, bb->succs)
5033 fprintf (file, "bb_%d ", e->dest->index);
5037 /* Pretty print LOOP on FILE, indented INDENT spaces. */
5039 static void
5040 print_loop (FILE *file, struct loop *loop, int indent)
5042 char *s_indent;
5043 basic_block bb;
5045 if (loop == NULL)
5046 return;
5048 s_indent = (char *) alloca ((size_t) indent + 1);
5049 memset ((void *) s_indent, ' ', (size_t) indent);
5050 s_indent[indent] = '\0';
5052 /* Print the loop's header. */
5053 fprintf (file, "%sloop_%d\n", s_indent, loop->num);
5055 /* Print the loop's body. */
5056 fprintf (file, "%s{\n", s_indent);
5057 FOR_EACH_BB (bb)
5058 if (bb->loop_father == loop)
5060 /* Print the basic_block's header. */
5061 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
5062 print_pred_bbs (file, bb);
5063 fprintf (file, "}, succs = {");
5064 print_succ_bbs (file, bb);
5065 fprintf (file, "})\n");
5067 /* Print the basic_block's body. */
5068 fprintf (file, "%s {\n", s_indent);
5069 tree_dump_bb (bb, file, indent + 4);
5070 fprintf (file, "%s }\n", s_indent);
5073 print_loop (file, loop->inner, indent + 2);
5074 fprintf (file, "%s}\n", s_indent);
5075 print_loop (file, loop->next, indent);
5079 /* Pretty print on FILE the loop structure of the current function,
5080 starting from the loop that contains its first basic block. */
5082 void
5083 print_loop_ir (FILE *file)
5085 basic_block bb;
5087 bb = BASIC_BLOCK (NUM_FIXED_BLOCKS);
5088 if (bb && bb->loop_father)
5089 print_loop (file, bb->loop_father, 0);
5093 /* Debugging loops structure at tree level. */
5095 void
5096 debug_loop_ir (void)
5098 print_loop_ir (stderr);
5102 /* Return true if BB ends with a call, possibly followed by some
5103 instructions that must stay with the call. Return false
5104 otherwise. */
5106 static bool
5107 tree_block_ends_with_call_p (basic_block bb)
5109 block_stmt_iterator bsi = bsi_last (bb);
5110 return get_call_expr_in (bsi_stmt (bsi)) != NULL;
5114 /* Return true if BB ends with a conditional branch. Return false
5115 otherwise. */
5117 static bool
5118 tree_block_ends_with_condjump_p (basic_block bb)
5120 tree stmt = last_stmt (bb);
5121 return (stmt && TREE_CODE (stmt) == COND_EXPR);
5126 /* Return true if we need to add a fake edge to exit at statement T.
5126 Helper function for tree_flow_call_edges_add. */
5128 static bool
5129 need_fake_edge_p (tree t)
5131 tree call;
5133 /* NORETURN and LONGJMP calls already have an edge to exit.
5134 CONST and PURE calls do not need one.
5135 We don't currently check for CONST and PURE here, although
5136 it would be a good idea, because those attributes are
5137 figured out from the RTL in mark_constant_function, and
5138 the counter incrementation code from -fprofile-arcs
5139 leads to different results from -fbranch-probabilities. */
5140 call = get_call_expr_in (t);
5141 if (call
5142 && !(call_expr_flags (call) & ECF_NORETURN))
5143 return true;
5145 if (TREE_CODE (t) == ASM_EXPR
5146 && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
5147 return true;
5149 return false;
5153 /* Add fake edges to the function exit for any non-constant and
5154 non-noreturn calls, and for volatile inline assembly, in the bitmap of
5155 blocks specified by BLOCKS, or in the whole CFG if BLOCKS is zero. Return
5156 the number of blocks that were split.
5158 The goal is to expose cases in which entering a basic block does
5159 not imply that all subsequent instructions must be executed. */
5161 static int
5162 tree_flow_call_edges_add (sbitmap blocks)
5164 int i;
5165 int blocks_split = 0;
5166 int last_bb = last_basic_block;
5167 bool check_last_block = false;
5169 if (n_basic_blocks == NUM_FIXED_BLOCKS)
5170 return 0;
5172 if (! blocks)
5173 check_last_block = true;
5174 else
5175 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
5177 /* In the last basic block, before epilogue generation, there will be
5178 a fallthru edge to EXIT. Special care is required if the last insn
5179 of the last basic block is a call because make_edge folds duplicate
5180 edges, which would result in the fallthru edge also being marked
5181 fake, which would result in the fallthru edge being removed by
5182 remove_fake_edges, which would result in an invalid CFG.
5184 Moreover, we can't elide the outgoing fake edge, since the block
5185 profiler needs to take this into account in order to solve the minimal
5186 spanning tree in the case that the call doesn't return.
5188 Handle this by adding a dummy instruction in a new last basic block. */
5189 if (check_last_block)
5191 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
5192 block_stmt_iterator bsi = bsi_last (bb);
5193 tree t = NULL_TREE;
5194 if (!bsi_end_p (bsi))
5195 t = bsi_stmt (bsi);
5197 if (t && need_fake_edge_p (t))
5199 edge e;
5201 e = find_edge (bb, EXIT_BLOCK_PTR);
5202 if (e)
5204 bsi_insert_on_edge (e, build_empty_stmt ());
5205 bsi_commit_edge_inserts ();
5210 /* Now add fake edges to the function exit for any non-constant
5211 calls since there is no way that we can determine if they will
5212 return or not... */
5213 for (i = 0; i < last_bb; i++)
5215 basic_block bb = BASIC_BLOCK (i);
5216 block_stmt_iterator bsi;
5217 tree stmt, last_stmt;
5219 if (!bb)
5220 continue;
5222 if (blocks && !TEST_BIT (blocks, i))
5223 continue;
5225 bsi = bsi_last (bb);
5226 if (!bsi_end_p (bsi))
5228 last_stmt = bsi_stmt (bsi);
5231 stmt = bsi_stmt (bsi);
5232 if (need_fake_edge_p (stmt))
5234 edge e;
5235 /* The handling above of the final block before the
5236 epilogue should be enough to verify that there is
5237 no edge to the exit block in CFG already.
5238 Calling make_edge in such case would cause us to
5239 mark that edge as fake and remove it later. */
5240 #ifdef ENABLE_CHECKING
5241 if (stmt == last_stmt)
5243 e = find_edge (bb, EXIT_BLOCK_PTR);
5244 gcc_assert (e == NULL);
5246 #endif
5248 /* Note that the following may create a new basic block
5249 and renumber the existing basic blocks. */
5250 if (stmt != last_stmt)
5252 e = split_block (bb, stmt);
5253 if (e)
5254 blocks_split++;
5256 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
5258 bsi_prev (&bsi);
5260 while (!bsi_end_p (bsi));
5264 if (blocks_split)
5265 verify_flow_info ();
5267 return blocks_split;
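/* To illustrate the transformation above with a hypothetical block (not
   taken from real input):

     a = foo ();
     b = a + 1;

   FOO may never return (it might call exit, say), so the block is split
   right after the call and a fake edge to EXIT_BLOCK_PTR is added from
   the half that now ends with the call; the profiling machinery can then
   account for paths that leave the function in the middle of the block.  */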
5270 bool
5271 tree_purge_dead_eh_edges (basic_block bb)
5273 bool changed = false;
5274 edge e;
5275 edge_iterator ei;
5276 tree stmt = last_stmt (bb);
5278 if (stmt && tree_can_throw_internal (stmt))
5279 return false;
5281 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5283 if (e->flags & EDGE_EH)
5285 remove_edge (e);
5286 changed = true;
5288 else
5289 ei_next (&ei);
5292 /* Removal of dead EH edges might change dominators of not
5293 just immediate successors. E.g. when bb1 is changed so that
5294 it no longer can throw and bb1->bb3 and bb1->bb4 are dead
5295 eh edges purged by this function in:
5299      1-->2
5300     / \  |
5301    v   v |
5302    3-->4 |
5303         \|
5304          5
5307 idom(bb5) must be recomputed. For now just free the dominance
5308 info. */
5309 if (changed)
5310 free_dominance_info (CDI_DOMINATORS);
5312 return changed;
5315 bool
5316 tree_purge_all_dead_eh_edges (bitmap blocks)
5318 bool changed = false;
5319 unsigned i;
5320 bitmap_iterator bi;
5322 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
5324 changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
5327 return changed;
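/* A typical calling pattern for the two purging routines above (a
   sketch; BB stands for a block whose last statement may have stopped
   throwing, e.g. after it was simplified by an optimization):

     bitmap to_purge = BITMAP_ALLOC (NULL);
     bitmap_set_bit (to_purge, bb->index);
     ...
     changed |= tree_purge_all_dead_eh_edges (to_purge);
     BITMAP_FREE (to_purge);  */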
5330 /* This function is called whenever a new edge is created or
5331 redirected. */
5333 static void
5334 tree_execute_on_growing_pred (edge e)
5336 basic_block bb = e->dest;
5338 if (phi_nodes (bb))
5339 reserve_phi_args_for_new_edge (bb);
5342 /* This function is called immediately before edge E is removed from
5343 the edge vector E->dest->preds. */
5345 static void
5346 tree_execute_on_shrinking_pred (edge e)
5348 if (phi_nodes (e->dest))
5349 remove_phi_args (e);
5352 /*---------------------------------------------------------------------------
5353 Helper functions for Loop versioning
5354 ---------------------------------------------------------------------------*/
5356 /* Adjust PHI nodes for basic block FIRST. SECOND is a copy of FIRST and
5357 both are dominated by basic block NEW_HEAD. NEW_HEAD was created by
5358 splitting SECOND's incoming edge, so the NEW_HEAD->SECOND edge already
5359 received PHI arguments from split_edge (). Later an additional edge E
5360 was created to connect NEW_HEAD and FIRST. This routine adds to E the
5361 PHI arguments that the NEW_HEAD->SECOND edge received as part of that
5362 edge splitting. */
5365 static void
5366 tree_lv_adjust_loop_header_phi (basic_block first, basic_block second,
5367 basic_block new_head, edge e)
5369 tree phi1, phi2;
5370 edge e2 = find_edge (new_head, second);
5372 /* Because NEW_HEAD has been created by splitting SECOND's incoming
5373 edge, we should always have an edge from NEW_HEAD to SECOND. */
5374 gcc_assert (e2 != NULL);
5376 /* Browse all 'second' basic block phi nodes and add phi args to
5377 edge 'e' for 'first' head. PHI args are always in correct order. */
5379 for (phi2 = phi_nodes (second), phi1 = phi_nodes (first);
5380 phi2 && phi1;
5381 phi2 = PHI_CHAIN (phi2), phi1 = PHI_CHAIN (phi1))
5383 tree def = PHI_ARG_DEF (phi2, e2->dest_idx);
5384 add_phi_arg (phi1, def, e);
5388 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
5389 FIRST_HEAD is the destination of the THEN part and SECOND_HEAD is
5390 the destination of the ELSE part. */
5391 static void
5392 tree_lv_add_condition_to_bb (basic_block first_head, basic_block second_head,
5393 basic_block cond_bb, void *cond_e)
5395 block_stmt_iterator bsi;
5396 tree goto1 = NULL_TREE;
5397 tree goto2 = NULL_TREE;
5398 tree new_cond_expr = NULL_TREE;
5399 tree cond_expr = (tree) cond_e;
5400 edge e0;
5402 /* Build new conditional expr */
5403 goto1 = build1 (GOTO_EXPR, void_type_node, tree_block_label (first_head));
5404 goto2 = build1 (GOTO_EXPR, void_type_node, tree_block_label (second_head));
5405 new_cond_expr = build3 (COND_EXPR, void_type_node, cond_expr, goto1, goto2);
5407 /* Add new cond in cond_bb. */
5408 bsi = bsi_start (cond_bb);
5409 bsi_insert_after (&bsi, new_cond_expr, BSI_NEW_STMT);
5410 /* Adjust edges appropriately to connect new head with first head
5411 as well as second head. */
5412 e0 = single_succ_edge (cond_bb);
5413 e0->flags &= ~EDGE_FALLTHRU;
5414 e0->flags |= EDGE_FALSE_VALUE;
5417 struct cfg_hooks tree_cfg_hooks = {
5418 "tree",
5419 tree_verify_flow_info,
5420 tree_dump_bb, /* dump_bb */
5421 create_bb, /* create_basic_block */
5422 tree_redirect_edge_and_branch,/* redirect_edge_and_branch */
5423 tree_redirect_edge_and_branch_force,/* redirect_edge_and_branch_force */
5424 remove_bb, /* delete_basic_block */
5425 tree_split_block, /* split_block */
5426 tree_move_block_after, /* move_block_after */
5427 tree_can_merge_blocks_p, /* can_merge_blocks_p */
5428 tree_merge_blocks, /* merge_blocks */
5429 tree_predict_edge, /* predict_edge */
5430 tree_predicted_by_p, /* predicted_by_p */
5431 tree_can_duplicate_bb_p, /* can_duplicate_block_p */
5432 tree_duplicate_bb, /* duplicate_block */
5433 tree_split_edge, /* split_edge */
5434 tree_make_forwarder_block, /* make_forwarder_block */
5435 NULL, /* tidy_fallthru_edge */
5436 tree_block_ends_with_call_p, /* block_ends_with_call_p */
5437 tree_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
5438 tree_flow_call_edges_add, /* flow_call_edges_add */
5439 tree_execute_on_growing_pred, /* execute_on_growing_pred */
5440 tree_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
5441 tree_duplicate_loop_to_header_edge, /* duplicate loop for trees */
5442 tree_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
5443 tree_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
5444 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
5445 flush_pending_stmts /* flush_pending_stmts */
5449 /* Split all critical edges. */
5451 static void
5452 split_critical_edges (void)
5454 basic_block bb;
5455 edge e;
5456 edge_iterator ei;
5458 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
5459 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
5460 mappings around the calls to split_edge. */
5461 start_recording_case_labels ();
5462 FOR_ALL_BB (bb)
5464 FOR_EACH_EDGE (e, ei, bb->succs)
5465 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
5467 split_edge (e);
5470 end_recording_case_labels ();
5473 struct tree_opt_pass pass_split_crit_edges =
5475 "crited", /* name */
5476 NULL, /* gate */
5477 split_critical_edges, /* execute */
5478 NULL, /* sub */
5479 NULL, /* next */
5480 0, /* static_pass_number */
5481 TV_TREE_SPLIT_EDGES, /* tv_id */
5482 PROP_cfg, /* properties required */
5483 PROP_no_crit_edges, /* properties_provided */
5484 0, /* properties_destroyed */
5485 0, /* todo_flags_start */
5486 TODO_dump_func, /* todo_flags_finish */
5487 0 /* letter */
5491 /* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
5492 a temporary, make sure and register it to be renamed if necessary,
5493 and finally return the temporary. Put the statements to compute
5494 EXP before the current statement in BSI. */
5496 tree
5497 gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
5499 tree t, new_stmt, orig_stmt;
5501 if (is_gimple_val (exp))
5502 return exp;
5504 t = make_rename_temp (type, NULL);
5505 new_stmt = build2 (MODIFY_EXPR, type, t, exp);
5507 orig_stmt = bsi_stmt (*bsi);
5508 SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
5509 TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
5511 bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
5513 return t;
5516 /* Build a ternary operation and gimplify it. Emit code before BSI.
5517 Return the gimple_val holding the result. */
5519 tree
5520 gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
5521 tree type, tree a, tree b, tree c)
5523 tree ret;
5525 ret = fold_build3 (code, type, a, b, c);
5526 STRIP_NOPS (ret);
5528 return gimplify_val (bsi, type, ret);
5531 /* Build a binary operation and gimplify it. Emit code before BSI.
5532 Return the gimple_val holding the result. */
5534 tree
5535 gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
5536 tree type, tree a, tree b)
5538 tree ret;
5540 ret = fold_build2 (code, type, a, b);
5541 STRIP_NOPS (ret);
5543 return gimplify_val (bsi, type, ret);
5546 /* Build a unary operation and gimplify it. Emit code before BSI.
5547 Return the gimple_val holding the result. */
5549 tree
5550 gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
5551 tree a)
5553 tree ret;
5555 ret = fold_build1 (code, type, a);
5556 STRIP_NOPS (ret);
5558 return gimplify_val (bsi, type, ret);
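/* For instance, a pass that wants to materialize A * B + C just before
   the statement at BSI could write (a sketch; TYPE, A, B, C and BSI are
   the caller's own data):

     tree t = gimplify_build2 (bsi, MULT_EXPR, type, a, b);
     t = gimplify_build2 (bsi, PLUS_EXPR, type, t, c);

   Each call either folds to a GIMPLE value directly or emits a
   MODIFY_EXPR into a fresh temporary via gimplify_val above.  */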
5563 /* Emit return warnings. */
5565 static void
5566 execute_warn_function_return (void)
5568 #ifdef USE_MAPPED_LOCATION
5569 source_location location;
5570 #else
5571 location_t *locus;
5572 #endif
5573 tree last;
5574 edge e;
5575 edge_iterator ei;
5577 /* If we have a path to EXIT, then we do return. */
5578 if (TREE_THIS_VOLATILE (cfun->decl)
5579 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
5581 #ifdef USE_MAPPED_LOCATION
5582 location = UNKNOWN_LOCATION;
5583 #else
5584 locus = NULL;
5585 #endif
5586 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5588 last = last_stmt (e->src);
5589 if (TREE_CODE (last) == RETURN_EXPR
5590 #ifdef USE_MAPPED_LOCATION
5591 && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
5592 #else
5593 && (locus = EXPR_LOCUS (last)) != NULL)
5594 #endif
5595 break;
5597 #ifdef USE_MAPPED_LOCATION
5598 if (location == UNKNOWN_LOCATION)
5599 location = cfun->function_end_locus;
5600 warning (0, "%H%<noreturn%> function does return", &location);
5601 #else
5602 if (!locus)
5603 locus = &cfun->function_end_locus;
5604 warning (0, "%H%<noreturn%> function does return", locus);
5605 #endif
5608 /* If we see "return;" in some basic block, then we do reach the end
5609 without returning a value. */
5610 else if (warn_return_type
5611 && !TREE_NO_WARNING (cfun->decl)
5612 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
5613 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
5615 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5617 tree last = last_stmt (e->src);
5618 if (TREE_CODE (last) == RETURN_EXPR
5619 && TREE_OPERAND (last, 0) == NULL
5620 && !TREE_NO_WARNING (last))
5622 #ifdef USE_MAPPED_LOCATION
5623 location = EXPR_LOCATION (last);
5624 if (location == UNKNOWN_LOCATION)
5625 location = cfun->function_end_locus;
5626 warning (0, "%Hcontrol reaches end of non-void function", &location);
5627 #else
5628 locus = EXPR_LOCUS (last);
5629 if (!locus)
5630 locus = &cfun->function_end_locus;
5631 warning (0, "%Hcontrol reaches end of non-void function", locus);
5632 #endif
5633 TREE_NO_WARNING (cfun->decl) = 1;
5634 break;
5641 /* Given a basic block B which ends with a conditional and has
5642 precisely two successors, determine which of the edges is taken if
5643 the conditional is true and which is taken if the conditional is
5644 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
5646 void
5647 extract_true_false_edges_from_block (basic_block b,
5648 edge *true_edge,
5649 edge *false_edge)
5651 edge e = EDGE_SUCC (b, 0);
5653 if (e->flags & EDGE_TRUE_VALUE)
5655 *true_edge = e;
5656 *false_edge = EDGE_SUCC (b, 1);
5658 else
5660 *false_edge = e;
5661 *true_edge = EDGE_SUCC (b, 1);
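/* Example use (a sketch; BB is assumed to end in a COND_EXPR with
   exactly two successors):

     edge true_edge, false_edge;
     extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

   TRUE_EDGE->dest is then the block reached when the condition holds.  */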
5665 struct tree_opt_pass pass_warn_function_return =
5667 NULL, /* name */
5668 NULL, /* gate */
5669 execute_warn_function_return, /* execute */
5670 NULL, /* sub */
5671 NULL, /* next */
5672 0, /* static_pass_number */
5673 0, /* tv_id */
5674 PROP_cfg, /* properties_required */
5675 0, /* properties_provided */
5676 0, /* properties_destroyed */
5677 0, /* todo_flags_start */
5678 0, /* todo_flags_finish */
5679 0 /* letter */
5682 /* Emit noreturn warnings. */
5684 static void
5685 execute_warn_function_noreturn (void)
5687 if (warn_missing_noreturn
5688 && !TREE_THIS_VOLATILE (cfun->decl)
5689 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
5690 && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl))
5691 warning (OPT_Wmissing_noreturn, "%Jfunction might be possible candidate "
5692 "for attribute %<noreturn%>",
5693 cfun->decl);
5696 struct tree_opt_pass pass_warn_function_noreturn =
5698 NULL, /* name */
5699 NULL, /* gate */
5700 execute_warn_function_noreturn, /* execute */
5701 NULL, /* sub */
5702 NULL, /* next */
5703 0, /* static_pass_number */
5704 0, /* tv_id */
5705 PROP_cfg, /* properties_required */
5706 0, /* properties_provided */
5707 0, /* properties_destroyed */
5708 0, /* todo_flags_start */
5709 0, /* todo_flags_finish */
5710 0 /* letter */