Daily bump.
[official-gcc.git] / gcc / tree-cfg.c
blob320bc8ef61dbc86bd1be2cf3e30092440df0a043
1 /* Control flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007
3 Free Software Foundation, Inc.
4 Contributed by Diego Novillo <dnovillo@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "output.h"
32 #include "flags.h"
33 #include "function.h"
34 #include "expr.h"
35 #include "ggc.h"
36 #include "langhooks.h"
37 #include "diagnostic.h"
38 #include "tree-flow.h"
39 #include "timevar.h"
40 #include "tree-dump.h"
41 #include "tree-pass.h"
42 #include "toplev.h"
43 #include "except.h"
44 #include "cfgloop.h"
45 #include "cfglayout.h"
46 #include "tree-ssa-propagate.h"
47 #include "value-prof.h"
48 #include "pointer-set.h"
49 #include "tree-inline.h"
51 /* This file contains functions for building the Control Flow Graph (CFG)
52 for a function tree. */
54 /* Local declarations. */
56 /* Initial capacity for the basic block array. */
57 static const int initial_cfg_capacity = 20;
59 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
60 which use a particular edge. The CASE_LABEL_EXPRs are chained together
61 via their TREE_CHAIN field, which we clear after we're done with the
62 hash table to prevent problems with duplication of SWITCH_EXPRs.
64 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
65 update the case vector in response to edge redirections.
67 Right now this table is set up and torn down at key points in the
68 compilation process. It would be nice if we could make the table
69 more persistent. The key is getting notification of changes to
70 the CFG (particularly edge removal, creation and redirection). */
72 static struct pointer_map_t *edge_to_cases;
74 /* CFG statistics. */
75 struct cfg_stats_d
77 long num_merged_labels;
80 static struct cfg_stats_d cfg_stats;
82 /* Nonzero if we found a computed goto while building basic blocks. */
83 static bool found_computed_goto;
85 /* Basic blocks and flowgraphs. */
86 static basic_block create_bb (void *, void *, basic_block);
87 static void make_blocks (tree);
88 static void factor_computed_gotos (void);
90 /* Edges. */
91 static void make_edges (void);
92 static void make_cond_expr_edges (basic_block);
93 static void make_switch_expr_edges (basic_block);
94 static void make_goto_expr_edges (basic_block);
95 static edge tree_redirect_edge_and_branch (edge, basic_block);
96 static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
97 static unsigned int split_critical_edges (void);
99 /* Various helpers. */
100 static inline bool stmt_starts_bb_p (const_tree, const_tree);
101 static int tree_verify_flow_info (void);
102 static void tree_make_forwarder_block (edge);
103 static void tree_cfg2vcg (FILE *);
104 static inline void change_bb_for_stmt (tree t, basic_block bb);
106 /* Flowgraph optimization and cleanup. */
107 static void tree_merge_blocks (basic_block, basic_block);
108 static bool tree_can_merge_blocks_p (basic_block, basic_block);
109 static void remove_bb (basic_block);
110 static edge find_taken_edge_computed_goto (basic_block, tree);
111 static edge find_taken_edge_cond_expr (basic_block, tree);
112 static edge find_taken_edge_switch_expr (basic_block, tree);
113 static tree find_case_label_for_value (tree, tree);
115 void
116 init_empty_tree_cfg (void)
118 /* Initialize the basic block array. */
119 init_flow ();
120 profile_status = PROFILE_ABSENT;
121 n_basic_blocks = NUM_FIXED_BLOCKS;
122 last_basic_block = NUM_FIXED_BLOCKS;
123 basic_block_info = VEC_alloc (basic_block, gc, initial_cfg_capacity);
124 VEC_safe_grow_cleared (basic_block, gc, basic_block_info,
125 initial_cfg_capacity);
127 /* Build a mapping of labels to their associated blocks. */
128 label_to_block_map = VEC_alloc (basic_block, gc, initial_cfg_capacity);
129 VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
130 initial_cfg_capacity);
132 SET_BASIC_BLOCK (ENTRY_BLOCK, ENTRY_BLOCK_PTR);
133 SET_BASIC_BLOCK (EXIT_BLOCK, EXIT_BLOCK_PTR);
134 ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
135 EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
138 /*---------------------------------------------------------------------------
139 Create basic blocks
140 ---------------------------------------------------------------------------*/
142 /* Entry point to the CFG builder for trees. TP points to the list of
143 statements to be added to the flowgraph. */
145 static void
146 build_tree_cfg (tree *tp)
148 /* Register specific tree functions. */
149 tree_register_cfg_hooks ();
151 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
153 init_empty_tree_cfg ();
155 found_computed_goto = 0;
156 make_blocks (*tp);
158 /* Computed gotos are hell to deal with, especially if there are
159 lots of them with a large number of destinations. So we factor
160 them to a common computed goto location before we build the
161 edge list. After we convert back to normal form, we will un-factor
162 the computed gotos since factoring introduces an unwanted jump. */
163 if (found_computed_goto)
164 factor_computed_gotos ();
166 /* Make sure there is always at least one block, even if it's empty. */
167 if (n_basic_blocks == NUM_FIXED_BLOCKS)
168 create_empty_bb (ENTRY_BLOCK_PTR);
170 /* Adjust the size of the array. */
171 if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
172 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);
174 /* To speed up statement iterator walks, we first purge dead labels. */
175 cleanup_dead_labels ();
177 /* Group case nodes to reduce the number of edges.
178 We do this after cleaning up dead labels because otherwise we miss
179 a lot of obvious case merging opportunities. */
180 group_case_labels ();
182 /* Create the edges of the flowgraph. */
183 make_edges ();
184 cleanup_dead_labels ();
186 /* Debugging dumps. */
188 /* Write the flowgraph to a VCG file. */
190 int local_dump_flags;
191 FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
192 if (vcg_file)
194 tree_cfg2vcg (vcg_file);
195 dump_end (TDI_vcg, vcg_file);
199 #ifdef ENABLE_CHECKING
200 verify_stmts ();
201 #endif
203 /* Dump a textual representation of the flowgraph. */
204 if (dump_file)
205 dump_tree_cfg (dump_file, dump_flags);
208 static unsigned int
209 execute_build_cfg (void)
211 build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
212 return 0;
215 struct tree_opt_pass pass_build_cfg =
217 "cfg", /* name */
218 NULL, /* gate */
219 execute_build_cfg, /* execute */
220 NULL, /* sub */
221 NULL, /* next */
222 0, /* static_pass_number */
223 TV_TREE_CFG, /* tv_id */
224 PROP_gimple_leh, /* properties_required */
225 PROP_cfg, /* properties_provided */
226 0, /* properties_destroyed */
227 0, /* todo_flags_start */
228 TODO_verify_stmts | TODO_cleanup_cfg, /* todo_flags_finish */
229 0 /* letter */
232 /* Search the CFG for any computed gotos. If found, factor them to a
233 common computed goto site. Also record the location of that site so
234 that we can un-factor the gotos after we have converted back to
235 normal form. */
237 static void
238 factor_computed_gotos (void)
240 basic_block bb;
241 tree factored_label_decl = NULL;
242 tree var = NULL;
243 tree factored_computed_goto_label = NULL;
244 tree factored_computed_goto = NULL;
246 /* We know there are one or more computed gotos in this function.
247 Examine the last statement in each basic block to see if the block
248 ends with a computed goto. */
250 FOR_EACH_BB (bb)
252 block_stmt_iterator bsi = bsi_last (bb);
253 tree last;
255 if (bsi_end_p (bsi))
256 continue;
257 last = bsi_stmt (bsi);
259 /* Ignore the computed goto we create when we factor the original
260 computed gotos. */
261 if (last == factored_computed_goto)
262 continue;
264 /* If the last statement is a computed goto, factor it. */
265 if (computed_goto_p (last))
267 tree assignment;
269 /* The first time we find a computed goto we need to create
270 the factored goto block and the variable each original
271 computed goto will use for their goto destination. */
272 if (! factored_computed_goto)
274 basic_block new_bb = create_empty_bb (bb);
275 block_stmt_iterator new_bsi = bsi_start (new_bb);
277 /* Create the destination of the factored goto. Each original
278 computed goto will put its desired destination into this
279 variable and jump to the label we create immediately
280 below. */
281 var = create_tmp_var (ptr_type_node, "gotovar");
283 /* Build a label for the new block which will contain the
284 factored computed goto. */
285 factored_label_decl = create_artificial_label ();
286 factored_computed_goto_label
287 = build1 (LABEL_EXPR, void_type_node, factored_label_decl);
288 bsi_insert_after (&new_bsi, factored_computed_goto_label,
289 BSI_NEW_STMT);
291 /* Build our new computed goto. */
292 factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
293 bsi_insert_after (&new_bsi, factored_computed_goto,
294 BSI_NEW_STMT);
297 /* Copy the original computed goto's destination into VAR. */
298 assignment = build_gimple_modify_stmt (var,
299 GOTO_DESTINATION (last));
300 bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);
302 /* And re-vector the computed goto to the new destination. */
303 GOTO_DESTINATION (last) = factored_label_decl;
309 /* Build a flowgraph for the statement_list STMT_LIST. */
311 static void
312 make_blocks (tree stmt_list)
314 tree_stmt_iterator i = tsi_start (stmt_list);
315 tree stmt = NULL;
316 bool start_new_block = true;
317 bool first_stmt_of_list = true;
318 basic_block bb = ENTRY_BLOCK_PTR;
320 while (!tsi_end_p (i))
322 tree prev_stmt;
324 prev_stmt = stmt;
325 stmt = tsi_stmt (i);
327 /* If the statement starts a new basic block or if we have determined
328 in a previous pass that we need to create a new block for STMT, do
329 so now. */
330 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
332 if (!first_stmt_of_list)
333 stmt_list = tsi_split_statement_list_before (&i);
334 bb = create_basic_block (stmt_list, NULL, bb);
335 start_new_block = false;
338 /* Now add STMT to BB and create the subgraphs for special statement
339 codes. */
340 set_bb_for_stmt (stmt, bb);
342 if (computed_goto_p (stmt))
343 found_computed_goto = true;
345 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
346 next iteration. */
347 if (stmt_ends_bb_p (stmt))
348 start_new_block = true;
350 tsi_next (&i);
351 first_stmt_of_list = false;
356 /* Create and return a new empty basic block after bb AFTER. */
358 static basic_block
359 create_bb (void *h, void *e, basic_block after)
361 basic_block bb;
363 gcc_assert (!e);
365 /* Create and initialize a new basic block. Since alloc_block uses
366 ggc_alloc_cleared to allocate a basic block, we do not have to
367 clear the newly allocated basic block here. */
368 bb = alloc_block ();
370 bb->index = last_basic_block;
371 bb->flags = BB_NEW;
372 bb->il.tree = GGC_CNEW (struct tree_bb_info);
373 set_bb_stmt_list (bb, h ? (tree) h : alloc_stmt_list ());
375 /* Add the new block to the linked list of blocks. */
376 link_block (bb, after);
378 /* Grow the basic block array if needed. */
379 if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
381 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
382 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
385 /* Add the newly created block to the array. */
386 SET_BASIC_BLOCK (last_basic_block, bb);
388 n_basic_blocks++;
389 last_basic_block++;
391 return bb;
395 /*---------------------------------------------------------------------------
396 Edge creation
397 ---------------------------------------------------------------------------*/
399 /* Fold COND_EXPR_COND of each COND_EXPR. */
401 void
402 fold_cond_expr_cond (void)
404 basic_block bb;
406 FOR_EACH_BB (bb)
408 tree stmt = last_stmt (bb);
410 if (stmt
411 && TREE_CODE (stmt) == COND_EXPR)
413 tree cond;
414 bool zerop, onep;
416 fold_defer_overflow_warnings ();
417 cond = fold (COND_EXPR_COND (stmt));
418 zerop = integer_zerop (cond);
419 onep = integer_onep (cond);
420 fold_undefer_overflow_warnings (zerop || onep,
421 stmt,
422 WARN_STRICT_OVERFLOW_CONDITIONAL);
423 if (zerop)
424 COND_EXPR_COND (stmt) = boolean_false_node;
425 else if (onep)
426 COND_EXPR_COND (stmt) = boolean_true_node;
431 /* Join all the blocks in the flowgraph. */
433 static void
434 make_edges (void)
436 basic_block bb;
437 struct omp_region *cur_region = NULL;
439 /* Create an edge from entry to the first block with executable
440 statements in it. */
441 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);
443 /* Traverse the basic block array placing edges. */
444 FOR_EACH_BB (bb)
446 tree last = last_stmt (bb);
447 bool fallthru;
449 if (last)
451 enum tree_code code = TREE_CODE (last);
452 switch (code)
454 case GOTO_EXPR:
455 make_goto_expr_edges (bb);
456 fallthru = false;
457 break;
458 case RETURN_EXPR:
459 make_edge (bb, EXIT_BLOCK_PTR, 0);
460 fallthru = false;
461 break;
462 case COND_EXPR:
463 make_cond_expr_edges (bb);
464 fallthru = false;
465 break;
466 case SWITCH_EXPR:
467 make_switch_expr_edges (bb);
468 fallthru = false;
469 break;
470 case RESX_EXPR:
471 make_eh_edges (last);
472 fallthru = false;
473 break;
475 case CALL_EXPR:
476 /* If this function receives a nonlocal goto, then we need to
477 make edges from this call site to all the nonlocal goto
478 handlers. */
479 if (tree_can_make_abnormal_goto (last))
480 make_abnormal_goto_edges (bb, true);
482 /* If this statement has reachable exception handlers, then
483 create abnormal edges to them. */
484 make_eh_edges (last);
486 /* Some calls are known not to return. */
487 fallthru = !(call_expr_flags (last) & ECF_NORETURN);
488 break;
490 case MODIFY_EXPR:
491 gcc_unreachable ();
493 case GIMPLE_MODIFY_STMT:
494 if (is_ctrl_altering_stmt (last))
496 /* A GIMPLE_MODIFY_STMT may have a CALL_EXPR on its RHS and
497 the CALL_EXPR may have an abnormal edge. Search the RHS
498 for this case and create any required edges. */
499 if (tree_can_make_abnormal_goto (last))
500 make_abnormal_goto_edges (bb, true);
502 make_eh_edges (last);
504 fallthru = true;
505 break;
507 case OMP_PARALLEL:
508 case OMP_FOR:
509 case OMP_SINGLE:
510 case OMP_MASTER:
511 case OMP_ORDERED:
512 case OMP_CRITICAL:
513 case OMP_SECTION:
514 cur_region = new_omp_region (bb, code, cur_region);
515 fallthru = true;
516 break;
518 case OMP_SECTIONS:
519 cur_region = new_omp_region (bb, code, cur_region);
520 fallthru = true;
521 break;
523 case OMP_SECTIONS_SWITCH:
524 fallthru = false;
525 break;
527 case OMP_RETURN:
528 /* In the case of an OMP_SECTION, the edge will go somewhere
529 other than the next block. This will be created later. */
530 cur_region->exit = bb;
531 fallthru = cur_region->type != OMP_SECTION;
532 cur_region = cur_region->outer;
533 break;
535 case OMP_CONTINUE:
536 cur_region->cont = bb;
537 switch (cur_region->type)
539 case OMP_FOR:
540 /* Make the loopback edge. */
541 make_edge (bb, single_succ (cur_region->entry), 0);
543 /* Create an edge from OMP_FOR to exit, which corresponds to
544 the case that the body of the loop is not executed at
545 all. */
546 make_edge (cur_region->entry, bb->next_bb, 0);
547 fallthru = true;
548 break;
550 case OMP_SECTIONS:
551 /* Wire up the edges into and out of the nested sections. */
553 basic_block switch_bb = single_succ (cur_region->entry);
555 struct omp_region *i;
556 for (i = cur_region->inner; i ; i = i->next)
558 gcc_assert (i->type == OMP_SECTION);
559 make_edge (switch_bb, i->entry, 0);
560 make_edge (i->exit, bb, EDGE_FALLTHRU);
563 /* Make the loopback edge to the block with
564 OMP_SECTIONS_SWITCH. */
565 make_edge (bb, switch_bb, 0);
567 /* Make the edge from the switch to exit. */
568 make_edge (switch_bb, bb->next_bb, 0);
569 fallthru = false;
571 break;
573 default:
574 gcc_unreachable ();
576 break;
578 default:
579 gcc_assert (!stmt_ends_bb_p (last));
580 fallthru = true;
583 else
584 fallthru = true;
586 if (fallthru)
587 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
590 if (root_omp_region)
591 free_omp_regions ();
593 /* Fold COND_EXPR_COND of each COND_EXPR. */
594 fold_cond_expr_cond ();
598 /* Create the edges for a COND_EXPR starting at block BB.
599 At this point, both clauses must contain only simple gotos. */
601 static void
602 make_cond_expr_edges (basic_block bb)
604 tree entry = last_stmt (bb);
605 basic_block then_bb, else_bb;
606 tree then_label, else_label;
607 edge e;
609 gcc_assert (entry);
610 gcc_assert (TREE_CODE (entry) == COND_EXPR);
612 /* Entry basic blocks for each component. */
613 then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
614 else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
615 then_bb = label_to_block (then_label);
616 else_bb = label_to_block (else_label);
618 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
619 #ifdef USE_MAPPED_LOCATION
620 e->goto_locus = EXPR_LOCATION (COND_EXPR_THEN (entry));
621 #else
622 e->goto_locus = EXPR_LOCUS (COND_EXPR_THEN (entry));
623 #endif
624 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
625 if (e)
627 #ifdef USE_MAPPED_LOCATION
628 e->goto_locus = EXPR_LOCATION (COND_EXPR_ELSE (entry));
629 #else
630 e->goto_locus = EXPR_LOCUS (COND_EXPR_ELSE (entry));
631 #endif
634 /* We do not need the gotos anymore. */
635 COND_EXPR_THEN (entry) = NULL_TREE;
636 COND_EXPR_ELSE (entry) = NULL_TREE;
640 /* Called for each element in the hash table (P) as we delete the
641 edge to cases hash table.
643 Clear all the TREE_CHAINs to prevent problems with copying of
644 SWITCH_EXPRs and structure sharing rules, then free the hash table
645 element. */
647 static bool
648 edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
649 void *data ATTRIBUTE_UNUSED)
651 tree t, next;
653 for (t = (tree) *value; t; t = next)
655 next = TREE_CHAIN (t);
656 TREE_CHAIN (t) = NULL;
659 *value = NULL;
660 return false;
663 /* Start recording information mapping edges to case labels. */
665 void
666 start_recording_case_labels (void)
668 gcc_assert (edge_to_cases == NULL);
669 edge_to_cases = pointer_map_create ();
672 /* Return nonzero if we are recording information for case labels. */
674 static bool
675 recording_case_labels_p (void)
677 return (edge_to_cases != NULL);
680 /* Stop recording information mapping edges to case labels and
681 remove any information we have recorded. */
682 void
683 end_recording_case_labels (void)
685 pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
686 pointer_map_destroy (edge_to_cases);
687 edge_to_cases = NULL;
690 /* If we are inside a {start,end}_recording_cases block, then return
691 a chain of CASE_LABEL_EXPRs from T which reference E.
693 Otherwise return NULL. */
695 static tree
696 get_cases_for_edge (edge e, tree t)
698 void **slot;
699 size_t i, n;
700 tree vec;
702 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
703 chains available. Return NULL so the caller can detect this case. */
704 if (!recording_case_labels_p ())
705 return NULL;
707 slot = pointer_map_contains (edge_to_cases, e);
708 if (slot)
709 return (tree) *slot;
711 /* If we did not find E in the hash table, then this must be the first
712 time we have been queried for information about E & T. Add all the
713 elements from T to the hash table then perform the query again. */
715 vec = SWITCH_LABELS (t);
716 n = TREE_VEC_LENGTH (vec);
717 for (i = 0; i < n; i++)
719 tree elt = TREE_VEC_ELT (vec, i);
720 tree lab = CASE_LABEL (elt);
721 basic_block label_bb = label_to_block (lab);
722 edge this_edge = find_edge (e->src, label_bb);
724 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
725 a new chain. */
726 slot = pointer_map_insert (edge_to_cases, this_edge);
727 TREE_CHAIN (elt) = (tree) *slot;
728 *slot = elt;
731 return (tree) *pointer_map_contains (edge_to_cases, e);
734 /* Create the edges for a SWITCH_EXPR starting at block BB.
735 At this point, the switch body has been lowered and the
736 SWITCH_LABELS filled in, so this is in effect a multi-way branch. */
738 static void
739 make_switch_expr_edges (basic_block bb)
741 tree entry = last_stmt (bb);
742 size_t i, n;
743 tree vec;
745 vec = SWITCH_LABELS (entry);
746 n = TREE_VEC_LENGTH (vec);
748 for (i = 0; i < n; ++i)
750 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
751 basic_block label_bb = label_to_block (lab);
752 make_edge (bb, label_bb, 0);
757 /* Return the basic block holding label DEST. */
759 basic_block
760 label_to_block_fn (struct function *ifun, tree dest)
762 int uid = LABEL_DECL_UID (dest);
764 /* We would die hard when faced by an undefined label. Emit a label to
765 the very first basic block. This will hopefully make even the dataflow
766 and undefined variable warnings quite right. */
767 if ((errorcount || sorrycount) && uid < 0)
769 block_stmt_iterator bsi =
770 bsi_start (BASIC_BLOCK (NUM_FIXED_BLOCKS));
771 tree stmt;
773 stmt = build1 (LABEL_EXPR, void_type_node, dest);
774 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
775 uid = LABEL_DECL_UID (dest);
777 if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
778 <= (unsigned int) uid)
779 return NULL;
780 return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
783 /* Create edges for an abnormal goto statement at block BB. If FOR_CALL
784 is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */
786 void
787 make_abnormal_goto_edges (basic_block bb, bool for_call)
789 basic_block target_bb;
790 block_stmt_iterator bsi;
792 FOR_EACH_BB (target_bb)
793 for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
795 tree target = bsi_stmt (bsi);
797 if (TREE_CODE (target) != LABEL_EXPR)
798 break;
800 target = LABEL_EXPR_LABEL (target);
802 /* Make an edge to every label block that has been marked as a
803 potential target for a computed goto or a non-local goto. */
804 if ((FORCED_LABEL (target) && !for_call)
805 || (DECL_NONLOCAL (target) && for_call))
807 make_edge (bb, target_bb, EDGE_ABNORMAL);
808 break;
813 /* Create edges for a goto statement at block BB. */
815 static void
816 make_goto_expr_edges (basic_block bb)
818 block_stmt_iterator last = bsi_last (bb);
819 tree goto_t = bsi_stmt (last);
821 /* A simple GOTO creates normal edges. */
822 if (simple_goto_p (goto_t))
824 tree dest = GOTO_DESTINATION (goto_t);
825 edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
826 #ifdef USE_MAPPED_LOCATION
827 e->goto_locus = EXPR_LOCATION (goto_t);
828 #else
829 e->goto_locus = EXPR_LOCUS (goto_t);
830 #endif
831 bsi_remove (&last, true);
832 return;
835 /* A computed GOTO creates abnormal edges. */
836 make_abnormal_goto_edges (bb, false);
840 /*---------------------------------------------------------------------------
841 Flowgraph analysis
842 ---------------------------------------------------------------------------*/
844 /* Cleanup useless labels in basic blocks. This is something we wish
845 to do early because it allows us to group case labels before creating
846 the edges for the CFG, and it speeds up block statement iterators in
847 all passes later on.
848 We rerun this pass after CFG is created, to get rid of the labels that
849 are no longer referenced. After then we do not run it any more, since
850 (almost) no new labels should be created. */
852 /* A map from basic block index to the leading label of that block. */
853 static struct label_record
855 /* The label. */
856 tree label;
858 /* True if the label is referenced from somewhere. */
859 bool used;
860 } *label_for_bb;
862 /* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
863 static void
864 update_eh_label (struct eh_region *region)
866 tree old_label = get_eh_region_tree_label (region);
867 if (old_label)
869 tree new_label;
870 basic_block bb = label_to_block (old_label);
872 /* ??? After optimizing, there may be EH regions with labels
873 that have already been removed from the function body, so
874 there is no basic block for them. */
875 if (! bb)
876 return;
878 new_label = label_for_bb[bb->index].label;
879 label_for_bb[bb->index].used = true;
880 set_eh_region_tree_label (region, new_label);
884 /* Given LABEL return the first label in the same basic block. */
885 static tree
886 main_block_label (tree label)
888 basic_block bb = label_to_block (label);
889 tree main_label = label_for_bb[bb->index].label;
891 /* label_to_block possibly inserted undefined label into the chain. */
892 if (!main_label)
894 label_for_bb[bb->index].label = label;
895 main_label = label;
898 label_for_bb[bb->index].used = true;
899 return main_label;
902 /* Cleanup redundant labels. This is a three-step process:
903 1) Find the leading label for each block.
904 2) Redirect all references to labels to the leading labels.
905 3) Cleanup all useless labels. */
907 void
908 cleanup_dead_labels (void)
910 basic_block bb;
911 label_for_bb = XCNEWVEC (struct label_record, last_basic_block);
913 /* Find a suitable label for each block. We use the first user-defined
914 label if there is one, or otherwise just the first label we see. */
915 FOR_EACH_BB (bb)
917 block_stmt_iterator i;
919 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
921 tree label, stmt = bsi_stmt (i);
923 if (TREE_CODE (stmt) != LABEL_EXPR)
924 break;
926 label = LABEL_EXPR_LABEL (stmt);
928 /* If we have not yet seen a label for the current block,
929 remember this one and see if there are more labels. */
930 if (!label_for_bb[bb->index].label)
932 label_for_bb[bb->index].label = label;
933 continue;
936 /* If we did see a label for the current block already, but it
937 is an artificially created label, replace it if the current
938 label is a user defined label. */
939 if (!DECL_ARTIFICIAL (label)
940 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
942 label_for_bb[bb->index].label = label;
943 break;
948 /* Now redirect all jumps/branches to the selected label.
949 First do so for each block ending in a control statement. */
950 FOR_EACH_BB (bb)
952 tree stmt = last_stmt (bb);
953 if (!stmt)
954 continue;
956 switch (TREE_CODE (stmt))
958 case COND_EXPR:
960 tree true_branch, false_branch;
962 true_branch = COND_EXPR_THEN (stmt);
963 false_branch = COND_EXPR_ELSE (stmt);
965 if (true_branch)
966 GOTO_DESTINATION (true_branch)
967 = main_block_label (GOTO_DESTINATION (true_branch));
968 if (false_branch)
969 GOTO_DESTINATION (false_branch)
970 = main_block_label (GOTO_DESTINATION (false_branch));
972 break;
975 case SWITCH_EXPR:
977 size_t i;
978 tree vec = SWITCH_LABELS (stmt);
979 size_t n = TREE_VEC_LENGTH (vec);
981 /* Replace all destination labels. */
982 for (i = 0; i < n; ++i)
984 tree elt = TREE_VEC_ELT (vec, i);
985 tree label = main_block_label (CASE_LABEL (elt));
986 CASE_LABEL (elt) = label;
988 break;
991 /* We have to handle GOTO_EXPRs until they're removed, and we don't
992 remove them until after we've created the CFG edges. */
993 case GOTO_EXPR:
994 if (! computed_goto_p (stmt))
996 GOTO_DESTINATION (stmt)
997 = main_block_label (GOTO_DESTINATION (stmt));
998 break;
1001 default:
1002 break;
1006 for_each_eh_region (update_eh_label);
1008 /* Finally, purge dead labels. All user-defined labels and labels that
1009 can be the target of non-local gotos and labels which have their
1010 address taken are preserved. */
1011 FOR_EACH_BB (bb)
1013 block_stmt_iterator i;
1014 tree label_for_this_bb = label_for_bb[bb->index].label;
1016 if (!label_for_this_bb)
1017 continue;
1019 /* If the main label of the block is unused, we may still remove it. */
1020 if (!label_for_bb[bb->index].used)
1021 label_for_this_bb = NULL;
1023 for (i = bsi_start (bb); !bsi_end_p (i); )
1025 tree label, stmt = bsi_stmt (i);
1027 if (TREE_CODE (stmt) != LABEL_EXPR)
1028 break;
1030 label = LABEL_EXPR_LABEL (stmt);
1032 if (label == label_for_this_bb
1033 || ! DECL_ARTIFICIAL (label)
1034 || DECL_NONLOCAL (label)
1035 || FORCED_LABEL (label))
1036 bsi_next (&i);
1037 else
1038 bsi_remove (&i, true);
1042 free (label_for_bb);
1045 /* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1046 and scan the sorted vector of cases. Combine the ones jumping to the
1047 same label.
1048 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1050 void
1051 group_case_labels (void)
1053 basic_block bb;
1055 FOR_EACH_BB (bb)
1057 tree stmt = last_stmt (bb);
1058 if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
1060 tree labels = SWITCH_LABELS (stmt);
1061 int old_size = TREE_VEC_LENGTH (labels);
1062 int i, j, new_size = old_size;
1063 tree default_case = TREE_VEC_ELT (labels, old_size - 1);
1064 tree default_label;
1066 /* The default label is always the last case in a switch
1067 statement after gimplification. */
1068 default_label = CASE_LABEL (default_case);
1070 /* Look for possible opportunities to merge cases.
1071 Ignore the last element of the label vector because it
1072 must be the default case. */
1073 i = 0;
1074 while (i < old_size - 1)
1076 tree base_case, base_label, base_high;
1077 base_case = TREE_VEC_ELT (labels, i);
1079 gcc_assert (base_case);
1080 base_label = CASE_LABEL (base_case);
1082 /* Discard cases that have the same destination as the
1083 default case. */
1084 if (base_label == default_label)
1086 TREE_VEC_ELT (labels, i) = NULL_TREE;
1087 i++;
1088 new_size--;
1089 continue;
1092 base_high = CASE_HIGH (base_case) ?
1093 CASE_HIGH (base_case) : CASE_LOW (base_case);
1094 i++;
1095 /* Try to merge case labels. Break out when we reach the end
1096 of the label vector or when we cannot merge the next case
1097 label with the current one. */
1098 while (i < old_size - 1)
1100 tree merge_case = TREE_VEC_ELT (labels, i);
1101 tree merge_label = CASE_LABEL (merge_case);
1102 tree t = int_const_binop (PLUS_EXPR, base_high,
1103 integer_one_node, 1);
1105 /* Merge the cases if they jump to the same place,
1106 and their ranges are consecutive. */
1107 if (merge_label == base_label
1108 && tree_int_cst_equal (CASE_LOW (merge_case), t))
1110 base_high = CASE_HIGH (merge_case) ?
1111 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1112 CASE_HIGH (base_case) = base_high;
1113 TREE_VEC_ELT (labels, i) = NULL_TREE;
1114 new_size--;
1115 i++;
1117 else
1118 break;
1122 /* Compress the case labels in the label vector, and adjust the
1123 length of the vector. */
1124 for (i = 0, j = 0; i < new_size; i++)
1126 while (! TREE_VEC_ELT (labels, j))
1127 j++;
1128 TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
1130 TREE_VEC_LENGTH (labels) = new_size;
1135 /* Checks whether we can merge block B into block A. */
1137 static bool
1138 tree_can_merge_blocks_p (basic_block a, basic_block b)
1140 const_tree stmt;
1141 block_stmt_iterator bsi;
1142 tree phi;
1144 if (!single_succ_p (a))
1145 return false;
1147 if (single_succ_edge (a)->flags & EDGE_ABNORMAL)
1148 return false;
1150 if (single_succ (a) != b)
1151 return false;
1153 if (!single_pred_p (b))
1154 return false;
1156 if (b == EXIT_BLOCK_PTR)
1157 return false;
1159 /* If A ends by a statement causing exceptions or something similar, we
1160 cannot merge the blocks. */
1161 /* This CONST_CAST is okay because last_stmt doesn't modify its
1162 argument and the return value is assign to a const_tree. */
1163 stmt = last_stmt (CONST_CAST_BB (a));
1164 if (stmt && stmt_ends_bb_p (stmt))
1165 return false;
1167 /* Do not allow a block with only a non-local label to be merged. */
1168 if (stmt && TREE_CODE (stmt) == LABEL_EXPR
1169 && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
1170 return false;
1172 /* It must be possible to eliminate all phi nodes in B. If ssa form
1173 is not up-to-date, we cannot eliminate any phis; however, if only
1174 some symbols as whole are marked for renaming, this is not a problem,
1175 as phi nodes for those symbols are irrelevant in updating anyway. */
1176 phi = phi_nodes (b);
1177 if (phi)
1179 if (name_mappings_registered_p ())
1180 return false;
1182 for (; phi; phi = PHI_CHAIN (phi))
1183 if (!is_gimple_reg (PHI_RESULT (phi))
1184 && !may_propagate_copy (PHI_RESULT (phi), PHI_ARG_DEF (phi, 0)))
1185 return false;
1188 /* Do not remove user labels. */
1189 for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
1191 stmt = bsi_stmt (bsi);
1192 if (TREE_CODE (stmt) != LABEL_EXPR)
1193 break;
1194 if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
1195 return false;
1198 /* Protect the loop latches. */
1199 if (current_loops
1200 && b->loop_father->latch == b)
1201 return false;
1203 return true;
1206 /* Replaces all uses of NAME by VAL. */
1208 void
1209 replace_uses_by (tree name, tree val)
1211 imm_use_iterator imm_iter;
1212 use_operand_p use;
1213 tree stmt;
1214 edge e;
1216 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1218 if (TREE_CODE (stmt) != PHI_NODE)
1219 push_stmt_changes (&stmt);
1221 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1223 replace_exp (use, val);
1225 if (TREE_CODE (stmt) == PHI_NODE)
1227 e = PHI_ARG_EDGE (stmt, PHI_ARG_INDEX_FROM_USE (use));
1228 if (e->flags & EDGE_ABNORMAL)
1230 /* This can only occur for virtual operands, since
1231 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1232 would prevent replacement. */
1233 gcc_assert (!is_gimple_reg (name));
1234 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1239 if (TREE_CODE (stmt) != PHI_NODE)
1241 tree rhs;
1243 fold_stmt_inplace (stmt);
1244 if (cfgcleanup_altered_bbs)
1245 bitmap_set_bit (cfgcleanup_altered_bbs, bb_for_stmt (stmt)->index);
1247 /* FIXME. This should go in pop_stmt_changes. */
1248 rhs = get_rhs (stmt);
1249 if (TREE_CODE (rhs) == ADDR_EXPR)
1250 recompute_tree_invariant_for_addr_expr (rhs);
1252 maybe_clean_or_replace_eh_stmt (stmt, stmt);
1254 pop_stmt_changes (&stmt);
1258 gcc_assert (has_zero_uses (name));
1260 /* Also update the trees stored in loop structures. */
1261 if (current_loops)
1263 struct loop *loop;
1264 loop_iterator li;
1266 FOR_EACH_LOOP (li, loop, 0)
1268 substitute_in_loop_info (loop, name, val);
1273 /* Merge block B into block A. */
1275 static void
1276 tree_merge_blocks (basic_block a, basic_block b)
1278 block_stmt_iterator bsi;
1279 tree_stmt_iterator last;
1280 tree phi;
1282 if (dump_file)
1283 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1285 /* Remove all single-valued PHI nodes from block B of the form
1286 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1287 bsi = bsi_last (a);
1288 for (phi = phi_nodes (b); phi; phi = phi_nodes (b))
1290 tree def = PHI_RESULT (phi), use = PHI_ARG_DEF (phi, 0);
1291 tree copy;
1292 bool may_replace_uses = may_propagate_copy (def, use);
1294 /* In case we maintain loop closed ssa form, do not propagate arguments
1295 of loop exit phi nodes. */
1296 if (current_loops
1297 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1298 && is_gimple_reg (def)
1299 && TREE_CODE (use) == SSA_NAME
1300 && a->loop_father != b->loop_father)
1301 may_replace_uses = false;
1303 if (!may_replace_uses)
1305 gcc_assert (is_gimple_reg (def));
1307 /* Note that just emitting the copies is fine -- there is no problem
1308 with ordering of phi nodes. This is because A is the single
1309 predecessor of B, therefore results of the phi nodes cannot
1310 appear as arguments of the phi nodes. */
1311 copy = build_gimple_modify_stmt (def, use);
1312 bsi_insert_after (&bsi, copy, BSI_NEW_STMT);
1313 SSA_NAME_DEF_STMT (def) = copy;
1314 remove_phi_node (phi, NULL, false);
1316 else
1318 replace_uses_by (def, use);
1319 remove_phi_node (phi, NULL, true);
1323 /* Ensure that B follows A. */
1324 move_block_after (b, a);
1326 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1327 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1329 /* Remove labels from B and set bb_for_stmt to A for other statements. */
1330 for (bsi = bsi_start (b); !bsi_end_p (bsi);)
1332 if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
1334 tree label = bsi_stmt (bsi);
1336 bsi_remove (&bsi, false);
1337 /* Now that we can thread computed gotos, we might have
1338 a situation where we have a forced label in block B
1339 However, the label at the start of block B might still be
1340 used in other ways (think about the runtime checking for
1341 Fortran assigned gotos). So we can not just delete the
1342 label. Instead we move the label to the start of block A. */
1343 if (FORCED_LABEL (LABEL_EXPR_LABEL (label)))
1345 block_stmt_iterator dest_bsi = bsi_start (a);
1346 bsi_insert_before (&dest_bsi, label, BSI_NEW_STMT);
1349 else
1351 change_bb_for_stmt (bsi_stmt (bsi), a);
1352 bsi_next (&bsi);
1356 /* Merge the chains. */
1357 last = tsi_last (bb_stmt_list (a));
1358 tsi_link_after (&last, bb_stmt_list (b), TSI_NEW_STMT);
1359 set_bb_stmt_list (b, NULL_TREE);
1361 if (cfgcleanup_altered_bbs)
1362 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
1366 /* Return the one of two successors of BB that is not reachable by a
1367 reached by a complex edge, if there is one. Else, return BB. We use
1368 this in optimizations that use post-dominators for their heuristics,
1369 to catch the cases in C++ where function calls are involved. */
1371 basic_block
1372 single_noncomplex_succ (basic_block bb)
1374 edge e0, e1;
1375 if (EDGE_COUNT (bb->succs) != 2)
1376 return bb;
1378 e0 = EDGE_SUCC (bb, 0);
1379 e1 = EDGE_SUCC (bb, 1);
1380 if (e0->flags & EDGE_COMPLEX)
1381 return e1->dest;
1382 if (e1->flags & EDGE_COMPLEX)
1383 return e0->dest;
1385 return bb;
1389 /* Walk the function tree removing unnecessary statements.
1391 * Empty statement nodes are removed
1393 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1395 * Unnecessary COND_EXPRs are removed
1397 * Some unnecessary BIND_EXPRs are removed
1399 Clearly more work could be done. The trick is doing the analysis
1400 and removal fast enough to be a net improvement in compile times.
1402 Note that when we remove a control structure such as a COND_EXPR
1403 BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1404 to ensure we eliminate all the useless code. */
1406 struct rus_data
1408 tree *last_goto;
1409 bool repeat;
1410 bool may_throw;
1411 bool may_branch;
1412 bool has_label;
1415 static void remove_useless_stmts_1 (tree *, struct rus_data *);
1417 static bool
1418 remove_useless_stmts_warn_notreached (tree stmt)
1420 if (EXPR_HAS_LOCATION (stmt))
1422 location_t loc = EXPR_LOCATION (stmt);
1423 if (LOCATION_LINE (loc) > 0)
1425 warning (0, "%Hwill never be executed", &loc);
1426 return true;
1430 switch (TREE_CODE (stmt))
1432 case STATEMENT_LIST:
1434 tree_stmt_iterator i;
1435 for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
1436 if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
1437 return true;
1439 break;
1441 case COND_EXPR:
1442 if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
1443 return true;
1444 if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
1445 return true;
1446 if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
1447 return true;
1448 break;
1450 case TRY_FINALLY_EXPR:
1451 case TRY_CATCH_EXPR:
1452 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
1453 return true;
1454 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
1455 return true;
1456 break;
1458 case CATCH_EXPR:
1459 return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
1460 case EH_FILTER_EXPR:
1461 return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
1462 case BIND_EXPR:
1463 return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));
1465 default:
1466 /* Not a live container. */
1467 break;
1470 return false;
1473 static void
1474 remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
1476 tree then_clause, else_clause, cond;
1477 bool save_has_label, then_has_label, else_has_label;
1479 save_has_label = data->has_label;
1480 data->has_label = false;
1481 data->last_goto = NULL;
1483 remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);
1485 then_has_label = data->has_label;
1486 data->has_label = false;
1487 data->last_goto = NULL;
1489 remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);
1491 else_has_label = data->has_label;
1492 data->has_label = save_has_label | then_has_label | else_has_label;
1494 then_clause = COND_EXPR_THEN (*stmt_p);
1495 else_clause = COND_EXPR_ELSE (*stmt_p);
1496 cond = fold (COND_EXPR_COND (*stmt_p));
1498 /* If neither arm does anything at all, we can remove the whole IF. */
1499 if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
1501 *stmt_p = build_empty_stmt ();
1502 data->repeat = true;
1505 /* If there are no reachable statements in an arm, then we can
1506 zap the entire conditional. */
1507 else if (integer_nonzerop (cond) && !else_has_label)
1509 if (warn_notreached)
1510 remove_useless_stmts_warn_notreached (else_clause);
1511 *stmt_p = then_clause;
1512 data->repeat = true;
1514 else if (integer_zerop (cond) && !then_has_label)
1516 if (warn_notreached)
1517 remove_useless_stmts_warn_notreached (then_clause);
1518 *stmt_p = else_clause;
1519 data->repeat = true;
1522 /* Check a couple of simple things on then/else with single stmts. */
1523 else
1525 tree then_stmt = expr_only (then_clause);
1526 tree else_stmt = expr_only (else_clause);
1528 /* Notice branches to a common destination. */
1529 if (then_stmt && else_stmt
1530 && TREE_CODE (then_stmt) == GOTO_EXPR
1531 && TREE_CODE (else_stmt) == GOTO_EXPR
1532 && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
1534 *stmt_p = then_stmt;
1535 data->repeat = true;
1538 /* If the THEN/ELSE clause merely assigns a value to a variable or
1539 parameter which is already known to contain that value, then
1540 remove the useless THEN/ELSE clause. */
1541 else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1543 if (else_stmt
1544 && TREE_CODE (else_stmt) == GIMPLE_MODIFY_STMT
1545 && GIMPLE_STMT_OPERAND (else_stmt, 0) == cond
1546 && integer_zerop (GIMPLE_STMT_OPERAND (else_stmt, 1)))
1547 COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
1549 else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
1550 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1551 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1552 && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
1554 tree stmt = (TREE_CODE (cond) == EQ_EXPR
1555 ? then_stmt : else_stmt);
1556 tree *location = (TREE_CODE (cond) == EQ_EXPR
1557 ? &COND_EXPR_THEN (*stmt_p)
1558 : &COND_EXPR_ELSE (*stmt_p));
1560 if (stmt
1561 && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
1562 && GIMPLE_STMT_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
1563 && GIMPLE_STMT_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
1564 *location = alloc_stmt_list ();
1568 /* Protect GOTOs in the arm of COND_EXPRs from being removed. They
1569 would be re-introduced during lowering. */
1570 data->last_goto = NULL;
1574 static void
1575 remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
1577 bool save_may_branch, save_may_throw;
1578 bool this_may_branch, this_may_throw;
1580 /* Collect may_branch and may_throw information for the body only. */
1581 save_may_branch = data->may_branch;
1582 save_may_throw = data->may_throw;
1583 data->may_branch = false;
1584 data->may_throw = false;
1585 data->last_goto = NULL;
1587 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1589 this_may_branch = data->may_branch;
1590 this_may_throw = data->may_throw;
1591 data->may_branch |= save_may_branch;
1592 data->may_throw |= save_may_throw;
1593 data->last_goto = NULL;
1595 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1597 /* If the body is empty, then we can emit the FINALLY block without
1598 the enclosing TRY_FINALLY_EXPR. */
1599 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
1601 *stmt_p = TREE_OPERAND (*stmt_p, 1);
1602 data->repeat = true;
1605 /* If the handler is empty, then we can emit the TRY block without
1606 the enclosing TRY_FINALLY_EXPR. */
1607 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1609 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1610 data->repeat = true;
1613 /* If the body neither throws, nor branches, then we can safely
1614 string the TRY and FINALLY blocks together. */
1615 else if (!this_may_branch && !this_may_throw)
1617 tree stmt = *stmt_p;
1618 *stmt_p = TREE_OPERAND (stmt, 0);
1619 append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
1620 data->repeat = true;
1625 static void
1626 remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
1628 bool save_may_throw, this_may_throw;
1629 tree_stmt_iterator i;
1630 tree stmt;
1632 /* Collect may_throw information for the body only. */
1633 save_may_throw = data->may_throw;
1634 data->may_throw = false;
1635 data->last_goto = NULL;
1637 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1639 this_may_throw = data->may_throw;
1640 data->may_throw = save_may_throw;
1642 /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
1643 if (!this_may_throw)
1645 if (warn_notreached)
1646 remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
1647 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1648 data->repeat = true;
1649 return;
1652 /* Process the catch clause specially. We may be able to tell that
1653 no exceptions propagate past this point. */
1655 this_may_throw = true;
1656 i = tsi_start (TREE_OPERAND (*stmt_p, 1));
1657 stmt = tsi_stmt (i);
1658 data->last_goto = NULL;
1660 switch (TREE_CODE (stmt))
1662 case CATCH_EXPR:
1663 for (; !tsi_end_p (i); tsi_next (&i))
1665 stmt = tsi_stmt (i);
1666 /* If we catch all exceptions, then the body does not
1667 propagate exceptions past this point. */
1668 if (CATCH_TYPES (stmt) == NULL)
1669 this_may_throw = false;
1670 data->last_goto = NULL;
1671 remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
1673 break;
1675 case EH_FILTER_EXPR:
1676 if (EH_FILTER_MUST_NOT_THROW (stmt))
1677 this_may_throw = false;
1678 else if (EH_FILTER_TYPES (stmt) == NULL)
1679 this_may_throw = false;
1680 remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
1681 break;
1683 default:
1684 /* Otherwise this is a cleanup. */
1685 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1687 /* If the cleanup is empty, then we can emit the TRY block without
1688 the enclosing TRY_CATCH_EXPR. */
1689 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1691 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1692 data->repeat = true;
1694 break;
1696 data->may_throw |= this_may_throw;
1700 static void
1701 remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
1703 tree block;
1705 /* First remove anything underneath the BIND_EXPR. */
1706 remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);
1708 /* If the BIND_EXPR has no variables, then we can pull everything
1709 up one level and remove the BIND_EXPR, unless this is the toplevel
1710 BIND_EXPR for the current function or an inlined function.
1712 When this situation occurs we will want to apply this
1713 optimization again. */
1714 block = BIND_EXPR_BLOCK (*stmt_p);
1715 if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
1716 && *stmt_p != DECL_SAVED_TREE (current_function_decl)
1717 && (! block
1718 || ! BLOCK_ABSTRACT_ORIGIN (block)
1719 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1720 != FUNCTION_DECL)))
1722 *stmt_p = BIND_EXPR_BODY (*stmt_p);
1723 data->repeat = true;
1728 static void
1729 remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
1731 tree dest = GOTO_DESTINATION (*stmt_p);
1733 data->may_branch = true;
1734 data->last_goto = NULL;
1736 /* Record the last goto expr, so that we can delete it if unnecessary. */
1737 if (TREE_CODE (dest) == LABEL_DECL)
1738 data->last_goto = stmt_p;
1742 static void
1743 remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
1745 tree label = LABEL_EXPR_LABEL (*stmt_p);
1747 data->has_label = true;
1749 /* We do want to jump across non-local label receiver code. */
1750 if (DECL_NONLOCAL (label))
1751 data->last_goto = NULL;
1753 else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
1755 *data->last_goto = build_empty_stmt ();
1756 data->repeat = true;
1759 /* ??? Add something here to delete unused labels. */
1763 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
1764 decl. This allows us to eliminate redundant or useless
1765 calls to "const" functions.
1767 Gimplifier already does the same operation, but we may notice functions
1768 being const and pure once their calls has been gimplified, so we need
1769 to update the flag. */
1771 static void
1772 update_call_expr_flags (tree call)
1774 tree decl = get_callee_fndecl (call);
1775 if (!decl)
1776 return;
1777 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1778 TREE_SIDE_EFFECTS (call) = 0;
1779 if (TREE_NOTHROW (decl))
1780 TREE_NOTHROW (call) = 1;
1784 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1786 void
1787 notice_special_calls (tree t)
1789 int flags = call_expr_flags (t);
1791 if (flags & ECF_MAY_BE_ALLOCA)
1792 current_function_calls_alloca = true;
1793 if (flags & ECF_RETURNS_TWICE)
1794 current_function_calls_setjmp = true;
1798 /* Clear flags set by notice_special_calls. Used by dead code removal
1799 to update the flags. */
1801 void
1802 clear_special_calls (void)
1804 current_function_calls_alloca = false;
1805 current_function_calls_setjmp = false;
1809 static void
1810 remove_useless_stmts_1 (tree *tp, struct rus_data *data)
1812 tree t = *tp, op;
1814 switch (TREE_CODE (t))
1816 case COND_EXPR:
1817 remove_useless_stmts_cond (tp, data);
1818 break;
1820 case TRY_FINALLY_EXPR:
1821 remove_useless_stmts_tf (tp, data);
1822 break;
1824 case TRY_CATCH_EXPR:
1825 remove_useless_stmts_tc (tp, data);
1826 break;
1828 case BIND_EXPR:
1829 remove_useless_stmts_bind (tp, data);
1830 break;
1832 case GOTO_EXPR:
1833 remove_useless_stmts_goto (tp, data);
1834 break;
1836 case LABEL_EXPR:
1837 remove_useless_stmts_label (tp, data);
1838 break;
1840 case RETURN_EXPR:
1841 fold_stmt (tp);
1842 data->last_goto = NULL;
1843 data->may_branch = true;
1844 break;
1846 case CALL_EXPR:
1847 fold_stmt (tp);
1848 data->last_goto = NULL;
1849 notice_special_calls (t);
1850 update_call_expr_flags (t);
1851 if (tree_could_throw_p (t))
1852 data->may_throw = true;
1853 break;
1855 case MODIFY_EXPR:
1856 gcc_unreachable ();
1858 case GIMPLE_MODIFY_STMT:
1859 data->last_goto = NULL;
1860 fold_stmt (tp);
1861 op = get_call_expr_in (t);
1862 if (op)
1864 update_call_expr_flags (op);
1865 notice_special_calls (op);
1867 if (tree_could_throw_p (t))
1868 data->may_throw = true;
1869 break;
1871 case STATEMENT_LIST:
1873 tree_stmt_iterator i = tsi_start (t);
1874 while (!tsi_end_p (i))
1876 t = tsi_stmt (i);
1877 if (IS_EMPTY_STMT (t))
1879 tsi_delink (&i);
1880 continue;
1883 remove_useless_stmts_1 (tsi_stmt_ptr (i), data);
1885 t = tsi_stmt (i);
1886 if (TREE_CODE (t) == STATEMENT_LIST)
1888 tsi_link_before (&i, t, TSI_SAME_STMT);
1889 tsi_delink (&i);
1891 else
1892 tsi_next (&i);
1895 break;
1896 case ASM_EXPR:
1897 fold_stmt (tp);
1898 data->last_goto = NULL;
1899 break;
1901 default:
1902 data->last_goto = NULL;
1903 break;
1907 static unsigned int
1908 remove_useless_stmts (void)
1910 struct rus_data data;
1912 clear_special_calls ();
1916 memset (&data, 0, sizeof (data));
1917 remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
1919 while (data.repeat);
1920 return 0;
1924 struct tree_opt_pass pass_remove_useless_stmts =
1926 "useless", /* name */
1927 NULL, /* gate */
1928 remove_useless_stmts, /* execute */
1929 NULL, /* sub */
1930 NULL, /* next */
1931 0, /* static_pass_number */
1932 0, /* tv_id */
1933 PROP_gimple_any, /* properties_required */
1934 0, /* properties_provided */
1935 0, /* properties_destroyed */
1936 0, /* todo_flags_start */
1937 TODO_dump_func, /* todo_flags_finish */
1938 0 /* letter */
1941 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1943 static void
1944 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1946 tree phi;
1948 /* Since this block is no longer reachable, we can just delete all
1949 of its PHI nodes. */
1950 phi = phi_nodes (bb);
1951 while (phi)
1953 tree next = PHI_CHAIN (phi);
1954 remove_phi_node (phi, NULL_TREE, true);
1955 phi = next;
1958 /* Remove edges to BB's successors. */
1959 while (EDGE_COUNT (bb->succs) > 0)
1960 remove_edge (EDGE_SUCC (bb, 0));
1964 /* Remove statements of basic block BB. */
1966 static void
1967 remove_bb (basic_block bb)
1969 block_stmt_iterator i;
1970 #ifdef USE_MAPPED_LOCATION
1971 source_location loc = UNKNOWN_LOCATION;
1972 #else
1973 source_locus loc = 0;
1974 #endif
1976 if (dump_file)
1978 fprintf (dump_file, "Removing basic block %d\n", bb->index);
1979 if (dump_flags & TDF_DETAILS)
1981 dump_bb (bb, dump_file, 0);
1982 fprintf (dump_file, "\n");
1986 if (current_loops)
1988 struct loop *loop = bb->loop_father;
1990 /* If a loop gets removed, clean up the information associated
1991 with it. */
1992 if (loop->latch == bb
1993 || loop->header == bb)
1994 free_numbers_of_iterations_estimates_loop (loop);
1997 /* Remove all the instructions in the block. */
1998 if (bb_stmt_list (bb) != NULL_TREE)
2000 for (i = bsi_start (bb); !bsi_end_p (i);)
2002 tree stmt = bsi_stmt (i);
2003 if (TREE_CODE (stmt) == LABEL_EXPR
2004 && (FORCED_LABEL (LABEL_EXPR_LABEL (stmt))
2005 || DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt))))
2007 basic_block new_bb;
2008 block_stmt_iterator new_bsi;
2010 /* A non-reachable non-local label may still be referenced.
2011 But it no longer needs to carry the extra semantics of
2012 non-locality. */
2013 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
2015 DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)) = 0;
2016 FORCED_LABEL (LABEL_EXPR_LABEL (stmt)) = 1;
2019 new_bb = bb->prev_bb;
2020 new_bsi = bsi_start (new_bb);
2021 bsi_remove (&i, false);
2022 bsi_insert_before (&new_bsi, stmt, BSI_NEW_STMT);
2024 else
2026 /* Release SSA definitions if we are in SSA. Note that we
2027 may be called when not in SSA. For example,
2028 final_cleanup calls this function via
2029 cleanup_tree_cfg. */
2030 if (gimple_in_ssa_p (cfun))
2031 release_defs (stmt);
2033 bsi_remove (&i, true);
2036 /* Don't warn for removed gotos. Gotos are often removed due to
2037 jump threading, thus resulting in bogus warnings. Not great,
2038 since this way we lose warnings for gotos in the original
2039 program that are indeed unreachable. */
2040 if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
2042 #ifdef USE_MAPPED_LOCATION
2043 if (EXPR_HAS_LOCATION (stmt))
2044 loc = EXPR_LOCATION (stmt);
2045 #else
2046 source_locus t;
2047 t = EXPR_LOCUS (stmt);
2048 if (t && LOCATION_LINE (*t) > 0)
2049 loc = t;
2050 #endif
2055 /* If requested, give a warning that the first statement in the
2056 block is unreachable. We walk statements backwards in the
2057 loop above, so the last statement we process is the first statement
2058 in the block. */
2059 #ifdef USE_MAPPED_LOCATION
2060 if (loc > BUILTINS_LOCATION && LOCATION_LINE (loc) > 0)
2061 warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
2062 #else
2063 if (loc)
2064 warning (OPT_Wunreachable_code, "%Hwill never be executed", loc);
2065 #endif
2067 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2068 bb->il.tree = NULL;
2072 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2073 predicate VAL, return the edge that will be taken out of the block.
2074 If VAL does not match a unique edge, NULL is returned. */
2076 edge
2077 find_taken_edge (basic_block bb, tree val)
2079 tree stmt;
2081 stmt = last_stmt (bb);
2083 gcc_assert (stmt);
2084 gcc_assert (is_ctrl_stmt (stmt));
2085 gcc_assert (val);
2087 if (! is_gimple_min_invariant (val))
2088 return NULL;
2090 if (TREE_CODE (stmt) == COND_EXPR)
2091 return find_taken_edge_cond_expr (bb, val);
2093 if (TREE_CODE (stmt) == SWITCH_EXPR)
2094 return find_taken_edge_switch_expr (bb, val);
2096 if (computed_goto_p (stmt))
2098 /* Only optimize if the argument is a label, if the argument is
2099 not a label then we can not construct a proper CFG.
2101 It may be the case that we only need to allow the LABEL_REF to
2102 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2103 appear inside a LABEL_EXPR just to be safe. */
2104 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2105 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2106 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2107 return NULL;
2110 gcc_unreachable ();
2113 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2114 statement, determine which of the outgoing edges will be taken out of the
2115 block. Return NULL if either edge may be taken. */
2117 static edge
2118 find_taken_edge_computed_goto (basic_block bb, tree val)
2120 basic_block dest;
2121 edge e = NULL;
2123 dest = label_to_block (val);
2124 if (dest)
2126 e = find_edge (bb, dest);
2127 gcc_assert (e != NULL);
2130 return e;
2133 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2134 statement, determine which of the two edges will be taken out of the
2135 block. Return NULL if either edge may be taken. */
2137 static edge
2138 find_taken_edge_cond_expr (basic_block bb, tree val)
2140 edge true_edge, false_edge;
2142 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2144 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2145 return (integer_zerop (val) ? false_edge : true_edge);
2148 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2149 statement, determine which edge will be taken out of the block. Return
2150 NULL if any edge may be taken. */
2152 static edge
2153 find_taken_edge_switch_expr (basic_block bb, tree val)
2155 tree switch_expr, taken_case;
2156 basic_block dest_bb;
2157 edge e;
2159 switch_expr = last_stmt (bb);
2160 taken_case = find_case_label_for_value (switch_expr, val);
2161 dest_bb = label_to_block (CASE_LABEL (taken_case));
2163 e = find_edge (bb, dest_bb);
2164 gcc_assert (e);
2165 return e;
2169 /* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
2170 We can make optimal use here of the fact that the case labels are
2171 sorted: We can do a binary search for a case matching VAL. */
2173 static tree
2174 find_case_label_for_value (tree switch_expr, tree val)
2176 tree vec = SWITCH_LABELS (switch_expr);
2177 size_t low, high, n = TREE_VEC_LENGTH (vec);
2178 tree default_case = TREE_VEC_ELT (vec, n - 1);
2180 for (low = -1, high = n - 1; high - low > 1; )
2182 size_t i = (high + low) / 2;
2183 tree t = TREE_VEC_ELT (vec, i);
2184 int cmp;
2186 /* Cache the result of comparing CASE_LOW and val. */
2187 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2189 if (cmp > 0)
2190 high = i;
2191 else
2192 low = i;
2194 if (CASE_HIGH (t) == NULL)
2196 /* A singe-valued case label. */
2197 if (cmp == 0)
2198 return t;
2200 else
2202 /* A case range. We can only handle integer ranges. */
2203 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2204 return t;
2208 return default_case;
2214 /*---------------------------------------------------------------------------
2215 Debugging functions
2216 ---------------------------------------------------------------------------*/
2218 /* Dump tree-specific information of block BB to file OUTF. */
2220 void
2221 tree_dump_bb (basic_block bb, FILE *outf, int indent)
2223 dump_generic_bb (outf, bb, indent, TDF_VOPS|TDF_MEMSYMS);
2227 /* Dump a basic block on stderr. */
2229 void
2230 debug_tree_bb (basic_block bb)
2232 dump_bb (bb, stderr, 0);
2236 /* Dump basic block with index N on stderr. */
2238 basic_block
2239 debug_tree_bb_n (int n)
2241 debug_tree_bb (BASIC_BLOCK (n));
2242 return BASIC_BLOCK (n);
2246 /* Dump the CFG on stderr.
2248 FLAGS are the same used by the tree dumping functions
2249 (see TDF_* in tree-pass.h). */
2251 void
2252 debug_tree_cfg (int flags)
2254 dump_tree_cfg (stderr, flags);
2258 /* Dump the program showing basic block boundaries on the given FILE.
2260 FLAGS are the same used by the tree dumping functions (see TDF_* in
2261 tree.h). */
2263 void
2264 dump_tree_cfg (FILE *file, int flags)
2266 if (flags & TDF_DETAILS)
2268 const char *funcname
2269 = lang_hooks.decl_printable_name (current_function_decl, 2);
2271 fputc ('\n', file);
2272 fprintf (file, ";; Function %s\n\n", funcname);
2273 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2274 n_basic_blocks, n_edges, last_basic_block);
2276 brief_dump_cfg (file);
2277 fprintf (file, "\n");
2280 if (flags & TDF_STATS)
2281 dump_cfg_stats (file);
2283 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2287 /* Dump CFG statistics on FILE. */
2289 void
2290 dump_cfg_stats (FILE *file)
2292 static long max_num_merged_labels = 0;
2293 unsigned long size, total = 0;
2294 long num_edges;
2295 basic_block bb;
2296 const char * const fmt_str = "%-30s%-13s%12s\n";
2297 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2298 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2299 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2300 const char *funcname
2301 = lang_hooks.decl_printable_name (current_function_decl, 2);
2304 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2306 fprintf (file, "---------------------------------------------------------\n");
2307 fprintf (file, fmt_str, "", " Number of ", "Memory");
2308 fprintf (file, fmt_str, "", " instances ", "used ");
2309 fprintf (file, "---------------------------------------------------------\n");
2311 size = n_basic_blocks * sizeof (struct basic_block_def);
2312 total += size;
2313 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2314 SCALE (size), LABEL (size));
2316 num_edges = 0;
2317 FOR_EACH_BB (bb)
2318 num_edges += EDGE_COUNT (bb->succs);
2319 size = num_edges * sizeof (struct edge_def);
2320 total += size;
2321 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2323 fprintf (file, "---------------------------------------------------------\n");
2324 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2325 LABEL (total));
2326 fprintf (file, "---------------------------------------------------------\n");
2327 fprintf (file, "\n");
2329 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2330 max_num_merged_labels = cfg_stats.num_merged_labels;
2332 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2333 cfg_stats.num_merged_labels, max_num_merged_labels);
2335 fprintf (file, "\n");
2339 /* Dump CFG statistics on stderr. Keep extern so that it's always
2340 linked in the final executable. */
2342 void
2343 debug_cfg_stats (void)
2345 dump_cfg_stats (stderr);
2349 /* Dump the flowgraph to a .vcg FILE. */
2351 static void
2352 tree_cfg2vcg (FILE *file)
2354 edge e;
2355 edge_iterator ei;
2356 basic_block bb;
2357 const char *funcname
2358 = lang_hooks.decl_printable_name (current_function_decl, 2);
2360 /* Write the file header. */
2361 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2362 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2363 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2365 /* Write blocks and edges. */
2366 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2368 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2369 e->dest->index);
2371 if (e->flags & EDGE_FAKE)
2372 fprintf (file, " linestyle: dotted priority: 10");
2373 else
2374 fprintf (file, " linestyle: solid priority: 100");
2376 fprintf (file, " }\n");
2378 fputc ('\n', file);
2380 FOR_EACH_BB (bb)
2382 enum tree_code head_code, end_code;
2383 const char *head_name, *end_name;
2384 int head_line = 0;
2385 int end_line = 0;
2386 tree first = first_stmt (bb);
2387 tree last = last_stmt (bb);
2389 if (first)
2391 head_code = TREE_CODE (first);
2392 head_name = tree_code_name[head_code];
2393 head_line = get_lineno (first);
2395 else
2396 head_name = "no-statement";
2398 if (last)
2400 end_code = TREE_CODE (last);
2401 end_name = tree_code_name[end_code];
2402 end_line = get_lineno (last);
2404 else
2405 end_name = "no-statement";
2407 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2408 bb->index, bb->index, head_name, head_line, end_name,
2409 end_line);
2411 FOR_EACH_EDGE (e, ei, bb->succs)
2413 if (e->dest == EXIT_BLOCK_PTR)
2414 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2415 else
2416 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2418 if (e->flags & EDGE_FAKE)
2419 fprintf (file, " priority: 10 linestyle: dotted");
2420 else
2421 fprintf (file, " priority: 100 linestyle: solid");
2423 fprintf (file, " }\n");
2426 if (bb->next_bb != EXIT_BLOCK_PTR)
2427 fputc ('\n', file);
2430 fputs ("}\n\n", file);
2435 /*---------------------------------------------------------------------------
2436 Miscellaneous helpers
2437 ---------------------------------------------------------------------------*/
2439 /* Return true if T represents a stmt that always transfers control. */
2441 bool
2442 is_ctrl_stmt (const_tree t)
2444 return (TREE_CODE (t) == COND_EXPR
2445 || TREE_CODE (t) == SWITCH_EXPR
2446 || TREE_CODE (t) == GOTO_EXPR
2447 || TREE_CODE (t) == RETURN_EXPR
2448 || TREE_CODE (t) == RESX_EXPR);
2452 /* Return true if T is a statement that may alter the flow of control
2453 (e.g., a call to a non-returning function). */
2455 bool
2456 is_ctrl_altering_stmt (const_tree t)
2458 const_tree call;
2460 gcc_assert (t);
2461 call = get_call_expr_in (CONST_CAST_TREE (t));
2462 if (call)
2464 /* A non-pure/const CALL_EXPR alters flow control if the current
2465 function has nonlocal labels. */
2466 if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
2467 return true;
2469 /* A CALL_EXPR also alters control flow if it does not return. */
2470 if (call_expr_flags (call) & ECF_NORETURN)
2471 return true;
2474 /* OpenMP directives alter control flow. */
2475 if (OMP_DIRECTIVE_P (t))
2476 return true;
2478 /* If a statement can throw, it alters control flow. */
2479 return tree_can_throw_internal (t);
2483 /* Return true if T is a computed goto. */
2485 bool
2486 computed_goto_p (const_tree t)
2488 return (TREE_CODE (t) == GOTO_EXPR
2489 && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
2493 /* Return true if T is a simple local goto. */
2495 bool
2496 simple_goto_p (const_tree t)
2498 return (TREE_CODE (t) == GOTO_EXPR
2499 && TREE_CODE (GOTO_DESTINATION (t)) == LABEL_DECL);
2503 /* Return true if T can make an abnormal transfer of control flow.
2504 Transfers of control flow associated with EH are excluded. */
2506 bool
2507 tree_can_make_abnormal_goto (const_tree t)
2509 if (computed_goto_p (t))
2510 return true;
2511 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
2512 t = GIMPLE_STMT_OPERAND (t, 1);
2513 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2514 t = TREE_OPERAND (t, 0);
2515 if (TREE_CODE (t) == CALL_EXPR)
2516 return TREE_SIDE_EFFECTS (t) && current_function_has_nonlocal_label;
2517 return false;
2521 /* Return true if T should start a new basic block. PREV_T is the
2522 statement preceding T. It is used when T is a label or a case label.
2523 Labels should only start a new basic block if their previous statement
2524 wasn't a label. Otherwise, sequence of labels would generate
2525 unnecessary basic blocks that only contain a single label. */
2527 static inline bool
2528 stmt_starts_bb_p (const_tree t, const_tree prev_t)
2530 if (t == NULL_TREE)
2531 return false;
2533 /* LABEL_EXPRs start a new basic block only if the preceding
2534 statement wasn't a label of the same type. This prevents the
2535 creation of consecutive blocks that have nothing but a single
2536 label. */
2537 if (TREE_CODE (t) == LABEL_EXPR)
2539 /* Nonlocal and computed GOTO targets always start a new block. */
2540 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
2541 || FORCED_LABEL (LABEL_EXPR_LABEL (t)))
2542 return true;
2544 if (prev_t && TREE_CODE (prev_t) == LABEL_EXPR)
2546 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
2547 return true;
2549 cfg_stats.num_merged_labels++;
2550 return false;
2552 else
2553 return true;
2556 return false;
2560 /* Return true if T should end a basic block. */
2562 bool
2563 stmt_ends_bb_p (const_tree t)
2565 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2568 /* Remove block annotations and other datastructures. */
2570 void
2571 delete_tree_cfg_annotations (void)
2573 basic_block bb;
2574 block_stmt_iterator bsi;
2576 /* Remove annotations from every tree in the function. */
2577 FOR_EACH_BB (bb)
2578 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
2580 tree stmt = bsi_stmt (bsi);
2581 ggc_free (stmt->base.ann);
2582 stmt->base.ann = NULL;
2584 label_to_block_map = NULL;
2588 /* Return the first statement in basic block BB. */
2590 tree
2591 first_stmt (basic_block bb)
2593 block_stmt_iterator i = bsi_start (bb);
2594 return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
2597 /* Return the last statement in basic block BB. */
2599 tree
2600 last_stmt (basic_block bb)
2602 block_stmt_iterator b = bsi_last (bb);
2603 return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
2606 /* Return the last statement of an otherwise empty block. Return NULL
2607 if the block is totally empty, or if it contains more than one
2608 statement. */
2610 tree
2611 last_and_only_stmt (basic_block bb)
2613 block_stmt_iterator i = bsi_last (bb);
2614 tree last, prev;
2616 if (bsi_end_p (i))
2617 return NULL_TREE;
2619 last = bsi_stmt (i);
2620 bsi_prev (&i);
2621 if (bsi_end_p (i))
2622 return last;
2624 /* Empty statements should no longer appear in the instruction stream.
2625 Everything that might have appeared before should be deleted by
2626 remove_useless_stmts, and the optimizers should just bsi_remove
2627 instead of smashing with build_empty_stmt.
2629 Thus the only thing that should appear here in a block containing
2630 one executable statement is a label. */
2631 prev = bsi_stmt (i);
2632 if (TREE_CODE (prev) == LABEL_EXPR)
2633 return last;
2634 else
2635 return NULL_TREE;
2639 /* Mark BB as the basic block holding statement T. */
2641 void
2642 set_bb_for_stmt (tree t, basic_block bb)
2644 if (TREE_CODE (t) == PHI_NODE)
2645 PHI_BB (t) = bb;
2646 else if (TREE_CODE (t) == STATEMENT_LIST)
2648 tree_stmt_iterator i;
2649 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2650 set_bb_for_stmt (tsi_stmt (i), bb);
2652 else
2654 stmt_ann_t ann = get_stmt_ann (t);
2655 ann->bb = bb;
2657 /* If the statement is a label, add the label to block-to-labels map
2658 so that we can speed up edge creation for GOTO_EXPRs. */
2659 if (TREE_CODE (t) == LABEL_EXPR)
2661 int uid;
2663 t = LABEL_EXPR_LABEL (t);
2664 uid = LABEL_DECL_UID (t);
2665 if (uid == -1)
2667 unsigned old_len = VEC_length (basic_block, label_to_block_map);
2668 LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
2669 if (old_len <= (unsigned) uid)
2671 unsigned new_len = 3 * uid / 2;
2673 VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
2674 new_len);
2677 else
2678 /* We're moving an existing label. Make sure that we've
2679 removed it from the old block. */
2680 gcc_assert (!bb
2681 || !VEC_index (basic_block, label_to_block_map, uid));
2682 VEC_replace (basic_block, label_to_block_map, uid, bb);
2687 /* Faster version of set_bb_for_stmt that assume that statement is being moved
2688 from one basic block to another.
2689 For BB splitting we can run into quadratic case, so performance is quite
2690 important and knowing that the tables are big enough, change_bb_for_stmt
2691 can inline as leaf function. */
2692 static inline void
2693 change_bb_for_stmt (tree t, basic_block bb)
2695 get_stmt_ann (t)->bb = bb;
2696 if (TREE_CODE (t) == LABEL_EXPR)
2697 VEC_replace (basic_block, label_to_block_map,
2698 LABEL_DECL_UID (LABEL_EXPR_LABEL (t)), bb);
2701 /* Finds iterator for STMT. */
2703 extern block_stmt_iterator
2704 bsi_for_stmt (tree stmt)
2706 block_stmt_iterator bsi;
2708 for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
2709 if (bsi_stmt (bsi) == stmt)
2710 return bsi;
2712 gcc_unreachable ();
2715 /* Mark statement T as modified, and update it. */
2716 static inline void
2717 update_modified_stmts (tree t)
2719 if (!ssa_operands_active ())
2720 return;
2721 if (TREE_CODE (t) == STATEMENT_LIST)
2723 tree_stmt_iterator i;
2724 tree stmt;
2725 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2727 stmt = tsi_stmt (i);
2728 update_stmt_if_modified (stmt);
2731 else
2732 update_stmt_if_modified (t);
2735 /* Insert statement (or statement list) T before the statement
2736 pointed-to by iterator I. M specifies how to update iterator I
2737 after insertion (see enum bsi_iterator_update). */
2739 void
2740 bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2742 set_bb_for_stmt (t, i->bb);
2743 update_modified_stmts (t);
2744 tsi_link_before (&i->tsi, t, m);
2748 /* Insert statement (or statement list) T after the statement
2749 pointed-to by iterator I. M specifies how to update iterator I
2750 after insertion (see enum bsi_iterator_update). */
2752 void
2753 bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2755 set_bb_for_stmt (t, i->bb);
2756 update_modified_stmts (t);
2757 tsi_link_after (&i->tsi, t, m);
2761 /* Remove the statement pointed to by iterator I. The iterator is updated
2762 to the next statement.
2764 When REMOVE_EH_INFO is true we remove the statement pointed to by
2765 iterator I from the EH tables. Otherwise we do not modify the EH
2766 tables.
2768 Generally, REMOVE_EH_INFO should be true when the statement is going to
2769 be removed from the IL and not reinserted elsewhere. */
2771 void
2772 bsi_remove (block_stmt_iterator *i, bool remove_eh_info)
2774 tree t = bsi_stmt (*i);
2775 set_bb_for_stmt (t, NULL);
2776 delink_stmt_imm_use (t);
2777 tsi_delink (&i->tsi);
2778 mark_stmt_modified (t);
2779 if (remove_eh_info)
2781 remove_stmt_from_eh_region (t);
2782 gimple_remove_stmt_histograms (cfun, t);
2787 /* Move the statement at FROM so it comes right after the statement at TO. */
2789 void
2790 bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
2792 tree stmt = bsi_stmt (*from);
2793 bsi_remove (from, false);
2794 /* We must have BSI_NEW_STMT here, as bsi_move_after is sometimes used to
2795 move statements to an empty block. */
2796 bsi_insert_after (to, stmt, BSI_NEW_STMT);
2800 /* Move the statement at FROM so it comes right before the statement at TO. */
2802 void
2803 bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
2805 tree stmt = bsi_stmt (*from);
2806 bsi_remove (from, false);
2807 /* For consistency with bsi_move_after, it might be better to have
2808 BSI_NEW_STMT here; however, that breaks several places that expect
2809 that TO does not change. */
2810 bsi_insert_before (to, stmt, BSI_SAME_STMT);
2814 /* Move the statement at FROM to the end of basic block BB. */
2816 void
2817 bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
2819 block_stmt_iterator last = bsi_last (bb);
2821 /* Have to check bsi_end_p because it could be an empty block. */
2822 if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
2823 bsi_move_before (from, &last);
2824 else
2825 bsi_move_after (from, &last);
2829 /* Replace the contents of the statement pointed to by iterator BSI
2830 with STMT. If UPDATE_EH_INFO is true, the exception handling
2831 information of the original statement is moved to the new statement. */
2833 void
2834 bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool update_eh_info)
2836 int eh_region;
2837 tree orig_stmt = bsi_stmt (*bsi);
2839 if (stmt == orig_stmt)
2840 return;
2841 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
2842 set_bb_for_stmt (stmt, bsi->bb);
2844 /* Preserve EH region information from the original statement, if
2845 requested by the caller. */
2846 if (update_eh_info)
2848 eh_region = lookup_stmt_eh_region (orig_stmt);
2849 if (eh_region >= 0)
2851 remove_stmt_from_eh_region (orig_stmt);
2852 add_stmt_to_eh_region (stmt, eh_region);
2856 gimple_duplicate_stmt_histograms (cfun, stmt, cfun, orig_stmt);
2857 gimple_remove_stmt_histograms (cfun, orig_stmt);
2858 delink_stmt_imm_use (orig_stmt);
2859 *bsi_stmt_ptr (*bsi) = stmt;
2860 mark_stmt_modified (stmt);
2861 update_modified_stmts (stmt);
2865 /* Insert the statement pointed-to by BSI into edge E. Every attempt
2866 is made to place the statement in an existing basic block, but
2867 sometimes that isn't possible. When it isn't possible, the edge is
2868 split and the statement is added to the new block.
2870 In all cases, the returned *BSI points to the correct location. The
2871 return value is true if insertion should be done after the location,
2872 or false if it should be done before the location. If new basic block
2873 has to be created, it is stored in *NEW_BB. */
2875 static bool
2876 tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
2877 basic_block *new_bb)
2879 basic_block dest, src;
2880 tree tmp;
2882 dest = e->dest;
2883 restart:
2885 /* If the destination has one predecessor which has no PHI nodes,
2886 insert there. Except for the exit block.
2888 The requirement for no PHI nodes could be relaxed. Basically we
2889 would have to examine the PHIs to prove that none of them used
2890 the value set by the statement we want to insert on E. That
2891 hardly seems worth the effort. */
2892 if (single_pred_p (dest)
2893 && ! phi_nodes (dest)
2894 && dest != EXIT_BLOCK_PTR)
2896 *bsi = bsi_start (dest);
2897 if (bsi_end_p (*bsi))
2898 return true;
2900 /* Make sure we insert after any leading labels. */
2901 tmp = bsi_stmt (*bsi);
2902 while (TREE_CODE (tmp) == LABEL_EXPR)
2904 bsi_next (bsi);
2905 if (bsi_end_p (*bsi))
2906 break;
2907 tmp = bsi_stmt (*bsi);
2910 if (bsi_end_p (*bsi))
2912 *bsi = bsi_last (dest);
2913 return true;
2915 else
2916 return false;
2919 /* If the source has one successor, the edge is not abnormal and
2920 the last statement does not end a basic block, insert there.
2921 Except for the entry block. */
2922 src = e->src;
2923 if ((e->flags & EDGE_ABNORMAL) == 0
2924 && single_succ_p (src)
2925 && src != ENTRY_BLOCK_PTR)
2927 *bsi = bsi_last (src);
2928 if (bsi_end_p (*bsi))
2929 return true;
2931 tmp = bsi_stmt (*bsi);
2932 if (!stmt_ends_bb_p (tmp))
2933 return true;
2935 /* Insert code just before returning the value. We may need to decompose
2936 the return in the case it contains non-trivial operand. */
2937 if (TREE_CODE (tmp) == RETURN_EXPR)
2939 tree op = TREE_OPERAND (tmp, 0);
2940 if (op && !is_gimple_val (op))
2942 gcc_assert (TREE_CODE (op) == GIMPLE_MODIFY_STMT);
2943 bsi_insert_before (bsi, op, BSI_NEW_STMT);
2944 TREE_OPERAND (tmp, 0) = GIMPLE_STMT_OPERAND (op, 0);
2946 bsi_prev (bsi);
2947 return true;
2951 /* Otherwise, create a new basic block, and split this edge. */
2952 dest = split_edge (e);
2953 if (new_bb)
2954 *new_bb = dest;
2955 e = single_pred_edge (dest);
2956 goto restart;
2960 /* This routine will commit all pending edge insertions, creating any new
2961 basic blocks which are necessary. */
2963 void
2964 bsi_commit_edge_inserts (void)
2966 basic_block bb;
2967 edge e;
2968 edge_iterator ei;
2970 bsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR), NULL);
2972 FOR_EACH_BB (bb)
2973 FOR_EACH_EDGE (e, ei, bb->succs)
2974 bsi_commit_one_edge_insert (e, NULL);
2978 /* Commit insertions pending at edge E. If a new block is created, set NEW_BB
2979 to this block, otherwise set it to NULL. */
2981 void
2982 bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
2984 if (new_bb)
2985 *new_bb = NULL;
2986 if (PENDING_STMT (e))
2988 block_stmt_iterator bsi;
2989 tree stmt = PENDING_STMT (e);
2991 PENDING_STMT (e) = NULL_TREE;
2993 if (tree_find_edge_insert_loc (e, &bsi, new_bb))
2994 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
2995 else
2996 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3001 /* Add STMT to the pending list of edge E. No actual insertion is
3002 made until a call to bsi_commit_edge_inserts () is made. */
3004 void
3005 bsi_insert_on_edge (edge e, tree stmt)
3007 append_to_statement_list (stmt, &PENDING_STMT (e));
3010 /* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts. If a new
3011 block has to be created, it is returned. */
3013 basic_block
3014 bsi_insert_on_edge_immediate (edge e, tree stmt)
3016 block_stmt_iterator bsi;
3017 basic_block new_bb = NULL;
3019 gcc_assert (!PENDING_STMT (e));
3021 if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
3022 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3023 else
3024 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3026 return new_bb;
3029 /*---------------------------------------------------------------------------
3030 Tree specific functions for CFG manipulation
3031 ---------------------------------------------------------------------------*/
3033 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
3035 static void
3036 reinstall_phi_args (edge new_edge, edge old_edge)
3038 tree var, phi;
3040 if (!PENDING_STMT (old_edge))
3041 return;
3043 for (var = PENDING_STMT (old_edge), phi = phi_nodes (new_edge->dest);
3044 var && phi;
3045 var = TREE_CHAIN (var), phi = PHI_CHAIN (phi))
3047 tree result = TREE_PURPOSE (var);
3048 tree arg = TREE_VALUE (var);
3050 gcc_assert (result == PHI_RESULT (phi));
3052 add_phi_arg (phi, arg, new_edge);
3055 PENDING_STMT (old_edge) = NULL;
3058 /* Returns the basic block after which the new basic block created
3059 by splitting edge EDGE_IN should be placed. Tries to keep the new block
3060 near its "logical" location. This is of most help to humans looking
3061 at debugging dumps. */
3063 static basic_block
3064 split_edge_bb_loc (edge edge_in)
3066 basic_block dest = edge_in->dest;
3068 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
3069 return edge_in->src;
3070 else
3071 return dest->prev_bb;
3074 /* Split a (typically critical) edge EDGE_IN. Return the new block.
3075 Abort on abnormal edges. */
3077 static basic_block
3078 tree_split_edge (edge edge_in)
3080 basic_block new_bb, after_bb, dest;
3081 edge new_edge, e;
3083 /* Abnormal edges cannot be split. */
3084 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
3086 dest = edge_in->dest;
3088 after_bb = split_edge_bb_loc (edge_in);
3090 new_bb = create_empty_bb (after_bb);
3091 new_bb->frequency = EDGE_FREQUENCY (edge_in);
3092 new_bb->count = edge_in->count;
3093 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
3094 new_edge->probability = REG_BR_PROB_BASE;
3095 new_edge->count = edge_in->count;
3097 e = redirect_edge_and_branch (edge_in, new_bb);
3098 gcc_assert (e == edge_in);
3099 reinstall_phi_args (new_edge, e);
3101 return new_bb;
3104 /* Callback for walk_tree, check that all elements with address taken are
3105 properly noticed as such. The DATA is an int* that is 1 if TP was seen
3106 inside a PHI node. */
3108 static tree
3109 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3111 tree t = *tp, x;
3112 bool in_phi = (data != NULL);
3114 if (TYPE_P (t))
3115 *walk_subtrees = 0;
3117 /* Check operand N for being valid GIMPLE and give error MSG if not. */
3118 #define CHECK_OP(N, MSG) \
3119 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
3120 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
3122 switch (TREE_CODE (t))
3124 case SSA_NAME:
3125 if (SSA_NAME_IN_FREE_LIST (t))
3127 error ("SSA name in freelist but still referenced");
3128 return *tp;
3130 break;
3132 case ASSERT_EXPR:
3133 x = fold (ASSERT_EXPR_COND (t));
3134 if (x == boolean_false_node)
3136 error ("ASSERT_EXPR with an always-false condition");
3137 return *tp;
3139 break;
3141 case MODIFY_EXPR:
3142 gcc_unreachable ();
3144 case GIMPLE_MODIFY_STMT:
3145 x = GIMPLE_STMT_OPERAND (t, 0);
3146 if (TREE_CODE (x) == BIT_FIELD_REF
3147 && is_gimple_reg (TREE_OPERAND (x, 0)))
3149 error ("GIMPLE register modified with BIT_FIELD_REF");
3150 return t;
3152 break;
3154 case ADDR_EXPR:
3156 bool old_invariant;
3157 bool old_constant;
3158 bool old_side_effects;
3159 bool new_invariant;
3160 bool new_constant;
3161 bool new_side_effects;
3163 /* ??? tree-ssa-alias.c may have overlooked dead PHI nodes, missing
3164 dead PHIs that take the address of something. But if the PHI
3165 result is dead, the fact that it takes the address of anything
3166 is irrelevant. Because we can not tell from here if a PHI result
3167 is dead, we just skip this check for PHIs altogether. This means
3168 we may be missing "valid" checks, but what can you do?
3169 This was PR19217. */
3170 if (in_phi)
3171 break;
3173 old_invariant = TREE_INVARIANT (t);
3174 old_constant = TREE_CONSTANT (t);
3175 old_side_effects = TREE_SIDE_EFFECTS (t);
3177 recompute_tree_invariant_for_addr_expr (t);
3178 new_invariant = TREE_INVARIANT (t);
3179 new_side_effects = TREE_SIDE_EFFECTS (t);
3180 new_constant = TREE_CONSTANT (t);
3182 if (old_invariant != new_invariant)
3184 error ("invariant not recomputed when ADDR_EXPR changed");
3185 return t;
3188 if (old_constant != new_constant)
3190 error ("constant not recomputed when ADDR_EXPR changed");
3191 return t;
3193 if (old_side_effects != new_side_effects)
3195 error ("side effects not recomputed when ADDR_EXPR changed");
3196 return t;
3199 /* Skip any references (they will be checked when we recurse down the
3200 tree) and ensure that any variable used as a prefix is marked
3201 addressable. */
3202 for (x = TREE_OPERAND (t, 0);
3203 handled_component_p (x);
3204 x = TREE_OPERAND (x, 0))
3207 if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
3208 return NULL;
3209 if (!TREE_ADDRESSABLE (x))
3211 error ("address taken, but ADDRESSABLE bit not set");
3212 return x;
3214 break;
3217 case COND_EXPR:
3218 x = COND_EXPR_COND (t);
3219 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
3221 error ("non-integral used in condition");
3222 return x;
3224 if (!is_gimple_condexpr (x))
3226 error ("invalid conditional operand");
3227 return x;
3229 break;
3231 case NOP_EXPR:
3232 case CONVERT_EXPR:
3233 case FIX_TRUNC_EXPR:
3234 case FLOAT_EXPR:
3235 case NEGATE_EXPR:
3236 case ABS_EXPR:
3237 case BIT_NOT_EXPR:
3238 case NON_LVALUE_EXPR:
3239 case TRUTH_NOT_EXPR:
3240 CHECK_OP (0, "invalid operand to unary operator");
3241 break;
3243 case REALPART_EXPR:
3244 case IMAGPART_EXPR:
3245 case COMPONENT_REF:
3246 case ARRAY_REF:
3247 case ARRAY_RANGE_REF:
3248 case BIT_FIELD_REF:
3249 case VIEW_CONVERT_EXPR:
3250 /* We have a nest of references. Verify that each of the operands
3251 that determine where to reference is either a constant or a variable,
3252 verify that the base is valid, and then show we've already checked
3253 the subtrees. */
3254 while (handled_component_p (t))
3256 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3257 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3258 else if (TREE_CODE (t) == ARRAY_REF
3259 || TREE_CODE (t) == ARRAY_RANGE_REF)
3261 CHECK_OP (1, "invalid array index");
3262 if (TREE_OPERAND (t, 2))
3263 CHECK_OP (2, "invalid array lower bound");
3264 if (TREE_OPERAND (t, 3))
3265 CHECK_OP (3, "invalid array stride");
3267 else if (TREE_CODE (t) == BIT_FIELD_REF)
3269 CHECK_OP (1, "invalid operand to BIT_FIELD_REF");
3270 CHECK_OP (2, "invalid operand to BIT_FIELD_REF");
3273 t = TREE_OPERAND (t, 0);
3276 if (!CONSTANT_CLASS_P (t) && !is_gimple_lvalue (t))
3278 error ("invalid reference prefix");
3279 return t;
3281 *walk_subtrees = 0;
3282 break;
3283 case PLUS_EXPR:
3284 case MINUS_EXPR:
3285 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
3286 POINTER_PLUS_EXPR. */
3287 if (POINTER_TYPE_P (TREE_TYPE (t)))
3289 error ("invalid operand to plus/minus, type is a pointer");
3290 return t;
3292 CHECK_OP (0, "invalid operand to binary operator");
3293 CHECK_OP (1, "invalid operand to binary operator");
3294 break;
3296 case POINTER_PLUS_EXPR:
3297 /* Check to make sure the first operand is a pointer or reference type. */
3298 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3300 error ("invalid operand to pointer plus, first operand is not a pointer");
3301 return t;
3303 /* Check to make sure the second operand is an integer with type of
3304 sizetype. */
3305 if (!useless_type_conversion_p (sizetype,
3306 TREE_TYPE (TREE_OPERAND (t, 1))))
3308 error ("invalid operand to pointer plus, second operand is not an "
3309 "integer with type of sizetype.");
3310 return t;
3312 /* FALLTHROUGH */
3313 case LT_EXPR:
3314 case LE_EXPR:
3315 case GT_EXPR:
3316 case GE_EXPR:
3317 case EQ_EXPR:
3318 case NE_EXPR:
3319 case UNORDERED_EXPR:
3320 case ORDERED_EXPR:
3321 case UNLT_EXPR:
3322 case UNLE_EXPR:
3323 case UNGT_EXPR:
3324 case UNGE_EXPR:
3325 case UNEQ_EXPR:
3326 case LTGT_EXPR:
3327 case MULT_EXPR:
3328 case TRUNC_DIV_EXPR:
3329 case CEIL_DIV_EXPR:
3330 case FLOOR_DIV_EXPR:
3331 case ROUND_DIV_EXPR:
3332 case TRUNC_MOD_EXPR:
3333 case CEIL_MOD_EXPR:
3334 case FLOOR_MOD_EXPR:
3335 case ROUND_MOD_EXPR:
3336 case RDIV_EXPR:
3337 case EXACT_DIV_EXPR:
3338 case MIN_EXPR:
3339 case MAX_EXPR:
3340 case LSHIFT_EXPR:
3341 case RSHIFT_EXPR:
3342 case LROTATE_EXPR:
3343 case RROTATE_EXPR:
3344 case BIT_IOR_EXPR:
3345 case BIT_XOR_EXPR:
3346 case BIT_AND_EXPR:
3347 CHECK_OP (0, "invalid operand to binary operator");
3348 CHECK_OP (1, "invalid operand to binary operator");
3349 break;
3351 case CONSTRUCTOR:
3352 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3353 *walk_subtrees = 0;
3354 break;
3356 default:
3357 break;
3359 return NULL;
3361 #undef CHECK_OP
3364 /* Verifies if EXPR is a valid GIMPLE unary expression. Returns true
3365 if there is an error, otherwise false. */
3367 static bool
3368 verify_gimple_unary_expr (const_tree expr)
3370 tree op = TREE_OPERAND (expr, 0);
3371 tree type = TREE_TYPE (expr);
3373 if (!is_gimple_val (op))
3375 error ("invalid operand in unary expression");
3376 return true;
3379 /* For general unary expressions we have the operations type
3380 as the effective type the operation is carried out on. So all
3381 we need to require is that the operand is trivially convertible
3382 to that type. */
3383 if (!useless_type_conversion_p (type, TREE_TYPE (op)))
3385 error ("type mismatch in unary expression");
3386 debug_generic_expr (type);
3387 debug_generic_expr (TREE_TYPE (op));
3388 return true;
3391 return false;
3394 /* Verifies if EXPR is a valid GIMPLE binary expression. Returns true
3395 if there is an error, otherwise false. */
3397 static bool
3398 verify_gimple_binary_expr (const_tree expr)
3400 tree op0 = TREE_OPERAND (expr, 0);
3401 tree op1 = TREE_OPERAND (expr, 1);
3402 tree type = TREE_TYPE (expr);
3404 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3406 error ("invalid operands in binary expression");
3407 return true;
3410 /* For general binary expressions we have the operations type
3411 as the effective type the operation is carried out on. So all
3412 we need to require is that both operands are trivially convertible
3413 to that type. */
3414 if (!useless_type_conversion_p (type, TREE_TYPE (op0))
3415 || !useless_type_conversion_p (type, TREE_TYPE (op1)))
3417 error ("type mismatch in binary expression");
3418 debug_generic_stmt (type);
3419 debug_generic_stmt (TREE_TYPE (op0));
3420 debug_generic_stmt (TREE_TYPE (op1));
3421 return true;
3424 return false;
3427 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3428 Returns true if there is an error, otherwise false. */
3430 static bool
3431 verify_gimple_min_lval (tree expr)
3433 tree op;
3435 if (is_gimple_id (expr))
3436 return false;
3438 if (TREE_CODE (expr) != INDIRECT_REF
3439 && TREE_CODE (expr) != ALIGN_INDIRECT_REF
3440 && TREE_CODE (expr) != MISALIGNED_INDIRECT_REF)
3442 error ("invalid expression for min lvalue");
3443 return true;
3446 op = TREE_OPERAND (expr, 0);
3447 if (!is_gimple_val (op))
3449 error ("invalid operand in indirect reference");
3450 debug_generic_stmt (op);
3451 return true;
3453 if (!useless_type_conversion_p (TREE_TYPE (expr),
3454 TREE_TYPE (TREE_TYPE (op))))
3456 error ("type mismatch in indirect reference");
3457 debug_generic_stmt (TREE_TYPE (expr));
3458 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3459 return true;
3462 return false;
3465 /* Verify if EXPR is a valid GIMPLE reference expression. Returns true
3466 if there is an error, otherwise false. */
3468 static bool
3469 verify_gimple_reference (tree expr)
3471 while (handled_component_p (expr))
3473 tree op = TREE_OPERAND (expr, 0);
3475 if (TREE_CODE (expr) == ARRAY_REF
3476 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3478 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3479 || (TREE_OPERAND (expr, 2)
3480 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3481 || (TREE_OPERAND (expr, 3)
3482 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3484 error ("invalid operands to array reference");
3485 debug_generic_stmt (expr);
3486 return true;
3490 /* Verify if the reference array element types are compatible. */
3491 if (TREE_CODE (expr) == ARRAY_REF
3492 && !useless_type_conversion_p (TREE_TYPE (expr),
3493 TREE_TYPE (TREE_TYPE (op))))
3495 error ("type mismatch in array reference");
3496 debug_generic_stmt (TREE_TYPE (expr));
3497 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3498 return true;
3500 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3501 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3502 TREE_TYPE (TREE_TYPE (op))))
3504 error ("type mismatch in array range reference");
3505 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3506 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3507 return true;
3510 if ((TREE_CODE (expr) == REALPART_EXPR
3511 || TREE_CODE (expr) == IMAGPART_EXPR)
3512 && !useless_type_conversion_p (TREE_TYPE (expr),
3513 TREE_TYPE (TREE_TYPE (op))))
3515 error ("type mismatch in real/imagpart reference");
3516 debug_generic_stmt (TREE_TYPE (expr));
3517 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3518 return true;
3521 if (TREE_CODE (expr) == COMPONENT_REF
3522 && !useless_type_conversion_p (TREE_TYPE (expr),
3523 TREE_TYPE (TREE_OPERAND (expr, 1))))
3525 error ("type mismatch in component reference");
3526 debug_generic_stmt (TREE_TYPE (expr));
3527 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3528 return true;
3531 /* For VIEW_CONVERT_EXPRs which are allowed here, too, there
3532 is nothing to verify. Gross mismatches at most invoke
3533 undefined behavior. */
3535 expr = op;
3538 return verify_gimple_min_lval (expr);
3541 /* Verify the GIMPLE expression EXPR. Returns true if there is an
3542 error, otherwise false. */
3544 static bool
3545 verify_gimple_expr (tree expr)
3547 tree type = TREE_TYPE (expr);
3549 if (is_gimple_val (expr))
3550 return false;
3552 /* Special codes we cannot handle via their class. */
3553 switch (TREE_CODE (expr))
3555 case NOP_EXPR:
3556 case CONVERT_EXPR:
3558 tree op = TREE_OPERAND (expr, 0);
3559 if (!is_gimple_val (op))
3561 error ("invalid operand in conversion");
3562 return true;
3565 /* Allow conversions between integral types and between
3566 pointer types. */
3567 if ((INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (op)))
3568 || (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (op))))
3569 return false;
3571 /* Allow conversions between integral types and pointers only if
3572 there is no sign or zero extension involved. */
3573 if (((POINTER_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (op)))
3574 || (POINTER_TYPE_P (TREE_TYPE (op)) && INTEGRAL_TYPE_P (type)))
3575 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op)))
3576 return false;
3578 /* Allow conversion from integer to offset type and vice versa. */
3579 if ((TREE_CODE (type) == OFFSET_TYPE
3580 && TREE_CODE (TREE_TYPE (op)) == INTEGER_TYPE)
3581 || (TREE_CODE (type) == INTEGER_TYPE
3582 && TREE_CODE (TREE_TYPE (op)) == OFFSET_TYPE))
3583 return false;
3585 /* Otherwise assert we are converting between types of the
3586 same kind. */
3587 if (TREE_CODE (type) != TREE_CODE (TREE_TYPE (op)))
3589 error ("invalid types in nop conversion");
3590 debug_generic_expr (type);
3591 debug_generic_expr (TREE_TYPE (op));
3592 return true;
3595 return false;
3598 case FLOAT_EXPR:
3600 tree op = TREE_OPERAND (expr, 0);
3601 if (!is_gimple_val (op))
3603 error ("invalid operand in int to float conversion");
3604 return true;
3606 if (!INTEGRAL_TYPE_P (TREE_TYPE (op))
3607 || !SCALAR_FLOAT_TYPE_P (type))
3609 error ("invalid types in conversion to floating point");
3610 debug_generic_expr (type);
3611 debug_generic_expr (TREE_TYPE (op));
3612 return true;
3614 return false;
3617 case FIX_TRUNC_EXPR:
3619 tree op = TREE_OPERAND (expr, 0);
3620 if (!is_gimple_val (op))
3622 error ("invalid operand in float to int conversion");
3623 return true;
3625 if (!INTEGRAL_TYPE_P (type)
3626 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (op)))
3628 error ("invalid types in conversion to integer");
3629 debug_generic_expr (type);
3630 debug_generic_expr (TREE_TYPE (op));
3631 return true;
3633 return false;
3636 case COMPLEX_EXPR:
3638 tree op0 = TREE_OPERAND (expr, 0);
3639 tree op1 = TREE_OPERAND (expr, 1);
3640 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3642 error ("invalid operands in complex expression");
3643 return true;
3645 if (!TREE_CODE (type) == COMPLEX_TYPE
3646 || !(TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
3647 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (op0)))
3648 || !(TREE_CODE (TREE_TYPE (op1)) == INTEGER_TYPE
3649 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (op1)))
3650 || !useless_type_conversion_p (TREE_TYPE (type),
3651 TREE_TYPE (op0))
3652 || !useless_type_conversion_p (TREE_TYPE (type),
3653 TREE_TYPE (op1)))
3655 error ("type mismatch in complex expression");
3656 debug_generic_stmt (TREE_TYPE (expr));
3657 debug_generic_stmt (TREE_TYPE (op0));
3658 debug_generic_stmt (TREE_TYPE (op1));
3659 return true;
3661 return false;
3664 case CONSTRUCTOR:
3666 /* This is used like COMPLEX_EXPR but for vectors. */
3667 if (TREE_CODE (type) != VECTOR_TYPE)
3669 error ("constructor not allowed for non-vector types");
3670 debug_generic_stmt (type);
3671 return true;
3673 /* FIXME: verify constructor arguments. */
3674 return false;
3677 case LSHIFT_EXPR:
3678 case RSHIFT_EXPR:
3679 case LROTATE_EXPR:
3680 case RROTATE_EXPR:
3682 tree op0 = TREE_OPERAND (expr, 0);
3683 tree op1 = TREE_OPERAND (expr, 1);
3684 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3686 error ("invalid operands in shift expression");
3687 return true;
3689 if (!TREE_CODE (TREE_TYPE (op1)) == INTEGER_TYPE
3690 || !useless_type_conversion_p (type, TREE_TYPE (op0)))
3692 error ("type mismatch in shift expression");
3693 debug_generic_stmt (TREE_TYPE (expr));
3694 debug_generic_stmt (TREE_TYPE (op0));
3695 debug_generic_stmt (TREE_TYPE (op1));
3696 return true;
3698 return false;
3701 case PLUS_EXPR:
3702 case MINUS_EXPR:
3704 tree op0 = TREE_OPERAND (expr, 0);
3705 tree op1 = TREE_OPERAND (expr, 1);
3706 if (POINTER_TYPE_P (type)
3707 || POINTER_TYPE_P (TREE_TYPE (op0))
3708 || POINTER_TYPE_P (TREE_TYPE (op1)))
3710 error ("invalid (pointer) operands to plus/minus");
3711 return true;
3713 /* Continue with generic binary expression handling. */
3714 break;
3717 case POINTER_PLUS_EXPR:
3719 tree op0 = TREE_OPERAND (expr, 0);
3720 tree op1 = TREE_OPERAND (expr, 1);
3721 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3723 error ("invalid operands in pointer plus expression");
3724 return true;
3726 if (!POINTER_TYPE_P (TREE_TYPE (op0))
3727 || TREE_CODE (TREE_TYPE (op1)) != INTEGER_TYPE
3728 || !useless_type_conversion_p (type, TREE_TYPE (op0))
3729 || !useless_type_conversion_p (sizetype, TREE_TYPE (op1)))
3731 error ("type mismatch in pointer plus expression");
3732 debug_generic_stmt (type);
3733 debug_generic_stmt (TREE_TYPE (op0));
3734 debug_generic_stmt (TREE_TYPE (op1));
3735 return true;
3737 return false;
3740 case COND_EXPR:
3742 tree op0 = TREE_OPERAND (expr, 0);
3743 tree op1 = TREE_OPERAND (expr, 1);
3744 tree op2 = TREE_OPERAND (expr, 2);
3745 if ((!is_gimple_val (op1)
3746 && TREE_CODE (TREE_TYPE (op1)) != VOID_TYPE)
3747 || (!is_gimple_val (op2)
3748 && TREE_CODE (TREE_TYPE (op2)) != VOID_TYPE))
3750 error ("invalid operands in conditional expression");
3751 return true;
3753 if (!INTEGRAL_TYPE_P (TREE_TYPE (op0))
3754 || (TREE_CODE (TREE_TYPE (op1)) != VOID_TYPE
3755 && !useless_type_conversion_p (type, TREE_TYPE (op1)))
3756 || (TREE_CODE (TREE_TYPE (op2)) != VOID_TYPE
3757 && !useless_type_conversion_p (type, TREE_TYPE (op2))))
3759 error ("type mismatch in conditional expression");
3760 debug_generic_stmt (type);
3761 debug_generic_stmt (TREE_TYPE (op0));
3762 debug_generic_stmt (TREE_TYPE (op1));
3763 debug_generic_stmt (TREE_TYPE (op2));
3764 return true;
3766 return verify_gimple_expr (op0);
3769 case ADDR_EXPR:
3771 tree op = TREE_OPERAND (expr, 0);
3772 if (!is_gimple_addressable (op))
3774 error ("invalid operand in unary expression");
3775 return true;
3777 if (TYPE_POINTER_TO (TREE_TYPE (op))
3778 && !useless_type_conversion_p (type,
3779 TYPE_POINTER_TO (TREE_TYPE (op)))
3780 /* FIXME: a longstanding wart, &a == &a[0]. */
3781 && (TREE_CODE (TREE_TYPE (op)) != ARRAY_TYPE
3782 || (TYPE_POINTER_TO (TREE_TYPE (TREE_TYPE (op)))
3783 && !useless_type_conversion_p (type,
3784 TYPE_POINTER_TO (TREE_TYPE (TREE_TYPE (op)))))))
3786 error ("type mismatch in address expression");
3787 debug_generic_stmt (TREE_TYPE (expr));
3788 debug_generic_stmt (TYPE_POINTER_TO (TREE_TYPE (op)));
3789 return true;
3792 return verify_gimple_reference (op);
3795 case TRUTH_ANDIF_EXPR:
3796 case TRUTH_ORIF_EXPR:
3797 case TRUTH_AND_EXPR:
3798 case TRUTH_OR_EXPR:
3799 case TRUTH_XOR_EXPR:
3801 tree op0 = TREE_OPERAND (expr, 0);
3802 tree op1 = TREE_OPERAND (expr, 1);
3804 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3806 error ("invalid operands in truth expression");
3807 return true;
3810 /* We allow any kind of integral typed argument and result. */
3811 if (!INTEGRAL_TYPE_P (TREE_TYPE (op0))
3812 || !INTEGRAL_TYPE_P (TREE_TYPE (op1))
3813 || !INTEGRAL_TYPE_P (type))
3815 error ("type mismatch in binary truth expression");
3816 debug_generic_stmt (type);
3817 debug_generic_stmt (TREE_TYPE (op0));
3818 debug_generic_stmt (TREE_TYPE (op1));
3819 return true;
3822 return false;
3825 case TRUTH_NOT_EXPR:
3827 tree op = TREE_OPERAND (expr, 0);
3829 if (!is_gimple_val (op))
3831 error ("invalid operand in unary not");
3832 return true;
3835 /* For TRUTH_NOT_EXPR we can have any kind of integral
3836 typed arguments and results. */
3837 if (!INTEGRAL_TYPE_P (TREE_TYPE (op))
3838 || !INTEGRAL_TYPE_P (type))
3840 error ("type mismatch in not expression");
3841 debug_generic_expr (TREE_TYPE (expr));
3842 debug_generic_expr (TREE_TYPE (op));
3843 return true;
3846 return false;
3849 case CALL_EXPR:
3850 /* FIXME. The C frontend passes unpromoted arguments in case it
3851 didn't see a function declaration before the call. */
3852 return false;
3854 default:;
3857 /* Generic handling via classes. */
3858 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3860 case tcc_unary:
3861 return verify_gimple_unary_expr (expr);
3863 case tcc_binary:
3864 return verify_gimple_binary_expr (expr);
3866 case tcc_reference:
3867 return verify_gimple_reference (expr);
3869 case tcc_comparison:
3871 tree op0 = TREE_OPERAND (expr, 0);
3872 tree op1 = TREE_OPERAND (expr, 1);
3873 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3875 error ("invalid operands in comparison expression");
3876 return true;
3878 /* For comparisons we do not have the operations type as the
3879 effective type the comparison is carried out in. Instead
3880 we require that either the first operand is trivially
3881 convertible into the second, or the other way around.
3882 The resulting type of a comparison may be any integral type.
3883 Because we special-case pointers to void we allow
3884 comparisons of pointers with the same mode as well. */
3885 if ((!useless_type_conversion_p (TREE_TYPE (op0), TREE_TYPE (op1))
3886 && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0))
3887 && (!POINTER_TYPE_P (TREE_TYPE (op0))
3888 || !POINTER_TYPE_P (TREE_TYPE (op1))
3889 || TYPE_MODE (TREE_TYPE (op0)) != TYPE_MODE (TREE_TYPE (op1))))
3890 || !INTEGRAL_TYPE_P (type))
3892 error ("type mismatch in comparison expression");
3893 debug_generic_stmt (TREE_TYPE (expr));
3894 debug_generic_stmt (TREE_TYPE (op0));
3895 debug_generic_stmt (TREE_TYPE (op1));
3896 return true;
3898 break;
3901 default:
3902 gcc_unreachable ();
3905 return false;
3908 /* Verify the GIMPLE assignment statement STMT. Returns true if there
3909 is an error, otherwise false. */
3911 static bool
3912 verify_gimple_modify_stmt (const_tree stmt)
3914 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
3915 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
3917 gcc_assert (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT);
3919 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3920 TREE_TYPE (rhs)))
3922 error ("non-trivial conversion at assignment");
3923 debug_generic_expr (TREE_TYPE (lhs));
3924 debug_generic_expr (TREE_TYPE (rhs));
3925 return true;
3928 /* Loads/stores from/to a variable are ok. */
3929 if ((is_gimple_val (lhs)
3930 && is_gimple_variable (rhs))
3931 || (is_gimple_val (rhs)
3932 && is_gimple_variable (lhs)))
3933 return false;
3935 /* Aggregate copies are ok. */
3936 if (!is_gimple_reg_type (TREE_TYPE (lhs))
3937 && !is_gimple_reg_type (TREE_TYPE (rhs)))
3938 return false;
3940 /* We might get 'loads' from a parameter which is not a gimple value. */
3941 if (TREE_CODE (rhs) == PARM_DECL)
3942 return verify_gimple_expr (lhs);
3944 if (!is_gimple_variable (lhs)
3945 && verify_gimple_expr (lhs))
3946 return true;
3948 if (!is_gimple_variable (rhs)
3949 && verify_gimple_expr (rhs))
3950 return true;
3952 return false;
3955 /* Verify the GIMPLE statement STMT. Returns true if there is an
3956 error, otherwise false. */
3958 static bool
3959 verify_gimple_stmt (tree stmt)
3961 if (!is_gimple_stmt (stmt))
3963 error ("is not a valid GIMPLE statement");
3964 return true;
3967 if (OMP_DIRECTIVE_P (stmt))
3969 /* OpenMP directives are validated by the FE and never operated
3970 on by the optimizers. Furthermore, OMP_FOR may contain
3971 non-gimple expressions when the main index variable has had
3972 its address taken. This does not affect the loop itself
3973 because the header of an OMP_FOR is merely used to determine
3974 how to setup the parallel iteration. */
3975 return false;
3978 switch (TREE_CODE (stmt))
3980 case GIMPLE_MODIFY_STMT:
3981 return verify_gimple_modify_stmt (stmt);
3983 case GOTO_EXPR:
3984 case LABEL_EXPR:
3985 return false;
3987 case SWITCH_EXPR:
3988 if (!is_gimple_val (TREE_OPERAND (stmt, 0)))
3990 error ("invalid operand to switch statement");
3991 debug_generic_expr (TREE_OPERAND (stmt, 0));
3993 return false;
3995 case RETURN_EXPR:
3997 tree op = TREE_OPERAND (stmt, 0);
3999 if (TREE_CODE (TREE_TYPE (stmt)) != VOID_TYPE)
4001 error ("type error in return expression");
4002 return true;
4005 if (op == NULL_TREE
4006 || TREE_CODE (op) == RESULT_DECL)
4007 return false;
4009 return verify_gimple_modify_stmt (op);
4012 case CALL_EXPR:
4013 case COND_EXPR:
4014 return verify_gimple_expr (stmt);
4016 case NOP_EXPR:
4017 case CHANGE_DYNAMIC_TYPE_EXPR:
4018 case ASM_EXPR:
4019 return false;
4021 default:
4022 gcc_unreachable ();
4026 /* Verify the GIMPLE statements inside the statement list STMTS. */
4028 void
4029 verify_gimple_1 (tree stmts)
4031 tree_stmt_iterator tsi;
4033 for (tsi = tsi_start (stmts); !tsi_end_p (tsi); tsi_next (&tsi))
4035 tree stmt = tsi_stmt (tsi);
4037 switch (TREE_CODE (stmt))
4039 case BIND_EXPR:
4040 verify_gimple_1 (BIND_EXPR_BODY (stmt));
4041 break;
4043 case TRY_CATCH_EXPR:
4044 case TRY_FINALLY_EXPR:
4045 verify_gimple_1 (TREE_OPERAND (stmt, 0));
4046 verify_gimple_1 (TREE_OPERAND (stmt, 1));
4047 break;
4049 case CATCH_EXPR:
4050 verify_gimple_1 (CATCH_BODY (stmt));
4051 break;
4053 case EH_FILTER_EXPR:
4054 verify_gimple_1 (EH_FILTER_FAILURE (stmt));
4055 break;
4057 default:
4058 if (verify_gimple_stmt (stmt))
4059 debug_generic_expr (stmt);
4064 /* Verify the GIMPLE statements inside the current function. */
4066 void
4067 verify_gimple (void)
4069 verify_gimple_1 (BIND_EXPR_BODY (DECL_SAVED_TREE (cfun->decl)));
4072 /* Verify STMT, return true if STMT is not in GIMPLE form.
4073 TODO: Implement type checking. */
4075 static bool
4076 verify_stmt (tree stmt, bool last_in_block)
4078 tree addr;
4080 if (OMP_DIRECTIVE_P (stmt))
4082 /* OpenMP directives are validated by the FE and never operated
4083 on by the optimizers. Furthermore, OMP_FOR may contain
4084 non-gimple expressions when the main index variable has had
4085 its address taken. This does not affect the loop itself
4086 because the header of an OMP_FOR is merely used to determine
4087 how to setup the parallel iteration. */
4088 return false;
4091 if (!is_gimple_stmt (stmt))
4093 error ("is not a valid GIMPLE statement");
4094 goto fail;
4097 addr = walk_tree (&stmt, verify_expr, NULL, NULL);
4098 if (addr)
4100 debug_generic_stmt (addr);
4101 return true;
4104 /* If the statement is marked as part of an EH region, then it is
4105 expected that the statement could throw. Verify that when we
4106 have optimizations that simplify statements such that we prove
4107 that they cannot throw, that we update other data structures
4108 to match. */
4109 if (lookup_stmt_eh_region (stmt) >= 0)
4111 if (!tree_could_throw_p (stmt))
4113 error ("statement marked for throw, but doesn%'t");
4114 goto fail;
4116 if (!last_in_block && tree_can_throw_internal (stmt))
4118 error ("statement marked for throw in middle of block");
4119 goto fail;
4123 return false;
4125 fail:
4126 debug_generic_stmt (stmt);
4127 return true;
4131 /* Return true when the T can be shared. */
4133 static bool
4134 tree_node_can_be_shared (tree t)
4136 if (IS_TYPE_OR_DECL_P (t)
4137 || is_gimple_min_invariant (t)
4138 || TREE_CODE (t) == SSA_NAME
4139 || t == error_mark_node
4140 || TREE_CODE (t) == IDENTIFIER_NODE)
4141 return true;
4143 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4144 return true;
4146 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4147 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
4148 || TREE_CODE (t) == COMPONENT_REF
4149 || TREE_CODE (t) == REALPART_EXPR
4150 || TREE_CODE (t) == IMAGPART_EXPR)
4151 t = TREE_OPERAND (t, 0);
4153 if (DECL_P (t))
4154 return true;
4156 return false;
4160 /* Called via walk_trees. Verify tree sharing. */
4162 static tree
4163 verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
4165 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4167 if (tree_node_can_be_shared (*tp))
4169 *walk_subtrees = false;
4170 return NULL;
4173 if (pointer_set_insert (visited, *tp))
4174 return *tp;
4176 return NULL;
4180 /* Helper function for verify_gimple_tuples. */
4182 static tree
4183 verify_gimple_tuples_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
4184 void *data ATTRIBUTE_UNUSED)
4186 switch (TREE_CODE (*tp))
4188 case MODIFY_EXPR:
4189 error ("unexpected non-tuple");
4190 debug_tree (*tp);
4191 gcc_unreachable ();
4192 return NULL_TREE;
4194 default:
4195 return NULL_TREE;
4199 /* Verify that there are no trees that should have been converted to
4200 gimple tuples. Return true if T contains a node that should have
4201 been converted to a gimple tuple, but hasn't. */
4203 static bool
4204 verify_gimple_tuples (tree t)
4206 return walk_tree (&t, verify_gimple_tuples_1, NULL, NULL) != NULL;
4209 static bool eh_error_found;
4210 static int
4211 verify_eh_throw_stmt_node (void **slot, void *data)
4213 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4214 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4216 if (!pointer_set_contains (visited, node->stmt))
4218 error ("Dead STMT in EH table");
4219 debug_generic_stmt (node->stmt);
4220 eh_error_found = true;
4222 return 0;
4225 /* Verify the GIMPLE statement chain. */
4227 void
4228 verify_stmts (void)
4230 basic_block bb;
4231 block_stmt_iterator bsi;
4232 bool err = false;
4233 struct pointer_set_t *visited, *visited_stmts;
4234 tree addr;
4236 timevar_push (TV_TREE_STMT_VERIFY);
4237 visited = pointer_set_create ();
4238 visited_stmts = pointer_set_create ();
4240 FOR_EACH_BB (bb)
4242 tree phi;
4243 int i;
4245 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4247 int phi_num_args = PHI_NUM_ARGS (phi);
4249 pointer_set_insert (visited_stmts, phi);
4250 if (bb_for_stmt (phi) != bb)
4252 error ("bb_for_stmt (phi) is set to a wrong basic block");
4253 err |= true;
4256 for (i = 0; i < phi_num_args; i++)
4258 tree t = PHI_ARG_DEF (phi, i);
4259 tree addr;
4261 /* Addressable variables do have SSA_NAMEs but they
4262 are not considered gimple values. */
4263 if (TREE_CODE (t) != SSA_NAME
4264 && TREE_CODE (t) != FUNCTION_DECL
4265 && !is_gimple_val (t))
4267 error ("PHI def is not a GIMPLE value");
4268 debug_generic_stmt (phi);
4269 debug_generic_stmt (t);
4270 err |= true;
4273 addr = walk_tree (&t, verify_expr, (void *) 1, NULL);
4274 if (addr)
4276 debug_generic_stmt (addr);
4277 err |= true;
4280 addr = walk_tree (&t, verify_node_sharing, visited, NULL);
4281 if (addr)
4283 error ("incorrect sharing of tree nodes");
4284 debug_generic_stmt (phi);
4285 debug_generic_stmt (addr);
4286 err |= true;
4291 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
4293 tree stmt = bsi_stmt (bsi);
4295 pointer_set_insert (visited_stmts, stmt);
4296 err |= verify_gimple_tuples (stmt);
4298 if (bb_for_stmt (stmt) != bb)
4300 error ("bb_for_stmt (stmt) is set to a wrong basic block");
4301 err |= true;
4304 bsi_next (&bsi);
4305 err |= verify_stmt (stmt, bsi_end_p (bsi));
4306 addr = walk_tree (&stmt, verify_node_sharing, visited, NULL);
4307 if (addr)
4309 error ("incorrect sharing of tree nodes");
4310 debug_generic_stmt (stmt);
4311 debug_generic_stmt (addr);
4312 err |= true;
4316 eh_error_found = false;
4317 if (get_eh_throw_stmt_table (cfun))
4318 htab_traverse (get_eh_throw_stmt_table (cfun),
4319 verify_eh_throw_stmt_node,
4320 visited_stmts);
4322 if (err | eh_error_found)
4323 internal_error ("verify_stmts failed");
4325 pointer_set_destroy (visited);
4326 pointer_set_destroy (visited_stmts);
4327 verify_histograms ();
4328 timevar_pop (TV_TREE_STMT_VERIFY);
4332 /* Verifies that the flow information is OK. */
4334 static int
4335 tree_verify_flow_info (void)
4337 int err = 0;
4338 basic_block bb;
4339 block_stmt_iterator bsi;
4340 tree stmt;
4341 edge e;
4342 edge_iterator ei;
4344 if (ENTRY_BLOCK_PTR->il.tree)
4346 error ("ENTRY_BLOCK has IL associated with it");
4347 err = 1;
4350 if (EXIT_BLOCK_PTR->il.tree)
4352 error ("EXIT_BLOCK has IL associated with it");
4353 err = 1;
4356 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4357 if (e->flags & EDGE_FALLTHRU)
4359 error ("fallthru to exit from bb %d", e->src->index);
4360 err = 1;
4363 FOR_EACH_BB (bb)
4365 bool found_ctrl_stmt = false;
4367 stmt = NULL_TREE;
4369 /* Skip labels on the start of basic block. */
4370 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4372 tree prev_stmt = stmt;
4374 stmt = bsi_stmt (bsi);
4376 if (TREE_CODE (stmt) != LABEL_EXPR)
4377 break;
4379 if (prev_stmt && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
4381 error ("nonlocal label ");
4382 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
4383 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4384 bb->index);
4385 err = 1;
4388 if (label_to_block (LABEL_EXPR_LABEL (stmt)) != bb)
4390 error ("label ");
4391 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
4392 fprintf (stderr, " to block does not match in bb %d",
4393 bb->index);
4394 err = 1;
4397 if (decl_function_context (LABEL_EXPR_LABEL (stmt))
4398 != current_function_decl)
4400 error ("label ");
4401 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
4402 fprintf (stderr, " has incorrect context in bb %d",
4403 bb->index);
4404 err = 1;
4408 /* Verify that body of basic block BB is free of control flow. */
4409 for (; !bsi_end_p (bsi); bsi_next (&bsi))
4411 tree stmt = bsi_stmt (bsi);
4413 if (found_ctrl_stmt)
4415 error ("control flow in the middle of basic block %d",
4416 bb->index);
4417 err = 1;
4420 if (stmt_ends_bb_p (stmt))
4421 found_ctrl_stmt = true;
4423 if (TREE_CODE (stmt) == LABEL_EXPR)
4425 error ("label ");
4426 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
4427 fprintf (stderr, " in the middle of basic block %d", bb->index);
4428 err = 1;
4432 bsi = bsi_last (bb);
4433 if (bsi_end_p (bsi))
4434 continue;
4436 stmt = bsi_stmt (bsi);
4438 err |= verify_eh_edges (stmt);
4440 if (is_ctrl_stmt (stmt))
4442 FOR_EACH_EDGE (e, ei, bb->succs)
4443 if (e->flags & EDGE_FALLTHRU)
4445 error ("fallthru edge after a control statement in bb %d",
4446 bb->index);
4447 err = 1;
4451 if (TREE_CODE (stmt) != COND_EXPR)
4453 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
4454 after anything else but if statement. */
4455 FOR_EACH_EDGE (e, ei, bb->succs)
4456 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4458 error ("true/false edge after a non-COND_EXPR in bb %d",
4459 bb->index);
4460 err = 1;
4464 switch (TREE_CODE (stmt))
4466 case COND_EXPR:
4468 edge true_edge;
4469 edge false_edge;
4471 if (COND_EXPR_THEN (stmt) != NULL_TREE
4472 || COND_EXPR_ELSE (stmt) != NULL_TREE)
4474 error ("COND_EXPR with code in branches at the end of bb %d",
4475 bb->index);
4476 err = 1;
4479 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4481 if (!true_edge || !false_edge
4482 || !(true_edge->flags & EDGE_TRUE_VALUE)
4483 || !(false_edge->flags & EDGE_FALSE_VALUE)
4484 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4485 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4486 || EDGE_COUNT (bb->succs) >= 3)
4488 error ("wrong outgoing edge flags at end of bb %d",
4489 bb->index);
4490 err = 1;
4493 break;
4495 case GOTO_EXPR:
4496 if (simple_goto_p (stmt))
4498 error ("explicit goto at end of bb %d", bb->index);
4499 err = 1;
4501 else
4503 /* FIXME. We should double check that the labels in the
4504 destination blocks have their address taken. */
4505 FOR_EACH_EDGE (e, ei, bb->succs)
4506 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
4507 | EDGE_FALSE_VALUE))
4508 || !(e->flags & EDGE_ABNORMAL))
4510 error ("wrong outgoing edge flags at end of bb %d",
4511 bb->index);
4512 err = 1;
4515 break;
4517 case RETURN_EXPR:
4518 if (!single_succ_p (bb)
4519 || (single_succ_edge (bb)->flags
4520 & (EDGE_FALLTHRU | EDGE_ABNORMAL
4521 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4523 error ("wrong outgoing edge flags at end of bb %d", bb->index);
4524 err = 1;
4526 if (single_succ (bb) != EXIT_BLOCK_PTR)
4528 error ("return edge does not point to exit in bb %d",
4529 bb->index);
4530 err = 1;
4532 break;
4534 case SWITCH_EXPR:
4536 tree prev;
4537 edge e;
4538 size_t i, n;
4539 tree vec;
4541 vec = SWITCH_LABELS (stmt);
4542 n = TREE_VEC_LENGTH (vec);
4544 /* Mark all the destination basic blocks. */
4545 for (i = 0; i < n; ++i)
4547 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
4548 basic_block label_bb = label_to_block (lab);
4550 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
4551 label_bb->aux = (void *)1;
4554 /* Verify that the case labels are sorted. */
4555 prev = TREE_VEC_ELT (vec, 0);
4556 for (i = 1; i < n - 1; ++i)
4558 tree c = TREE_VEC_ELT (vec, i);
4559 if (! CASE_LOW (c))
4561 error ("found default case not at end of case vector");
4562 err = 1;
4563 continue;
4565 if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
4567 error ("case labels not sorted: ");
4568 print_generic_expr (stderr, prev, 0);
4569 fprintf (stderr," is greater than ");
4570 print_generic_expr (stderr, c, 0);
4571 fprintf (stderr," but comes before it.\n");
4572 err = 1;
4574 prev = c;
4576 if (CASE_LOW (TREE_VEC_ELT (vec, n - 1)))
4578 error ("no default case found at end of case vector");
4579 err = 1;
4582 FOR_EACH_EDGE (e, ei, bb->succs)
4584 if (!e->dest->aux)
4586 error ("extra outgoing edge %d->%d",
4587 bb->index, e->dest->index);
4588 err = 1;
4590 e->dest->aux = (void *)2;
4591 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
4592 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4594 error ("wrong outgoing edge flags at end of bb %d",
4595 bb->index);
4596 err = 1;
4600 /* Check that we have all of them. */
4601 for (i = 0; i < n; ++i)
4603 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
4604 basic_block label_bb = label_to_block (lab);
4606 if (label_bb->aux != (void *)2)
4608 error ("missing edge %i->%i",
4609 bb->index, label_bb->index);
4610 err = 1;
4614 FOR_EACH_EDGE (e, ei, bb->succs)
4615 e->dest->aux = (void *)0;
4618 default: ;
4622 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
4623 verify_dominators (CDI_DOMINATORS);
4625 return err;
4629 /* Updates phi nodes after creating a forwarder block joined
4630 by edge FALLTHRU. */
4632 static void
4633 tree_make_forwarder_block (edge fallthru)
4635 edge e;
4636 edge_iterator ei;
4637 basic_block dummy, bb;
4638 tree phi, new_phi, var;
4640 dummy = fallthru->src;
4641 bb = fallthru->dest;
4643 if (single_pred_p (bb))
4644 return;
4646 /* If we redirected a branch we must create new PHI nodes at the
4647 start of BB. */
4648 for (phi = phi_nodes (dummy); phi; phi = PHI_CHAIN (phi))
4650 var = PHI_RESULT (phi);
4651 new_phi = create_phi_node (var, bb);
4652 SSA_NAME_DEF_STMT (var) = new_phi;
4653 SET_PHI_RESULT (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
4654 add_phi_arg (new_phi, PHI_RESULT (phi), fallthru);
4657 /* Ensure that the PHI node chain is in the same order. */
4658 set_phi_nodes (bb, phi_reverse (phi_nodes (bb)));
4660 /* Add the arguments we have stored on edges. */
4661 FOR_EACH_EDGE (e, ei, bb->preds)
4663 if (e == fallthru)
4664 continue;
4666 flush_pending_stmts (e);
4671 /* Return a non-special label in the head of basic block BLOCK.
4672 Create one if it doesn't exist. */
4674 tree
4675 tree_block_label (basic_block bb)
4677 block_stmt_iterator i, s = bsi_start (bb);
4678 bool first = true;
4679 tree label, stmt;
4681 for (i = s; !bsi_end_p (i); first = false, bsi_next (&i))
4683 stmt = bsi_stmt (i);
4684 if (TREE_CODE (stmt) != LABEL_EXPR)
4685 break;
4686 label = LABEL_EXPR_LABEL (stmt);
4687 if (!DECL_NONLOCAL (label))
4689 if (!first)
4690 bsi_move_before (&i, &s);
4691 return label;
4695 label = create_artificial_label ();
4696 stmt = build1 (LABEL_EXPR, void_type_node, label);
4697 bsi_insert_before (&s, stmt, BSI_NEW_STMT);
4698 return label;
4702 /* Attempt to perform edge redirection by replacing a possibly complex
4703 jump instruction by a goto or by removing the jump completely.
4704 This can apply only if all edges now point to the same block. The
4705 parameters and return values are equivalent to
4706 redirect_edge_and_branch. */
4708 static edge
4709 tree_try_redirect_by_replacing_jump (edge e, basic_block target)
4711 basic_block src = e->src;
4712 block_stmt_iterator b;
4713 tree stmt;
4715 /* We can replace or remove a complex jump only when we have exactly
4716 two edges. */
4717 if (EDGE_COUNT (src->succs) != 2
4718 /* Verify that all targets will be TARGET. Specifically, the
4719 edge that is not E must also go to TARGET. */
4720 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4721 return NULL;
4723 b = bsi_last (src);
4724 if (bsi_end_p (b))
4725 return NULL;
4726 stmt = bsi_stmt (b);
4728 if (TREE_CODE (stmt) == COND_EXPR
4729 || TREE_CODE (stmt) == SWITCH_EXPR)
4731 bsi_remove (&b, true);
4732 e = ssa_redirect_edge (e, target);
4733 e->flags = EDGE_FALLTHRU;
4734 return e;
4737 return NULL;
4741 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4742 edge representing the redirected branch. */
4744 static edge
4745 tree_redirect_edge_and_branch (edge e, basic_block dest)
4747 basic_block bb = e->src;
4748 block_stmt_iterator bsi;
4749 edge ret;
4750 tree stmt;
4752 if (e->flags & EDGE_ABNORMAL)
4753 return NULL;
4755 if (e->src != ENTRY_BLOCK_PTR
4756 && (ret = tree_try_redirect_by_replacing_jump (e, dest)))
4757 return ret;
4759 if (e->dest == dest)
4760 return NULL;
4762 bsi = bsi_last (bb);
4763 stmt = bsi_end_p (bsi) ? NULL : bsi_stmt (bsi);
4765 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
4767 case COND_EXPR:
4768 /* For COND_EXPR, we only need to redirect the edge. */
4769 break;
4771 case GOTO_EXPR:
4772 /* No non-abnormal edges should lead from a non-simple goto, and
4773 simple ones should be represented implicitly. */
4774 gcc_unreachable ();
4776 case SWITCH_EXPR:
4778 tree cases = get_cases_for_edge (e, stmt);
4779 tree label = tree_block_label (dest);
4781 /* If we have a list of cases associated with E, then use it
4782 as it's a lot faster than walking the entire case vector. */
4783 if (cases)
4785 edge e2 = find_edge (e->src, dest);
4786 tree last, first;
4788 first = cases;
4789 while (cases)
4791 last = cases;
4792 CASE_LABEL (cases) = label;
4793 cases = TREE_CHAIN (cases);
4796 /* If there was already an edge in the CFG, then we need
4797 to move all the cases associated with E to E2. */
4798 if (e2)
4800 tree cases2 = get_cases_for_edge (e2, stmt);
4802 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4803 TREE_CHAIN (cases2) = first;
4806 else
4808 tree vec = SWITCH_LABELS (stmt);
4809 size_t i, n = TREE_VEC_LENGTH (vec);
4811 for (i = 0; i < n; i++)
4813 tree elt = TREE_VEC_ELT (vec, i);
4815 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4816 CASE_LABEL (elt) = label;
4820 break;
4823 case RETURN_EXPR:
4824 bsi_remove (&bsi, true);
4825 e->flags |= EDGE_FALLTHRU;
4826 break;
4828 case OMP_RETURN:
4829 case OMP_CONTINUE:
4830 case OMP_SECTIONS_SWITCH:
4831 case OMP_FOR:
4832 /* The edges from OMP constructs can be simply redirected. */
4833 break;
4835 default:
4836 /* Otherwise it must be a fallthru edge, and we don't need to
4837 do anything besides redirecting it. */
4838 gcc_assert (e->flags & EDGE_FALLTHRU);
4839 break;
4842 /* Update/insert PHI nodes as necessary. */
4844 /* Now update the edges in the CFG. */
4845 e = ssa_redirect_edge (e, dest);
4847 return e;
4850 /* Returns true if it is possible to remove edge E by redirecting
4851 it to the destination of the other edge from E->src. */
4853 static bool
4854 tree_can_remove_branch_p (const_edge e)
4856 if (e->flags & EDGE_ABNORMAL)
4857 return false;
4859 return true;
4862 /* Simple wrapper, as we can always redirect fallthru edges. */
4864 static basic_block
4865 tree_redirect_edge_and_branch_force (edge e, basic_block dest)
4867 e = tree_redirect_edge_and_branch (e, dest);
4868 gcc_assert (e);
4870 return NULL;
4874 /* Splits basic block BB after statement STMT (but at least after the
4875 labels). If STMT is NULL, BB is split just after the labels. */
4877 static basic_block
4878 tree_split_block (basic_block bb, void *stmt)
4880 block_stmt_iterator bsi;
4881 tree_stmt_iterator tsi_tgt;
4882 tree act, list;
4883 basic_block new_bb;
4884 edge e;
4885 edge_iterator ei;
4887 new_bb = create_empty_bb (bb);
4889 /* Redirect the outgoing edges. */
4890 new_bb->succs = bb->succs;
4891 bb->succs = NULL;
4892 FOR_EACH_EDGE (e, ei, new_bb->succs)
4893 e->src = new_bb;
4895 if (stmt && TREE_CODE ((tree) stmt) == LABEL_EXPR)
4896 stmt = NULL;
4898 /* Move everything from BSI to the new basic block. */
4899 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4901 act = bsi_stmt (bsi);
4902 if (TREE_CODE (act) == LABEL_EXPR)
4903 continue;
4905 if (!stmt)
4906 break;
4908 if (stmt == act)
4910 bsi_next (&bsi);
4911 break;
4915 if (bsi_end_p (bsi))
4916 return new_bb;
4918 /* Split the statement list - avoid re-creating new containers as this
4919 brings ugly quadratic memory consumption in the inliner.
4920 (We are still quadratic since we need to update stmt BB pointers,
4921 sadly.) */
4922 list = tsi_split_statement_list_before (&bsi.tsi);
4923 set_bb_stmt_list (new_bb, list);
4924 for (tsi_tgt = tsi_start (list);
4925 !tsi_end_p (tsi_tgt); tsi_next (&tsi_tgt))
4926 change_bb_for_stmt (tsi_stmt (tsi_tgt), new_bb);
4928 return new_bb;
4932 /* Moves basic block BB after block AFTER. */
4934 static bool
4935 tree_move_block_after (basic_block bb, basic_block after)
4937 if (bb->prev_bb == after)
4938 return true;
4940 unlink_block (bb);
4941 link_block (bb, after);
4943 return true;
4947 /* Return true if basic_block can be duplicated. */
4949 static bool
4950 tree_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
4952 return true;
4956 /* Create a duplicate of the basic block BB. NOTE: This does not
4957 preserve SSA form. */
4959 static basic_block
4960 tree_duplicate_bb (basic_block bb)
4962 basic_block new_bb;
4963 block_stmt_iterator bsi, bsi_tgt;
4964 tree phi;
4966 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
4968 /* Copy the PHI nodes. We ignore PHI node arguments here because
4969 the incoming edges have not been setup yet. */
4970 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4972 tree copy = create_phi_node (PHI_RESULT (phi), new_bb);
4973 create_new_def_for (PHI_RESULT (copy), copy, PHI_RESULT_PTR (copy));
4976 /* Keep the chain of PHI nodes in the same order so that they can be
4977 updated by ssa_redirect_edge. */
4978 set_phi_nodes (new_bb, phi_reverse (phi_nodes (new_bb)));
4980 bsi_tgt = bsi_start (new_bb);
4981 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4983 def_operand_p def_p;
4984 ssa_op_iter op_iter;
4985 tree stmt, copy;
4986 int region;
4988 stmt = bsi_stmt (bsi);
4989 if (TREE_CODE (stmt) == LABEL_EXPR)
4990 continue;
4992 /* Create a new copy of STMT and duplicate STMT's virtual
4993 operands. */
4994 copy = unshare_expr (stmt);
4995 bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
4996 copy_virtual_operands (copy, stmt);
4997 region = lookup_stmt_eh_region (stmt);
4998 if (region >= 0)
4999 add_stmt_to_eh_region (copy, region);
5000 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5002 /* Create new names for all the definitions created by COPY and
5003 add replacement mappings for each new name. */
5004 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5005 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5008 return new_bb;
5011 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5013 static void
5014 add_phi_args_after_copy_edge (edge e_copy)
5016 basic_block bb, bb_copy = e_copy->src, dest;
5017 edge e;
5018 edge_iterator ei;
5019 tree phi, phi_copy, phi_next, def;
5021 if (!phi_nodes (e_copy->dest))
5022 return;
5024 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5026 if (e_copy->dest->flags & BB_DUPLICATED)
5027 dest = get_bb_original (e_copy->dest);
5028 else
5029 dest = e_copy->dest;
5031 e = find_edge (bb, dest);
5032 if (!e)
5034 /* During loop unrolling the target of the latch edge is copied.
5035 In this case we are not looking for edge to dest, but to
5036 duplicated block whose original was dest. */
5037 FOR_EACH_EDGE (e, ei, bb->succs)
5039 if ((e->dest->flags & BB_DUPLICATED)
5040 && get_bb_original (e->dest) == dest)
5041 break;
5044 gcc_assert (e != NULL);
5047 for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
5048 phi;
5049 phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
5051 phi_next = PHI_CHAIN (phi);
5052 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5053 add_phi_arg (phi_copy, def, e_copy);
5058 /* Basic block BB_COPY was created by code duplication. Add phi node
5059 arguments for edges going out of BB_COPY. The blocks that were
5060 duplicated have BB_DUPLICATED set. */
5062 void
5063 add_phi_args_after_copy_bb (basic_block bb_copy)
5065 edge_iterator ei;
5066 edge e_copy;
5068 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5070 add_phi_args_after_copy_edge (e_copy);
5074 /* Blocks in REGION_COPY array of length N_REGION were created by
5075 duplication of basic blocks. Add phi node arguments for edges
5076 going from these blocks. If E_COPY is not NULL, also add
5077 phi node arguments for its destination.*/
5079 void
5080 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5081 edge e_copy)
5083 unsigned i;
5085 for (i = 0; i < n_region; i++)
5086 region_copy[i]->flags |= BB_DUPLICATED;
5088 for (i = 0; i < n_region; i++)
5089 add_phi_args_after_copy_bb (region_copy[i]);
5090 if (e_copy)
5091 add_phi_args_after_copy_edge (e_copy);
5093 for (i = 0; i < n_region; i++)
5094 region_copy[i]->flags &= ~BB_DUPLICATED;
5097 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5098 important exit edge EXIT. By important we mean that no SSA name defined
5099 inside region is live over the other exit edges of the region. All entry
5100 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5101 to the duplicate of the region. SSA form, dominance and loop information
5102 is updated. The new basic blocks are stored to REGION_COPY in the same
5103 order as they had in REGION, provided that REGION_COPY is not NULL.
5104 The function returns false if it is unable to copy the region,
5105 true otherwise. */
5107 bool
5108 tree_duplicate_sese_region (edge entry, edge exit,
5109 basic_block *region, unsigned n_region,
5110 basic_block *region_copy)
5112 unsigned i;
5113 bool free_region_copy = false, copying_header = false;
5114 struct loop *loop = entry->dest->loop_father;
5115 edge exit_copy;
5116 VEC (basic_block, heap) *doms;
5117 edge redirected;
5118 int total_freq = 0, entry_freq = 0;
5119 gcov_type total_count = 0, entry_count = 0;
5121 if (!can_copy_bbs_p (region, n_region))
5122 return false;
5124 /* Some sanity checking. Note that we do not check for all possible
5125 missuses of the functions. I.e. if you ask to copy something weird,
5126 it will work, but the state of structures probably will not be
5127 correct. */
5128 for (i = 0; i < n_region; i++)
5130 /* We do not handle subloops, i.e. all the blocks must belong to the
5131 same loop. */
5132 if (region[i]->loop_father != loop)
5133 return false;
5135 if (region[i] != entry->dest
5136 && region[i] == loop->header)
5137 return false;
5140 set_loop_copy (loop, loop);
5142 /* In case the function is used for loop header copying (which is the primary
5143 use), ensure that EXIT and its copy will be new latch and entry edges. */
5144 if (loop->header == entry->dest)
5146 copying_header = true;
5147 set_loop_copy (loop, loop_outer (loop));
5149 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5150 return false;
5152 for (i = 0; i < n_region; i++)
5153 if (region[i] != exit->src
5154 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5155 return false;
5158 if (!region_copy)
5160 region_copy = XNEWVEC (basic_block, n_region);
5161 free_region_copy = true;
5164 gcc_assert (!need_ssa_update_p ());
5166 /* Record blocks outside the region that are dominated by something
5167 inside. */
5168 doms = NULL;
5169 initialize_original_copy_tables ();
5171 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5173 if (entry->dest->count)
5175 total_count = entry->dest->count;
5176 entry_count = entry->count;
5177 /* Fix up corner cases, to avoid division by zero or creation of negative
5178 frequencies. */
5179 if (entry_count > total_count)
5180 entry_count = total_count;
5182 else
5184 total_freq = entry->dest->frequency;
5185 entry_freq = EDGE_FREQUENCY (entry);
5186 /* Fix up corner cases, to avoid division by zero or creation of negative
5187 frequencies. */
5188 if (total_freq == 0)
5189 total_freq = 1;
5190 else if (entry_freq > total_freq)
5191 entry_freq = total_freq;
5194 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5195 split_edge_bb_loc (entry));
5196 if (total_count)
5198 scale_bbs_frequencies_gcov_type (region, n_region,
5199 total_count - entry_count,
5200 total_count);
5201 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
5202 total_count);
5204 else
5206 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5207 total_freq);
5208 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
5211 if (copying_header)
5213 loop->header = exit->dest;
5214 loop->latch = exit->src;
5217 /* Redirect the entry and add the phi node arguments. */
5218 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
5219 gcc_assert (redirected != NULL);
5220 flush_pending_stmts (entry);
5222 /* Concerning updating of dominators: We must recount dominators
5223 for entry block and its copy. Anything that is outside of the
5224 region, but was dominated by something inside needs recounting as
5225 well. */
5226 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5227 VEC_safe_push (basic_block, heap, doms, get_bb_original (entry->dest));
5228 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5229 VEC_free (basic_block, heap, doms);
5231 /* Add the other PHI node arguments. */
5232 add_phi_args_after_copy (region_copy, n_region, NULL);
5234 /* Update the SSA web. */
5235 update_ssa (TODO_update_ssa);
5237 if (free_region_copy)
5238 free (region_copy);
5240 free_original_copy_tables ();
5241 return true;
5244 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5245 are stored to REGION_COPY in the same order in that they appear
5246 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5247 the region, EXIT an exit from it. The condition guarding EXIT
5248 is moved to ENTRY. Returns true if duplication succeeds, false
5249 otherwise.
5251 For example,
5253 some_code;
5254 if (cond)
5256 else
5259 is transformed to
5261 if (cond)
5263 some_code;
5266 else
5268 some_code;
5273 bool
5274 tree_duplicate_sese_tail (edge entry, edge exit,
5275 basic_block *region, unsigned n_region,
5276 basic_block *region_copy)
5278 unsigned i;
5279 bool free_region_copy = false;
5280 struct loop *loop = exit->dest->loop_father;
5281 struct loop *orig_loop = entry->dest->loop_father;
5282 basic_block switch_bb, entry_bb, nentry_bb;
5283 VEC (basic_block, heap) *doms;
5284 int total_freq = 0, exit_freq = 0;
5285 gcov_type total_count = 0, exit_count = 0;
5286 edge exits[2], nexits[2], e;
5287 block_stmt_iterator bsi;
5288 tree cond;
5289 edge sorig, snew;
5291 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5292 exits[0] = exit;
5293 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5295 if (!can_copy_bbs_p (region, n_region))
5296 return false;
5298 /* Some sanity checking. Note that we do not check for all possible
5299 missuses of the functions. I.e. if you ask to copy something weird
5300 (e.g., in the example, if there is a jump from inside to the middle
5301 of some_code, or come_code defines some of the values used in cond)
5302 it will work, but the resulting code will not be correct. */
5303 for (i = 0; i < n_region; i++)
5305 /* We do not handle subloops, i.e. all the blocks must belong to the
5306 same loop. */
5307 if (region[i]->loop_father != orig_loop)
5308 return false;
5310 if (region[i] == orig_loop->latch)
5311 return false;
5314 initialize_original_copy_tables ();
5315 set_loop_copy (orig_loop, loop);
5317 if (!region_copy)
5319 region_copy = XNEWVEC (basic_block, n_region);
5320 free_region_copy = true;
5323 gcc_assert (!need_ssa_update_p ());
5325 /* Record blocks outside the region that are dominated by something
5326 inside. */
5327 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5329 if (exit->src->count)
5331 total_count = exit->src->count;
5332 exit_count = exit->count;
5333 /* Fix up corner cases, to avoid division by zero or creation of negative
5334 frequencies. */
5335 if (exit_count > total_count)
5336 exit_count = total_count;
5338 else
5340 total_freq = exit->src->frequency;
5341 exit_freq = EDGE_FREQUENCY (exit);
5342 /* Fix up corner cases, to avoid division by zero or creation of negative
5343 frequencies. */
5344 if (total_freq == 0)
5345 total_freq = 1;
5346 if (exit_freq > total_freq)
5347 exit_freq = total_freq;
5350 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5351 split_edge_bb_loc (exit));
5352 if (total_count)
5354 scale_bbs_frequencies_gcov_type (region, n_region,
5355 total_count - exit_count,
5356 total_count);
5357 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
5358 total_count);
5360 else
5362 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
5363 total_freq);
5364 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
5367 /* Create the switch block, and put the exit condition to it. */
5368 entry_bb = entry->dest;
5369 nentry_bb = get_bb_copy (entry_bb);
5370 if (!last_stmt (entry->src)
5371 || !stmt_ends_bb_p (last_stmt (entry->src)))
5372 switch_bb = entry->src;
5373 else
5374 switch_bb = split_edge (entry);
5375 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
5377 bsi = bsi_last (switch_bb);
5378 cond = last_stmt (exit->src);
5379 gcc_assert (TREE_CODE (cond) == COND_EXPR);
5380 bsi_insert_after (&bsi, unshare_expr (cond), BSI_NEW_STMT);
5382 sorig = single_succ_edge (switch_bb);
5383 sorig->flags = exits[1]->flags;
5384 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
5386 /* Register the new edge from SWITCH_BB in loop exit lists. */
5387 rescan_loop_exit (snew, true, false);
5389 /* Add the PHI node arguments. */
5390 add_phi_args_after_copy (region_copy, n_region, snew);
5392 /* Get rid of now superfluous conditions and associated edges (and phi node
5393 arguments). */
5394 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
5395 PENDING_STMT (e) = NULL_TREE;
5396 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
5397 PENDING_STMT (e) = NULL_TREE;
5399 /* Anything that is outside of the region, but was dominated by something
5400 inside needs to update dominance info. */
5401 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5402 VEC_free (basic_block, heap, doms);
5404 /* Update the SSA web. */
5405 update_ssa (TODO_update_ssa);
5407 if (free_region_copy)
5408 free (region_copy);
5410 free_original_copy_tables ();
5411 return true;
5415 DEF_VEC_P(basic_block);
5416 DEF_VEC_ALLOC_P(basic_block,heap);
5419 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
5420 adding blocks when the dominator traversal reaches EXIT. This
5421 function silently assumes that ENTRY strictly dominates EXIT. */
5423 static void
5424 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
5425 VEC(basic_block,heap) **bbs_p)
5427 basic_block son;
5429 for (son = first_dom_son (CDI_DOMINATORS, entry);
5430 son;
5431 son = next_dom_son (CDI_DOMINATORS, son))
5433 VEC_safe_push (basic_block, heap, *bbs_p, son);
5434 if (son != exit)
5435 gather_blocks_in_sese_region (son, exit, bbs_p);
5439 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
5440 The duplicates are recorded in VARS_MAP. */
5442 static void
5443 replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
5444 tree to_context)
5446 tree t = *tp, new_t;
5447 struct function *f = DECL_STRUCT_FUNCTION (to_context);
5448 void **loc;
5450 if (DECL_CONTEXT (t) == to_context)
5451 return;
5453 loc = pointer_map_contains (vars_map, t);
5455 if (!loc)
5457 loc = pointer_map_insert (vars_map, t);
5459 if (SSA_VAR_P (t))
5461 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
5462 f->unexpanded_var_list
5463 = tree_cons (NULL_TREE, new_t, f->unexpanded_var_list);
5465 else
5467 gcc_assert (TREE_CODE (t) == CONST_DECL);
5468 new_t = copy_node (t);
5470 DECL_CONTEXT (new_t) = to_context;
5472 *loc = new_t;
5474 else
5475 new_t = *loc;
5477 *tp = new_t;
5480 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
5481 VARS_MAP maps old ssa names and var_decls to the new ones. */
5483 static tree
5484 replace_ssa_name (tree name, struct pointer_map_t *vars_map,
5485 tree to_context)
5487 void **loc;
5488 tree new_name, decl = SSA_NAME_VAR (name);
5490 gcc_assert (is_gimple_reg (name));
5492 loc = pointer_map_contains (vars_map, name);
5494 if (!loc)
5496 replace_by_duplicate_decl (&decl, vars_map, to_context);
5498 push_cfun (DECL_STRUCT_FUNCTION (to_context));
5499 if (gimple_in_ssa_p (cfun))
5500 add_referenced_var (decl);
5502 new_name = make_ssa_name (decl, SSA_NAME_DEF_STMT (name));
5503 if (SSA_NAME_IS_DEFAULT_DEF (name))
5504 set_default_def (decl, new_name);
5505 pop_cfun ();
5507 loc = pointer_map_insert (vars_map, name);
5508 *loc = new_name;
5510 else
5511 new_name = *loc;
5513 return new_name;
5516 struct move_stmt_d
5518 tree block;
5519 tree from_context;
5520 tree to_context;
5521 struct pointer_map_t *vars_map;
5522 htab_t new_label_map;
5523 bool remap_decls_p;
5526 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
5527 contained in *TP and change the DECL_CONTEXT of every local
5528 variable referenced in *TP. */
5530 static tree
5531 move_stmt_r (tree *tp, int *walk_subtrees, void *data)
5533 struct move_stmt_d *p = (struct move_stmt_d *) data;
5534 tree t = *tp;
5536 if (p->block
5537 && (EXPR_P (t) || GIMPLE_STMT_P (t)))
5538 TREE_BLOCK (t) = p->block;
5540 if (OMP_DIRECTIVE_P (t)
5541 && TREE_CODE (t) != OMP_RETURN
5542 && TREE_CODE (t) != OMP_CONTINUE)
5544 /* Do not remap variables inside OMP directives. Variables
5545 referenced in clauses and directive header belong to the
5546 parent function and should not be moved into the child
5547 function. */
5548 bool save_remap_decls_p = p->remap_decls_p;
5549 p->remap_decls_p = false;
5550 *walk_subtrees = 0;
5552 walk_tree (&OMP_BODY (t), move_stmt_r, p, NULL);
5554 p->remap_decls_p = save_remap_decls_p;
5556 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
5558 if (TREE_CODE (t) == SSA_NAME)
5559 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
5560 else if (TREE_CODE (t) == LABEL_DECL)
5562 if (p->new_label_map)
5564 struct tree_map in, *out;
5565 in.base.from = t;
5566 out = htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
5567 if (out)
5568 *tp = t = out->to;
5571 DECL_CONTEXT (t) = p->to_context;
5573 else if (p->remap_decls_p)
5575 /* Replace T with its duplicate. T should no longer appear in the
5576 parent function, so this looks wasteful; however, it may appear
5577 in referenced_vars, and more importantly, as virtual operands of
5578 statements, and in alias lists of other variables. It would be
5579 quite difficult to expunge it from all those places. ??? It might
5580 suffice to do this for addressable variables. */
5581 if ((TREE_CODE (t) == VAR_DECL
5582 && !is_global_var (t))
5583 || TREE_CODE (t) == CONST_DECL)
5584 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
5586 if (SSA_VAR_P (t)
5587 && gimple_in_ssa_p (cfun))
5589 push_cfun (DECL_STRUCT_FUNCTION (p->to_context));
5590 add_referenced_var (*tp);
5591 pop_cfun ();
5594 *walk_subtrees = 0;
5596 else if (TYPE_P (t))
5597 *walk_subtrees = 0;
5599 return NULL_TREE;
5602 /* Marks virtual operands of all statements in basic blocks BBS for
5603 renaming. */
5605 static void
5606 mark_virtual_ops_in_region (VEC (basic_block,heap) *bbs)
5608 tree phi;
5609 block_stmt_iterator bsi;
5610 basic_block bb;
5611 unsigned i;
5613 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
5615 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
5616 mark_virtual_ops_for_renaming (phi);
5618 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
5619 mark_virtual_ops_for_renaming (bsi_stmt (bsi));
5623 /* Move basic block BB from function CFUN to function DEST_FN. The
5624 block is moved out of the original linked list and placed after
5625 block AFTER in the new list. Also, the block is removed from the
5626 original array of blocks and placed in DEST_FN's array of blocks.
5627 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
5628 updated to reflect the moved edges.
5630 The local variables are remapped to new instances, VARS_MAP is used
5631 to record the mapping. */
5633 static void
5634 move_block_to_fn (struct function *dest_cfun, basic_block bb,
5635 basic_block after, bool update_edge_count_p,
5636 struct pointer_map_t *vars_map, htab_t new_label_map,
5637 int eh_offset)
5639 struct control_flow_graph *cfg;
5640 edge_iterator ei;
5641 edge e;
5642 block_stmt_iterator si;
5643 struct move_stmt_d d;
5644 unsigned old_len, new_len;
5645 tree phi, next_phi;
5647 /* Remove BB from dominance structures. */
5648 delete_from_dominance_info (CDI_DOMINATORS, bb);
5649 if (current_loops)
5650 remove_bb_from_loops (bb);
5652 /* Link BB to the new linked list. */
5653 move_block_after (bb, after);
5655 /* Update the edge count in the corresponding flowgraphs. */
5656 if (update_edge_count_p)
5657 FOR_EACH_EDGE (e, ei, bb->succs)
5659 cfun->cfg->x_n_edges--;
5660 dest_cfun->cfg->x_n_edges++;
5663 /* Remove BB from the original basic block array. */
5664 VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
5665 cfun->cfg->x_n_basic_blocks--;
5667 /* Grow DEST_CFUN's basic block array if needed. */
5668 cfg = dest_cfun->cfg;
5669 cfg->x_n_basic_blocks++;
5670 if (bb->index >= cfg->x_last_basic_block)
5671 cfg->x_last_basic_block = bb->index + 1;
5673 old_len = VEC_length (basic_block, cfg->x_basic_block_info);
5674 if ((unsigned) cfg->x_last_basic_block >= old_len)
5676 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
5677 VEC_safe_grow_cleared (basic_block, gc, cfg->x_basic_block_info,
5678 new_len);
5681 VEC_replace (basic_block, cfg->x_basic_block_info,
5682 bb->index, bb);
5684 /* Remap the variables in phi nodes. */
5685 for (phi = phi_nodes (bb); phi; phi = next_phi)
5687 use_operand_p use;
5688 tree op = PHI_RESULT (phi);
5689 ssa_op_iter oi;
5691 next_phi = PHI_CHAIN (phi);
5692 if (!is_gimple_reg (op))
5694 /* Remove the phi nodes for virtual operands (alias analysis will be
5695 run for the new function, anyway). */
5696 remove_phi_node (phi, NULL, true);
5697 continue;
5700 SET_PHI_RESULT (phi, replace_ssa_name (op, vars_map, dest_cfun->decl));
5701 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
5703 op = USE_FROM_PTR (use);
5704 if (TREE_CODE (op) == SSA_NAME)
5705 SET_USE (use, replace_ssa_name (op, vars_map, dest_cfun->decl));
5709 /* The statements in BB need to be associated with a new TREE_BLOCK.
5710 Labels need to be associated with a new label-to-block map. */
5711 memset (&d, 0, sizeof (d));
5712 d.vars_map = vars_map;
5713 d.from_context = cfun->decl;
5714 d.to_context = dest_cfun->decl;
5715 d.new_label_map = new_label_map;
5717 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
5719 tree stmt = bsi_stmt (si);
5720 int region;
5722 d.remap_decls_p = true;
5723 if (TREE_BLOCK (stmt))
5724 d.block = DECL_INITIAL (dest_cfun->decl);
5726 walk_tree (&stmt, move_stmt_r, &d, NULL);
5728 if (TREE_CODE (stmt) == LABEL_EXPR)
5730 tree label = LABEL_EXPR_LABEL (stmt);
5731 int uid = LABEL_DECL_UID (label);
5733 gcc_assert (uid > -1);
5735 old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
5736 if (old_len <= (unsigned) uid)
5738 new_len = 3 * uid / 2;
5739 VEC_safe_grow_cleared (basic_block, gc,
5740 cfg->x_label_to_block_map, new_len);
5743 VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
5744 VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
5746 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
5748 if (uid >= dest_cfun->last_label_uid)
5749 dest_cfun->last_label_uid = uid + 1;
5751 else if (TREE_CODE (stmt) == RESX_EXPR && eh_offset != 0)
5752 TREE_OPERAND (stmt, 0) =
5753 build_int_cst (NULL_TREE,
5754 TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0))
5755 + eh_offset);
5757 region = lookup_stmt_eh_region (stmt);
5758 if (region >= 0)
5760 add_stmt_to_eh_region_fn (dest_cfun, stmt, region + eh_offset);
5761 remove_stmt_from_eh_region (stmt);
5762 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
5763 gimple_remove_stmt_histograms (cfun, stmt);
5766 /* We cannot leave any operands allocated from the operand caches of
5767 the current function. */
5768 free_stmt_operands (stmt);
5769 push_cfun (dest_cfun);
5770 update_stmt (stmt);
5771 pop_cfun ();
5775 /* Examine the statements in BB (which is in SRC_CFUN); find and return
5776 the outermost EH region. Use REGION as the incoming base EH region. */
5778 static int
5779 find_outermost_region_in_block (struct function *src_cfun,
5780 basic_block bb, int region)
5782 block_stmt_iterator si;
5784 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
5786 tree stmt = bsi_stmt (si);
5787 int stmt_region;
5789 if (TREE_CODE (stmt) == RESX_EXPR)
5790 stmt_region = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
5791 else
5792 stmt_region = lookup_stmt_eh_region_fn (src_cfun, stmt);
5793 if (stmt_region > 0)
5795 if (region < 0)
5796 region = stmt_region;
5797 else if (stmt_region != region)
5799 region = eh_region_outermost (src_cfun, stmt_region, region);
5800 gcc_assert (region != -1);
5805 return region;
5808 static tree
5809 new_label_mapper (tree decl, void *data)
5811 htab_t hash = (htab_t) data;
5812 struct tree_map *m;
5813 void **slot;
5815 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
5817 m = xmalloc (sizeof (struct tree_map));
5818 m->hash = DECL_UID (decl);
5819 m->base.from = decl;
5820 m->to = create_artificial_label ();
5821 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
5823 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
5824 gcc_assert (*slot == NULL);
5826 *slot = m;
5828 return m->to;
5831 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
5832 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
5833 single basic block in the original CFG and the new basic block is
5834 returned. DEST_CFUN must not have a CFG yet.
5836 Note that the region need not be a pure SESE region. Blocks inside
5837 the region may contain calls to abort/exit. The only restriction
5838 is that ENTRY_BB should be the only entry point and it must
5839 dominate EXIT_BB.
5841 All local variables referenced in the region are assumed to be in
5842 the corresponding BLOCK_VARS and unexpanded variable lists
5843 associated with DEST_CFUN. */
5845 basic_block
5846 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
5847 basic_block exit_bb)
5849 VEC(basic_block,heap) *bbs, *dom_bbs;
5850 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
5851 basic_block after, bb, *entry_pred, *exit_succ, abb;
5852 struct function *saved_cfun = cfun;
5853 int *entry_flag, *exit_flag, eh_offset;
5854 unsigned *entry_prob, *exit_prob;
5855 unsigned i, num_entry_edges, num_exit_edges;
5856 edge e;
5857 edge_iterator ei;
5858 htab_t new_label_map;
5859 struct pointer_map_t *vars_map;
5860 struct loop *loop = entry_bb->loop_father;
5862 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
5863 region. */
5864 gcc_assert (entry_bb != exit_bb
5865 && (!exit_bb
5866 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
5868 /* Collect all the blocks in the region. Manually add ENTRY_BB
5869 because it won't be added by dfs_enumerate_from. */
5870 bbs = NULL;
5871 VEC_safe_push (basic_block, heap, bbs, entry_bb);
5872 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
5874 /* The blocks that used to be dominated by something in BBS will now be
5875 dominated by the new block. */
5876 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
5877 VEC_address (basic_block, bbs),
5878 VEC_length (basic_block, bbs));
5880 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
5881 the predecessor edges to ENTRY_BB and the successor edges to
5882 EXIT_BB so that we can re-attach them to the new basic block that
5883 will replace the region. */
5884 num_entry_edges = EDGE_COUNT (entry_bb->preds);
5885 entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
5886 entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
5887 entry_prob = XNEWVEC (unsigned, num_entry_edges);
5888 i = 0;
5889 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
5891 entry_prob[i] = e->probability;
5892 entry_flag[i] = e->flags;
5893 entry_pred[i++] = e->src;
5894 remove_edge (e);
5897 if (exit_bb)
5899 num_exit_edges = EDGE_COUNT (exit_bb->succs);
5900 exit_succ = (basic_block *) xcalloc (num_exit_edges,
5901 sizeof (basic_block));
5902 exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
5903 exit_prob = XNEWVEC (unsigned, num_exit_edges);
5904 i = 0;
5905 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
5907 exit_prob[i] = e->probability;
5908 exit_flag[i] = e->flags;
5909 exit_succ[i++] = e->dest;
5910 remove_edge (e);
5913 else
5915 num_exit_edges = 0;
5916 exit_succ = NULL;
5917 exit_flag = NULL;
5918 exit_prob = NULL;
5921 /* Switch context to the child function to initialize DEST_FN's CFG. */
5922 gcc_assert (dest_cfun->cfg == NULL);
5923 push_cfun (dest_cfun);
5925 init_empty_tree_cfg ();
5927 /* Initialize EH information for the new function. */
5928 eh_offset = 0;
5929 new_label_map = NULL;
5930 if (saved_cfun->eh)
5932 int region = -1;
5934 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
5935 region = find_outermost_region_in_block (saved_cfun, bb, region);
5937 init_eh_for_function ();
5938 if (region != -1)
5940 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
5941 eh_offset = duplicate_eh_regions (saved_cfun, new_label_mapper,
5942 new_label_map, region, 0);
5946 pop_cfun ();
5948 /* The ssa form for virtual operands in the source function will have to
5949 be repaired. We do not care for the real operands -- the sese region
5950 must be closed with respect to those. */
5951 mark_virtual_ops_in_region (bbs);
5953 /* Move blocks from BBS into DEST_CFUN. */
5954 gcc_assert (VEC_length (basic_block, bbs) >= 2);
5955 after = dest_cfun->cfg->x_entry_block_ptr;
5956 vars_map = pointer_map_create ();
5957 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
5959 /* No need to update edge counts on the last block. It has
5960 already been updated earlier when we detached the region from
5961 the original CFG. */
5962 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, vars_map,
5963 new_label_map, eh_offset);
5964 after = bb;
5967 if (new_label_map)
5968 htab_delete (new_label_map);
5969 pointer_map_destroy (vars_map);
5971 /* Rewire the entry and exit blocks. The successor to the entry
5972 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
5973 the child function. Similarly, the predecessor of DEST_FN's
5974 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
5975 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
5976 various CFG manipulation function get to the right CFG.
5978 FIXME, this is silly. The CFG ought to become a parameter to
5979 these helpers. */
5980 push_cfun (dest_cfun);
5981 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
5982 if (exit_bb)
5983 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
5984 pop_cfun ();
5986 /* Back in the original function, the SESE region has disappeared,
5987 create a new basic block in its place. */
5988 bb = create_empty_bb (entry_pred[0]);
5989 if (current_loops)
5990 add_bb_to_loop (bb, loop);
5991 for (i = 0; i < num_entry_edges; i++)
5993 e = make_edge (entry_pred[i], bb, entry_flag[i]);
5994 e->probability = entry_prob[i];
5997 for (i = 0; i < num_exit_edges; i++)
5999 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6000 e->probability = exit_prob[i];
6003 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6004 for (i = 0; VEC_iterate (basic_block, dom_bbs, i, abb); i++)
6005 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6006 VEC_free (basic_block, heap, dom_bbs);
6008 if (exit_bb)
6010 free (exit_prob);
6011 free (exit_flag);
6012 free (exit_succ);
6014 free (entry_prob);
6015 free (entry_flag);
6016 free (entry_pred);
6017 VEC_free (basic_block, heap, bbs);
6019 return bb;
6023 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h) */
6025 void
6026 dump_function_to_file (tree fn, FILE *file, int flags)
6028 tree arg, vars, var;
6029 struct function *dsf;
6030 bool ignore_topmost_bind = false, any_var = false;
6031 basic_block bb;
6032 tree chain;
6034 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
6036 arg = DECL_ARGUMENTS (fn);
6037 while (arg)
6039 print_generic_expr (file, arg, dump_flags);
6040 if (TREE_CHAIN (arg))
6041 fprintf (file, ", ");
6042 arg = TREE_CHAIN (arg);
6044 fprintf (file, ")\n");
6046 dsf = DECL_STRUCT_FUNCTION (fn);
6047 if (dsf && (flags & TDF_DETAILS))
6048 dump_eh_tree (file, dsf);
6050 if (flags & TDF_RAW)
6052 dump_node (fn, TDF_SLIM | flags, file);
6053 return;
6056 /* Switch CFUN to point to FN. */
6057 push_cfun (DECL_STRUCT_FUNCTION (fn));
6059 /* When GIMPLE is lowered, the variables are no longer available in
6060 BIND_EXPRs, so display them separately. */
6061 if (cfun && cfun->decl == fn && cfun->unexpanded_var_list)
6063 ignore_topmost_bind = true;
6065 fprintf (file, "{\n");
6066 for (vars = cfun->unexpanded_var_list; vars; vars = TREE_CHAIN (vars))
6068 var = TREE_VALUE (vars);
6070 print_generic_decl (file, var, flags);
6071 fprintf (file, "\n");
6073 any_var = true;
6077 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
6079 /* Make a CFG based dump. */
6080 check_bb_profile (ENTRY_BLOCK_PTR, file);
6081 if (!ignore_topmost_bind)
6082 fprintf (file, "{\n");
6084 if (any_var && n_basic_blocks)
6085 fprintf (file, "\n");
6087 FOR_EACH_BB (bb)
6088 dump_generic_bb (file, bb, 2, flags);
6090 fprintf (file, "}\n");
6091 check_bb_profile (EXIT_BLOCK_PTR, file);
6093 else
6095 int indent;
6097 /* Make a tree based dump. */
6098 chain = DECL_SAVED_TREE (fn);
6100 if (chain && TREE_CODE (chain) == BIND_EXPR)
6102 if (ignore_topmost_bind)
6104 chain = BIND_EXPR_BODY (chain);
6105 indent = 2;
6107 else
6108 indent = 0;
6110 else
6112 if (!ignore_topmost_bind)
6113 fprintf (file, "{\n");
6114 indent = 2;
6117 if (any_var)
6118 fprintf (file, "\n");
6120 print_generic_stmt_indented (file, chain, flags, indent);
6121 if (ignore_topmost_bind)
6122 fprintf (file, "}\n");
6125 fprintf (file, "\n\n");
6127 /* Restore CFUN. */
6128 pop_cfun ();
6132 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
6134 void
6135 debug_function (tree fn, int flags)
6137 dump_function_to_file (fn, stderr, flags);
6141 /* Pretty print of the loops intermediate representation. */
6142 static void print_loop (FILE *, struct loop *, int);
6143 static void print_pred_bbs (FILE *, basic_block bb);
6144 static void print_succ_bbs (FILE *, basic_block bb);
6147 /* Print on FILE the indexes for the predecessors of basic_block BB. */
6149 static void
6150 print_pred_bbs (FILE *file, basic_block bb)
6152 edge e;
6153 edge_iterator ei;
6155 FOR_EACH_EDGE (e, ei, bb->preds)
6156 fprintf (file, "bb_%d ", e->src->index);
6160 /* Print on FILE the indexes for the successors of basic_block BB. */
6162 static void
6163 print_succ_bbs (FILE *file, basic_block bb)
6165 edge e;
6166 edge_iterator ei;
6168 FOR_EACH_EDGE (e, ei, bb->succs)
6169 fprintf (file, "bb_%d ", e->dest->index);
6173 /* Pretty print LOOP on FILE, indented INDENT spaces. */
6175 static void
6176 print_loop (FILE *file, struct loop *loop, int indent)
6178 char *s_indent;
6179 basic_block bb;
6181 if (loop == NULL)
6182 return;
6184 s_indent = (char *) alloca ((size_t) indent + 1);
6185 memset ((void *) s_indent, ' ', (size_t) indent);
6186 s_indent[indent] = '\0';
6188 /* Print the loop's header. */
6189 fprintf (file, "%sloop_%d\n", s_indent, loop->num);
6191 /* Print the loop's body. */
6192 fprintf (file, "%s{\n", s_indent);
6193 FOR_EACH_BB (bb)
6194 if (bb->loop_father == loop)
6196 /* Print the basic_block's header. */
6197 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
6198 print_pred_bbs (file, bb);
6199 fprintf (file, "}, succs = {");
6200 print_succ_bbs (file, bb);
6201 fprintf (file, "})\n");
6203 /* Print the basic_block's body. */
6204 fprintf (file, "%s {\n", s_indent);
6205 tree_dump_bb (bb, file, indent + 4);
6206 fprintf (file, "%s }\n", s_indent);
6209 print_loop (file, loop->inner, indent + 2);
6210 fprintf (file, "%s}\n", s_indent);
6211 print_loop (file, loop->next, indent);
6215 /* Follow a CFG edge from the entry point of the program, and on entry
6216 of a loop, pretty print the loop structure on FILE. */
6218 void
6219 print_loop_ir (FILE *file)
6221 basic_block bb;
6223 bb = BASIC_BLOCK (NUM_FIXED_BLOCKS);
6224 if (bb && bb->loop_father)
6225 print_loop (file, bb->loop_father, 0);
6229 /* Debugging loops structure at tree level. */
6231 void
6232 debug_loop_ir (void)
6234 print_loop_ir (stderr);
6238 /* Return true if BB ends with a call, possibly followed by some
6239 instructions that must stay with the call. Return false,
6240 otherwise. */
6242 static bool
6243 tree_block_ends_with_call_p (basic_block bb)
6245 block_stmt_iterator bsi = bsi_last (bb);
6246 return get_call_expr_in (bsi_stmt (bsi)) != NULL;
6250 /* Return true if BB ends with a conditional branch. Return false,
6251 otherwise. */
6253 static bool
6254 tree_block_ends_with_condjump_p (const_basic_block bb)
6256 /* This CONST_CAST is okay because last_stmt doesn't modify its
6257 argument and the return value is not modified. */
6258 const_tree stmt = last_stmt (CONST_CAST_BB(bb));
6259 return (stmt && TREE_CODE (stmt) == COND_EXPR);
6263 /* Return true if we need to add fake edge to exit at statement T.
6264 Helper function for tree_flow_call_edges_add. */
6266 static bool
6267 need_fake_edge_p (tree t)
6269 tree call;
6271 /* NORETURN and LONGJMP calls already have an edge to exit.
6272 CONST and PURE calls do not need one.
6273 We don't currently check for CONST and PURE here, although
6274 it would be a good idea, because those attributes are
6275 figured out from the RTL in mark_constant_function, and
6276 the counter incrementation code from -fprofile-arcs
6277 leads to different results from -fbranch-probabilities. */
6278 call = get_call_expr_in (t);
6279 if (call
6280 && !(call_expr_flags (call) & ECF_NORETURN))
6281 return true;
6283 if (TREE_CODE (t) == ASM_EXPR
6284 && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
6285 return true;
6287 return false;
6291 /* Add fake edges to the function exit for any non constant and non
6292 noreturn calls, volatile inline assembly in the bitmap of blocks
6293 specified by BLOCKS or to the whole CFG if BLOCKS is zero. Return
6294 the number of blocks that were split.
6296 The goal is to expose cases in which entering a basic block does
6297 not imply that all subsequent instructions must be executed. */
6299 static int
6300 tree_flow_call_edges_add (sbitmap blocks)
6302 int i;
6303 int blocks_split = 0;
6304 int last_bb = last_basic_block;
6305 bool check_last_block = false;
6307 if (n_basic_blocks == NUM_FIXED_BLOCKS)
6308 return 0;
6310 if (! blocks)
6311 check_last_block = true;
6312 else
6313 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
6315 /* In the last basic block, before epilogue generation, there will be
6316 a fallthru edge to EXIT. Special care is required if the last insn
6317 of the last basic block is a call because make_edge folds duplicate
6318 edges, which would result in the fallthru edge also being marked
6319 fake, which would result in the fallthru edge being removed by
6320 remove_fake_edges, which would result in an invalid CFG.
6322 Moreover, we can't elide the outgoing fake edge, since the block
6323 profiler needs to take this into account in order to solve the minimal
6324 spanning tree in the case that the call doesn't return.
6326 Handle this by adding a dummy instruction in a new last basic block. */
6327 if (check_last_block)
6329 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
6330 block_stmt_iterator bsi = bsi_last (bb);
6331 tree t = NULL_TREE;
6332 if (!bsi_end_p (bsi))
6333 t = bsi_stmt (bsi);
6335 if (t && need_fake_edge_p (t))
6337 edge e;
6339 e = find_edge (bb, EXIT_BLOCK_PTR);
6340 if (e)
6342 bsi_insert_on_edge (e, build_empty_stmt ());
6343 bsi_commit_edge_inserts ();
6348 /* Now add fake edges to the function exit for any non constant
6349 calls since there is no way that we can determine if they will
6350 return or not... */
6351 for (i = 0; i < last_bb; i++)
6353 basic_block bb = BASIC_BLOCK (i);
6354 block_stmt_iterator bsi;
6355 tree stmt, last_stmt;
6357 if (!bb)
6358 continue;
6360 if (blocks && !TEST_BIT (blocks, i))
6361 continue;
6363 bsi = bsi_last (bb);
6364 if (!bsi_end_p (bsi))
6366 last_stmt = bsi_stmt (bsi);
6369 stmt = bsi_stmt (bsi);
6370 if (need_fake_edge_p (stmt))
6372 edge e;
6373 /* The handling above of the final block before the
6374 epilogue should be enough to verify that there is
6375 no edge to the exit block in CFG already.
6376 Calling make_edge in such case would cause us to
6377 mark that edge as fake and remove it later. */
6378 #ifdef ENABLE_CHECKING
6379 if (stmt == last_stmt)
6381 e = find_edge (bb, EXIT_BLOCK_PTR);
6382 gcc_assert (e == NULL);
6384 #endif
6386 /* Note that the following may create a new basic block
6387 and renumber the existing basic blocks. */
6388 if (stmt != last_stmt)
6390 e = split_block (bb, stmt);
6391 if (e)
6392 blocks_split++;
6394 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
6396 bsi_prev (&bsi);
6398 while (!bsi_end_p (bsi));
6402 if (blocks_split)
6403 verify_flow_info ();
6405 return blocks_split;
6408 /* Purge dead abnormal call edges from basic block BB. */
6410 bool
6411 tree_purge_dead_abnormal_call_edges (basic_block bb)
6413 bool changed = tree_purge_dead_eh_edges (bb);
6415 if (current_function_has_nonlocal_label)
6417 tree stmt = last_stmt (bb);
6418 edge_iterator ei;
6419 edge e;
6421 if (!(stmt && tree_can_make_abnormal_goto (stmt)))
6422 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6424 if (e->flags & EDGE_ABNORMAL)
6426 remove_edge (e);
6427 changed = true;
6429 else
6430 ei_next (&ei);
6433 /* See tree_purge_dead_eh_edges below. */
6434 if (changed)
6435 free_dominance_info (CDI_DOMINATORS);
6438 return changed;
6441 /* Stores all basic blocks dominated by BB to DOM_BBS. */
6443 static void
6444 get_all_dominated_blocks (basic_block bb, VEC (basic_block, heap) **dom_bbs)
6446 basic_block son;
6448 VEC_safe_push (basic_block, heap, *dom_bbs, bb);
6449 for (son = first_dom_son (CDI_DOMINATORS, bb);
6450 son;
6451 son = next_dom_son (CDI_DOMINATORS, son))
6452 get_all_dominated_blocks (son, dom_bbs);
6455 /* Removes edge E and all the blocks dominated by it, and updates dominance
6456 information. The IL in E->src needs to be updated separately.
6457 If dominance info is not available, only the edge E is removed.*/
6459 void
6460 remove_edge_and_dominated_blocks (edge e)
6462 VEC (basic_block, heap) *bbs_to_remove = NULL;
6463 VEC (basic_block, heap) *bbs_to_fix_dom = NULL;
6464 bitmap df, df_idom;
6465 edge f;
6466 edge_iterator ei;
6467 bool none_removed = false;
6468 unsigned i;
6469 basic_block bb, dbb;
6470 bitmap_iterator bi;
6472 if (!dom_info_available_p (CDI_DOMINATORS))
6474 remove_edge (e);
6475 return;
6478 /* No updating is needed for edges to exit. */
6479 if (e->dest == EXIT_BLOCK_PTR)
6481 if (cfgcleanup_altered_bbs)
6482 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
6483 remove_edge (e);
6484 return;
6487 /* First, we find the basic blocks to remove. If E->dest has a predecessor
6488 that is not dominated by E->dest, then this set is empty. Otherwise,
6489 all the basic blocks dominated by E->dest are removed.
6491 Also, to DF_IDOM we store the immediate dominators of the blocks in
6492 the dominance frontier of E (i.e., of the successors of the
6493 removed blocks, if there are any, and of E->dest otherwise). */
6494 FOR_EACH_EDGE (f, ei, e->dest->preds)
6496 if (f == e)
6497 continue;
6499 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
6501 none_removed = true;
6502 break;
6506 df = BITMAP_ALLOC (NULL);
6507 df_idom = BITMAP_ALLOC (NULL);
6509 if (none_removed)
6510 bitmap_set_bit (df_idom,
6511 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
6512 else
6514 get_all_dominated_blocks (e->dest, &bbs_to_remove);
6515 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6517 FOR_EACH_EDGE (f, ei, bb->succs)
6519 if (f->dest != EXIT_BLOCK_PTR)
6520 bitmap_set_bit (df, f->dest->index);
6523 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6524 bitmap_clear_bit (df, bb->index);
6526 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
6528 bb = BASIC_BLOCK (i);
6529 bitmap_set_bit (df_idom,
6530 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
6534 if (cfgcleanup_altered_bbs)
6536 /* Record the set of the altered basic blocks. */
6537 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
6538 bitmap_ior_into (cfgcleanup_altered_bbs, df);
6541 /* Remove E and the cancelled blocks. */
6542 if (none_removed)
6543 remove_edge (e);
6544 else
6546 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6547 delete_basic_block (bb);
6550 /* Update the dominance information. The immediate dominator may change only
6551 for blocks whose immediate dominator belongs to DF_IDOM:
6553 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
6554 removal. Let Z the arbitrary block such that idom(Z) = Y and
6555 Z dominates X after the removal. Before removal, there exists a path P
6556 from Y to X that avoids Z. Let F be the last edge on P that is
6557 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
6558 dominates W, and because of P, Z does not dominate W), and W belongs to
6559 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
6560 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
6562 bb = BASIC_BLOCK (i);
6563 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
6564 dbb;
6565 dbb = next_dom_son (CDI_DOMINATORS, dbb))
6566 VEC_safe_push (basic_block, heap, bbs_to_fix_dom, dbb);
6569 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
6571 BITMAP_FREE (df);
6572 BITMAP_FREE (df_idom);
6573 VEC_free (basic_block, heap, bbs_to_remove);
6574 VEC_free (basic_block, heap, bbs_to_fix_dom);
6577 /* Purge dead EH edges from basic block BB. */
6579 bool
6580 tree_purge_dead_eh_edges (basic_block bb)
6582 bool changed = false;
6583 edge e;
6584 edge_iterator ei;
6585 tree stmt = last_stmt (bb);
6587 if (stmt && tree_can_throw_internal (stmt))
6588 return false;
6590 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6592 if (e->flags & EDGE_EH)
6594 remove_edge_and_dominated_blocks (e);
6595 changed = true;
6597 else
6598 ei_next (&ei);
6601 return changed;
6604 bool
6605 tree_purge_all_dead_eh_edges (const_bitmap blocks)
6607 bool changed = false;
6608 unsigned i;
6609 bitmap_iterator bi;
6611 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
6613 changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
6616 return changed;
6619 /* This function is called whenever a new edge is created or
6620 redirected. */
6622 static void
6623 tree_execute_on_growing_pred (edge e)
6625 basic_block bb = e->dest;
6627 if (phi_nodes (bb))
6628 reserve_phi_args_for_new_edge (bb);
6631 /* This function is called immediately before edge E is removed from
6632 the edge vector E->dest->preds. */
6634 static void
6635 tree_execute_on_shrinking_pred (edge e)
6637 if (phi_nodes (e->dest))
6638 remove_phi_args (e);
6641 /*---------------------------------------------------------------------------
6642 Helper functions for Loop versioning
6643 ---------------------------------------------------------------------------*/
6645 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
6646 of 'first'. Both of them are dominated by 'new_head' basic block. When
6647 'new_head' was created by 'second's incoming edge it received phi arguments
6648 on the edge by split_edge(). Later, additional edge 'e' was created to
6649 connect 'new_head' and 'first'. Now this routine adds phi args on this
6650 additional edge 'e' that new_head to second edge received as part of edge
6651 splitting.
6654 static void
6655 tree_lv_adjust_loop_header_phi (basic_block first, basic_block second,
6656 basic_block new_head, edge e)
6658 tree phi1, phi2;
6659 edge e2 = find_edge (new_head, second);
6661 /* Because NEW_HEAD has been created by splitting SECOND's incoming
6662 edge, we should always have an edge from NEW_HEAD to SECOND. */
6663 gcc_assert (e2 != NULL);
6665 /* Browse all 'second' basic block phi nodes and add phi args to
6666 edge 'e' for 'first' head. PHI args are always in correct order. */
6668 for (phi2 = phi_nodes (second), phi1 = phi_nodes (first);
6669 phi2 && phi1;
6670 phi2 = PHI_CHAIN (phi2), phi1 = PHI_CHAIN (phi1))
6672 tree def = PHI_ARG_DEF (phi2, e2->dest_idx);
6673 add_phi_arg (phi1, def, e);
6677 /* Adds a if else statement to COND_BB with condition COND_EXPR.
6678 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
6679 the destination of the ELSE part. */
6680 static void
6681 tree_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
6682 basic_block second_head ATTRIBUTE_UNUSED,
6683 basic_block cond_bb, void *cond_e)
6685 block_stmt_iterator bsi;
6686 tree new_cond_expr = NULL_TREE;
6687 tree cond_expr = (tree) cond_e;
6688 edge e0;
6690 /* Build new conditional expr */
6691 new_cond_expr = build3 (COND_EXPR, void_type_node, cond_expr,
6692 NULL_TREE, NULL_TREE);
6694 /* Add new cond in cond_bb. */
6695 bsi = bsi_start (cond_bb);
6696 bsi_insert_after (&bsi, new_cond_expr, BSI_NEW_STMT);
6697 /* Adjust edges appropriately to connect new head with first head
6698 as well as second head. */
6699 e0 = single_succ_edge (cond_bb);
6700 e0->flags &= ~EDGE_FALLTHRU;
6701 e0->flags |= EDGE_FALSE_VALUE;
6704 struct cfg_hooks tree_cfg_hooks = {
6705 "tree",
6706 tree_verify_flow_info,
6707 tree_dump_bb, /* dump_bb */
6708 create_bb, /* create_basic_block */
6709 tree_redirect_edge_and_branch,/* redirect_edge_and_branch */
6710 tree_redirect_edge_and_branch_force,/* redirect_edge_and_branch_force */
6711 tree_can_remove_branch_p, /* can_remove_branch_p */
6712 remove_bb, /* delete_basic_block */
6713 tree_split_block, /* split_block */
6714 tree_move_block_after, /* move_block_after */
6715 tree_can_merge_blocks_p, /* can_merge_blocks_p */
6716 tree_merge_blocks, /* merge_blocks */
6717 tree_predict_edge, /* predict_edge */
6718 tree_predicted_by_p, /* predicted_by_p */
6719 tree_can_duplicate_bb_p, /* can_duplicate_block_p */
6720 tree_duplicate_bb, /* duplicate_block */
6721 tree_split_edge, /* split_edge */
6722 tree_make_forwarder_block, /* make_forward_block */
6723 NULL, /* tidy_fallthru_edge */
6724 tree_block_ends_with_call_p, /* block_ends_with_call_p */
6725 tree_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
6726 tree_flow_call_edges_add, /* flow_call_edges_add */
6727 tree_execute_on_growing_pred, /* execute_on_growing_pred */
6728 tree_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
6729 tree_duplicate_loop_to_header_edge, /* duplicate loop for trees */
6730 tree_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
6731 tree_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
6732 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
6733 flush_pending_stmts /* flush_pending_stmts */
6737 /* Split all critical edges. */
6739 static unsigned int
6740 split_critical_edges (void)
6742 basic_block bb;
6743 edge e;
6744 edge_iterator ei;
6746 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
6747 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
6748 mappings around the calls to split_edge. */
6749 start_recording_case_labels ();
6750 FOR_ALL_BB (bb)
6752 FOR_EACH_EDGE (e, ei, bb->succs)
6753 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
6755 split_edge (e);
6758 end_recording_case_labels ();
6759 return 0;
6762 struct tree_opt_pass pass_split_crit_edges =
6764 "crited", /* name */
6765 NULL, /* gate */
6766 split_critical_edges, /* execute */
6767 NULL, /* sub */
6768 NULL, /* next */
6769 0, /* static_pass_number */
6770 TV_TREE_SPLIT_EDGES, /* tv_id */
6771 PROP_cfg, /* properties required */
6772 PROP_no_crit_edges, /* properties_provided */
6773 0, /* properties_destroyed */
6774 0, /* todo_flags_start */
6775 TODO_dump_func, /* todo_flags_finish */
6776 0 /* letter */
6780 /* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
6781 a temporary, make sure and register it to be renamed if necessary,
6782 and finally return the temporary. Put the statements to compute
6783 EXP before the current statement in BSI. */
6785 tree
6786 gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
6788 tree t, new_stmt, orig_stmt;
6790 if (is_gimple_val (exp))
6791 return exp;
6793 t = make_rename_temp (type, NULL);
6794 new_stmt = build_gimple_modify_stmt (t, exp);
6796 orig_stmt = bsi_stmt (*bsi);
6797 SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
6798 TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
6800 bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
6801 if (gimple_in_ssa_p (cfun))
6802 mark_symbols_for_renaming (new_stmt);
6804 return t;
6807 /* Build a ternary operation and gimplify it. Emit code before BSI.
6808 Return the gimple_val holding the result. */
6810 tree
6811 gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
6812 tree type, tree a, tree b, tree c)
6814 tree ret;
6816 ret = fold_build3 (code, type, a, b, c);
6817 STRIP_NOPS (ret);
6819 return gimplify_val (bsi, type, ret);
6822 /* Build a binary operation and gimplify it. Emit code before BSI.
6823 Return the gimple_val holding the result. */
6825 tree
6826 gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
6827 tree type, tree a, tree b)
6829 tree ret;
6831 ret = fold_build2 (code, type, a, b);
6832 STRIP_NOPS (ret);
6834 return gimplify_val (bsi, type, ret);
6837 /* Build a unary operation and gimplify it. Emit code before BSI.
6838 Return the gimple_val holding the result. */
6840 tree
6841 gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
6842 tree a)
6844 tree ret;
6846 ret = fold_build1 (code, type, a);
6847 STRIP_NOPS (ret);
6849 return gimplify_val (bsi, type, ret);
6854 /* Emit return warnings. */
6856 static unsigned int
6857 execute_warn_function_return (void)
6859 #ifdef USE_MAPPED_LOCATION
6860 source_location location;
6861 #else
6862 location_t *locus;
6863 #endif
6864 tree last;
6865 edge e;
6866 edge_iterator ei;
6868 /* If we have a path to EXIT, then we do return. */
6869 if (TREE_THIS_VOLATILE (cfun->decl)
6870 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
6872 #ifdef USE_MAPPED_LOCATION
6873 location = UNKNOWN_LOCATION;
6874 #else
6875 locus = NULL;
6876 #endif
6877 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6879 last = last_stmt (e->src);
6880 if (TREE_CODE (last) == RETURN_EXPR
6881 #ifdef USE_MAPPED_LOCATION
6882 && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
6883 #else
6884 && (locus = EXPR_LOCUS (last)) != NULL)
6885 #endif
6886 break;
6888 #ifdef USE_MAPPED_LOCATION
6889 if (location == UNKNOWN_LOCATION)
6890 location = cfun->function_end_locus;
6891 warning (0, "%H%<noreturn%> function does return", &location);
6892 #else
6893 if (!locus)
6894 locus = &cfun->function_end_locus;
6895 warning (0, "%H%<noreturn%> function does return", locus);
6896 #endif
6899 /* If we see "return;" in some basic block, then we do reach the end
6900 without returning a value. */
6901 else if (warn_return_type
6902 && !TREE_NO_WARNING (cfun->decl)
6903 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
6904 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
6906 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6908 tree last = last_stmt (e->src);
6909 if (TREE_CODE (last) == RETURN_EXPR
6910 && TREE_OPERAND (last, 0) == NULL
6911 && !TREE_NO_WARNING (last))
6913 #ifdef USE_MAPPED_LOCATION
6914 location = EXPR_LOCATION (last);
6915 if (location == UNKNOWN_LOCATION)
6916 location = cfun->function_end_locus;
6917 warning (0, "%Hcontrol reaches end of non-void function", &location);
6918 #else
6919 locus = EXPR_LOCUS (last);
6920 if (!locus)
6921 locus = &cfun->function_end_locus;
6922 warning (0, "%Hcontrol reaches end of non-void function", locus);
6923 #endif
6924 TREE_NO_WARNING (cfun->decl) = 1;
6925 break;
6929 return 0;
6933 /* Given a basic block B which ends with a conditional and has
6934 precisely two successors, determine which of the edges is taken if
6935 the conditional is true and which is taken if the conditional is
6936 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
6938 void
6939 extract_true_false_edges_from_block (basic_block b,
6940 edge *true_edge,
6941 edge *false_edge)
6943 edge e = EDGE_SUCC (b, 0);
6945 if (e->flags & EDGE_TRUE_VALUE)
6947 *true_edge = e;
6948 *false_edge = EDGE_SUCC (b, 1);
6950 else
6952 *false_edge = e;
6953 *true_edge = EDGE_SUCC (b, 1);
6957 struct tree_opt_pass pass_warn_function_return =
6959 NULL, /* name */
6960 NULL, /* gate */
6961 execute_warn_function_return, /* execute */
6962 NULL, /* sub */
6963 NULL, /* next */
6964 0, /* static_pass_number */
6965 0, /* tv_id */
6966 PROP_cfg, /* properties_required */
6967 0, /* properties_provided */
6968 0, /* properties_destroyed */
6969 0, /* todo_flags_start */
6970 0, /* todo_flags_finish */
6971 0 /* letter */
6974 /* Emit noreturn warnings. */
6976 static unsigned int
6977 execute_warn_function_noreturn (void)
6979 if (warn_missing_noreturn
6980 && !TREE_THIS_VOLATILE (cfun->decl)
6981 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
6982 && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl))
6983 warning (OPT_Wmissing_noreturn, "%Jfunction might be possible candidate "
6984 "for attribute %<noreturn%>",
6985 cfun->decl);
6986 return 0;
6989 struct tree_opt_pass pass_warn_function_noreturn =
6991 NULL, /* name */
6992 NULL, /* gate */
6993 execute_warn_function_noreturn, /* execute */
6994 NULL, /* sub */
6995 NULL, /* next */
6996 0, /* static_pass_number */
6997 0, /* tv_id */
6998 PROP_cfg, /* properties_required */
6999 0, /* properties_provided */
7000 0, /* properties_destroyed */
7001 0, /* todo_flags_start */
7002 0, /* todo_flags_finish */
7003 0 /* letter */