gcc/tree-cfg.c
1 /* Control flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007
3 Free Software Foundation, Inc.
4 Contributed by Diego Novillo <dnovillo@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "output.h"
32 #include "flags.h"
33 #include "function.h"
34 #include "expr.h"
35 #include "ggc.h"
36 #include "langhooks.h"
37 #include "diagnostic.h"
38 #include "tree-flow.h"
39 #include "timevar.h"
40 #include "tree-dump.h"
41 #include "tree-pass.h"
42 #include "toplev.h"
43 #include "except.h"
44 #include "cfgloop.h"
45 #include "cfglayout.h"
46 #include "tree-ssa-propagate.h"
47 #include "value-prof.h"
48 #include "pointer-set.h"
50 /* This file contains functions for building the Control Flow Graph (CFG)
51 for a function tree. */
53 /* Local declarations. */
55 /* Initial capacity for the basic block array. */
56 static const int initial_cfg_capacity = 20;
58 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
59 which use a particular edge. The CASE_LABEL_EXPRs are chained together
60 via their TREE_CHAIN field, which we clear after we're done with the
61 hash table to prevent problems with duplication of SWITCH_EXPRs.
63 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
64 update the case vector in response to edge redirections.
66 Right now this table is set up and torn down at key points in the
67 compilation process. It would be nice if we could make the table
68 more persistent. The key is getting notification of changes to
69 the CFG (particularly edge removal, creation and redirection). */
71 static struct pointer_map_t *edge_to_cases;
73 /* CFG statistics. */
74 struct cfg_stats_d
76 long num_merged_labels;
79 static struct cfg_stats_d cfg_stats;
81 /* Nonzero if we found a computed goto while building basic blocks. */
82 static bool found_computed_goto;
84 /* Basic blocks and flowgraphs. */
85 static basic_block create_bb (void *, void *, basic_block);
86 static void make_blocks (tree);
87 static void factor_computed_gotos (void);
89 /* Edges. */
90 static void make_edges (void);
91 static void make_cond_expr_edges (basic_block);
92 static void make_switch_expr_edges (basic_block);
93 static void make_goto_expr_edges (basic_block);
94 static edge tree_redirect_edge_and_branch (edge, basic_block);
95 static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
96 static unsigned int split_critical_edges (void);
98 /* Various helpers. */
99 static inline bool stmt_starts_bb_p (const_tree, const_tree);
100 static int tree_verify_flow_info (void);
101 static void tree_make_forwarder_block (edge);
102 static void tree_cfg2vcg (FILE *);
103 static inline void change_bb_for_stmt (tree t, basic_block bb);
105 /* Flowgraph optimization and cleanup. */
106 static void tree_merge_blocks (basic_block, basic_block);
107 static bool tree_can_merge_blocks_p (const_basic_block, const_basic_block);
108 static void remove_bb (basic_block);
109 static edge find_taken_edge_computed_goto (basic_block, tree);
110 static edge find_taken_edge_cond_expr (basic_block, tree);
111 static edge find_taken_edge_switch_expr (basic_block, tree);
112 static tree find_case_label_for_value (tree, tree);
114 void
115 init_empty_tree_cfg (void)
117 /* Initialize the basic block array. */
118 init_flow ();
119 profile_status = PROFILE_ABSENT;
120 n_basic_blocks = NUM_FIXED_BLOCKS;
121 last_basic_block = NUM_FIXED_BLOCKS;
122 basic_block_info = VEC_alloc (basic_block, gc, initial_cfg_capacity);
123 VEC_safe_grow_cleared (basic_block, gc, basic_block_info,
124 initial_cfg_capacity);
126 /* Build a mapping of labels to their associated blocks. */
127 label_to_block_map = VEC_alloc (basic_block, gc, initial_cfg_capacity);
128 VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
129 initial_cfg_capacity);
131 SET_BASIC_BLOCK (ENTRY_BLOCK, ENTRY_BLOCK_PTR);
132 SET_BASIC_BLOCK (EXIT_BLOCK, EXIT_BLOCK_PTR);
133 ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
134 EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
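/* As a rough sketch of the state left behind by init_empty_tree_cfg
   (based only on the code above): the flowgraph now contains just the
   two fixed blocks, linked to each other:

     n_basic_blocks == NUM_FIXED_BLOCKS
     ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR
     EXIT_BLOCK_PTR->prev_bb == ENTRY_BLOCK_PTR

   make_blocks and make_edges below grow the CFG from this state.  */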
137 /*---------------------------------------------------------------------------
138 Create basic blocks
139 ---------------------------------------------------------------------------*/
141 /* Entry point to the CFG builder for trees. TP points to the list of
142 statements to be added to the flowgraph. */
144 static void
145 build_tree_cfg (tree *tp)
147 /* Register specific tree functions. */
148 tree_register_cfg_hooks ();
150 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
152 init_empty_tree_cfg ();
154 found_computed_goto = 0;
155 make_blocks (*tp);
157 /* Computed gotos are hell to deal with, especially if there are
158 lots of them with a large number of destinations. So we factor
159 them to a common computed goto location before we build the
160 edge list. After we convert back to normal form, we will un-factor
161 the computed gotos since factoring introduces an unwanted jump. */
162 if (found_computed_goto)
163 factor_computed_gotos ();
165 /* Make sure there is always at least one block, even if it's empty. */
166 if (n_basic_blocks == NUM_FIXED_BLOCKS)
167 create_empty_bb (ENTRY_BLOCK_PTR);
169 /* Adjust the size of the array. */
170 if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
171 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);
173 /* To speed up statement iterator walks, we first purge dead labels. */
174 cleanup_dead_labels ();
176 /* Group case nodes to reduce the number of edges.
177 We do this after cleaning up dead labels because otherwise we miss
178 a lot of obvious case merging opportunities. */
179 group_case_labels ();
181 /* Create the edges of the flowgraph. */
182 make_edges ();
183 cleanup_dead_labels ();
185 /* Debugging dumps. */
187 /* Write the flowgraph to a VCG file. */
189 int local_dump_flags;
190 FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
191 if (vcg_file)
193 tree_cfg2vcg (vcg_file);
194 dump_end (TDI_vcg, vcg_file);
198 #ifdef ENABLE_CHECKING
199 verify_stmts ();
200 #endif
202 /* Dump a textual representation of the flowgraph. */
203 if (dump_file)
204 dump_tree_cfg (dump_file, dump_flags);
207 static unsigned int
208 execute_build_cfg (void)
210 build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
211 return 0;
214 struct tree_opt_pass pass_build_cfg =
216 "cfg", /* name */
217 NULL, /* gate */
218 execute_build_cfg, /* execute */
219 NULL, /* sub */
220 NULL, /* next */
221 0, /* static_pass_number */
222 TV_TREE_CFG, /* tv_id */
223 PROP_gimple_leh, /* properties_required */
224 PROP_cfg, /* properties_provided */
225 0, /* properties_destroyed */
226 0, /* todo_flags_start */
227 TODO_verify_stmts | TODO_cleanup_cfg, /* todo_flags_finish */
228 0 /* letter */
231 /* Search the CFG for any computed gotos. If found, factor them to a
232 common computed goto site. Also record the location of that site so
233 that we can un-factor the gotos after we have converted back to
234 normal form. */
236 static void
237 factor_computed_gotos (void)
239 basic_block bb;
240 tree factored_label_decl = NULL;
241 tree var = NULL;
242 tree factored_computed_goto_label = NULL;
243 tree factored_computed_goto = NULL;
245 /* We know there are one or more computed gotos in this function.
246 Examine the last statement in each basic block to see if the block
247 ends with a computed goto. */
249 FOR_EACH_BB (bb)
251 block_stmt_iterator bsi = bsi_last (bb);
252 tree last;
254 if (bsi_end_p (bsi))
255 continue;
256 last = bsi_stmt (bsi);
258 /* Ignore the computed goto we create when we factor the original
259 computed gotos. */
260 if (last == factored_computed_goto)
261 continue;
263 /* If the last statement is a computed goto, factor it. */
264 if (computed_goto_p (last))
266 tree assignment;
268 /* The first time we find a computed goto we need to create
269 the factored goto block and the variable each original
270 computed goto will use for its goto destination. */
271 if (! factored_computed_goto)
273 basic_block new_bb = create_empty_bb (bb);
274 block_stmt_iterator new_bsi = bsi_start (new_bb);
276 /* Create the destination of the factored goto. Each original
277 computed goto will put its desired destination into this
278 variable and jump to the label we create immediately
279 below. */
280 var = create_tmp_var (ptr_type_node, "gotovar");
282 /* Build a label for the new block which will contain the
283 factored computed goto. */
284 factored_label_decl = create_artificial_label ();
285 factored_computed_goto_label
286 = build1 (LABEL_EXPR, void_type_node, factored_label_decl);
287 bsi_insert_after (&new_bsi, factored_computed_goto_label,
288 BSI_NEW_STMT);
290 /* Build our new computed goto. */
291 factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
292 bsi_insert_after (&new_bsi, factored_computed_goto,
293 BSI_NEW_STMT);
296 /* Copy the original computed goto's destination into VAR. */
297 assignment = build_gimple_modify_stmt (var,
298 GOTO_DESTINATION (last));
299 bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);
301 /* And re-vector the computed goto to the new destination. */
302 GOTO_DESTINATION (last) = factored_label_decl;
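/* An illustrative sketch of the factoring (the pointer and label names
   are made up; GOTOVAR is the temporary created above): a function with
   two computed gotos

     goto *p1;   ...   goto *p2;

   is rewritten as

     gotovar = p1; goto factored;   ...   gotovar = p2; goto factored;
     factored: goto *gotovar;

   so that only the single factored block needs an abnormal edge to every
   possible destination, rather than each original computed goto.  */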
308 /* Build a flowgraph for the statement_list STMT_LIST. */
310 static void
311 make_blocks (tree stmt_list)
313 tree_stmt_iterator i = tsi_start (stmt_list);
314 tree stmt = NULL;
315 bool start_new_block = true;
316 bool first_stmt_of_list = true;
317 basic_block bb = ENTRY_BLOCK_PTR;
319 while (!tsi_end_p (i))
321 tree prev_stmt;
323 prev_stmt = stmt;
324 stmt = tsi_stmt (i);
326 /* If the statement starts a new basic block or if we have determined
327 in a previous pass that we need to create a new block for STMT, do
328 so now. */
329 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
331 if (!first_stmt_of_list)
332 stmt_list = tsi_split_statement_list_before (&i);
333 bb = create_basic_block (stmt_list, NULL, bb);
334 start_new_block = false;
337 /* Now add STMT to BB and create the subgraphs for special statement
338 codes. */
339 set_bb_for_stmt (stmt, bb);
341 if (computed_goto_p (stmt))
342 found_computed_goto = true;
344 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
345 next iteration. */
346 if (stmt_ends_bb_p (stmt))
347 start_new_block = true;
349 tsi_next (&i);
350 first_stmt_of_list = false;
355 /* Create and return a new empty basic block after bb AFTER. */
357 static basic_block
358 create_bb (void *h, void *e, basic_block after)
360 basic_block bb;
362 gcc_assert (!e);
364 /* Create and initialize a new basic block. Since alloc_block uses
365 ggc_alloc_cleared to allocate a basic block, we do not have to
366 clear the newly allocated basic block here. */
367 bb = alloc_block ();
369 bb->index = last_basic_block;
370 bb->flags = BB_NEW;
371 bb->il.tree = GGC_CNEW (struct tree_bb_info);
372 set_bb_stmt_list (bb, h ? (tree) h : alloc_stmt_list ());
374 /* Add the new block to the linked list of blocks. */
375 link_block (bb, after);
377 /* Grow the basic block array if needed. */
378 if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
380 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
381 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
384 /* Add the newly created block to the array. */
385 SET_BASIC_BLOCK (last_basic_block, bb);
387 n_basic_blocks++;
388 last_basic_block++;
390 return bb;
394 /*---------------------------------------------------------------------------
395 Edge creation
396 ---------------------------------------------------------------------------*/
398 /* Fold COND_EXPR_COND of each COND_EXPR. */
400 void
401 fold_cond_expr_cond (void)
403 basic_block bb;
405 FOR_EACH_BB (bb)
407 tree stmt = last_stmt (bb);
409 if (stmt
410 && TREE_CODE (stmt) == COND_EXPR)
412 tree cond;
413 bool zerop, onep;
415 fold_defer_overflow_warnings ();
416 cond = fold (COND_EXPR_COND (stmt));
417 zerop = integer_zerop (cond);
418 onep = integer_onep (cond);
419 fold_undefer_overflow_warnings (((zerop || onep)
420 && !TREE_NO_WARNING (stmt)),
421 stmt,
422 WARN_STRICT_OVERFLOW_CONDITIONAL);
423 if (zerop)
424 COND_EXPR_COND (stmt) = boolean_false_node;
425 else if (onep)
426 COND_EXPR_COND (stmt) = boolean_true_node;
431 /* Join all the blocks in the flowgraph. */
433 static void
434 make_edges (void)
436 basic_block bb;
437 struct omp_region *cur_region = NULL;
439 /* Create an edge from entry to the first block with executable
440 statements in it. */
441 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);
443 /* Traverse the basic block array placing edges. */
444 FOR_EACH_BB (bb)
446 tree last = last_stmt (bb);
447 bool fallthru;
449 if (last)
451 enum tree_code code = TREE_CODE (last);
452 switch (code)
454 case GOTO_EXPR:
455 make_goto_expr_edges (bb);
456 fallthru = false;
457 break;
458 case RETURN_EXPR:
459 make_edge (bb, EXIT_BLOCK_PTR, 0);
460 fallthru = false;
461 break;
462 case COND_EXPR:
463 make_cond_expr_edges (bb);
464 fallthru = false;
465 break;
466 case SWITCH_EXPR:
467 make_switch_expr_edges (bb);
468 fallthru = false;
469 break;
470 case RESX_EXPR:
471 make_eh_edges (last);
472 fallthru = false;
473 break;
475 case CALL_EXPR:
476 /* If this function receives a nonlocal goto, then we need to
477 make edges from this call site to all the nonlocal goto
478 handlers. */
479 if (tree_can_make_abnormal_goto (last))
480 make_abnormal_goto_edges (bb, true);
482 /* If this statement has reachable exception handlers, then
483 create abnormal edges to them. */
484 make_eh_edges (last);
486 /* Some calls are known not to return. */
487 fallthru = !(call_expr_flags (last) & ECF_NORETURN);
488 break;
490 case MODIFY_EXPR:
491 gcc_unreachable ();
493 case GIMPLE_MODIFY_STMT:
494 if (is_ctrl_altering_stmt (last))
496 /* A GIMPLE_MODIFY_STMT may have a CALL_EXPR on its RHS and
497 the CALL_EXPR may have an abnormal edge. Search the RHS
498 for this case and create any required edges. */
499 if (tree_can_make_abnormal_goto (last))
500 make_abnormal_goto_edges (bb, true);
502 make_eh_edges (last);
504 fallthru = true;
505 break;
507 case OMP_PARALLEL:
508 case OMP_FOR:
509 case OMP_SINGLE:
510 case OMP_MASTER:
511 case OMP_ORDERED:
512 case OMP_CRITICAL:
513 case OMP_SECTION:
514 cur_region = new_omp_region (bb, code, cur_region);
515 fallthru = true;
516 break;
518 case OMP_SECTIONS:
519 cur_region = new_omp_region (bb, code, cur_region);
520 fallthru = true;
521 break;
523 case OMP_SECTIONS_SWITCH:
524 fallthru = false;
525 break;
527 case OMP_RETURN:
528 /* In the case of an OMP_SECTION, the edge will go somewhere
529 other than the next block. This will be created later. */
530 cur_region->exit = bb;
531 fallthru = cur_region->type != OMP_SECTION;
532 cur_region = cur_region->outer;
533 break;
535 case OMP_CONTINUE:
536 cur_region->cont = bb;
537 switch (cur_region->type)
539 case OMP_FOR:
540 /* Make the loopback edge. */
541 make_edge (bb, single_succ (cur_region->entry), 0);
543 /* Create an edge from OMP_FOR to exit, which corresponds to
544 the case that the body of the loop is not executed at
545 all. */
546 make_edge (cur_region->entry, bb->next_bb, 0);
547 fallthru = true;
548 break;
550 case OMP_SECTIONS:
551 /* Wire up the edges into and out of the nested sections. */
553 basic_block switch_bb = single_succ (cur_region->entry);
555 struct omp_region *i;
556 for (i = cur_region->inner; i ; i = i->next)
558 gcc_assert (i->type == OMP_SECTION);
559 make_edge (switch_bb, i->entry, 0);
560 make_edge (i->exit, bb, EDGE_FALLTHRU);
563 /* Make the loopback edge to the block with
564 OMP_SECTIONS_SWITCH. */
565 make_edge (bb, switch_bb, 0);
567 /* Make the edge from the switch to exit. */
568 make_edge (switch_bb, bb->next_bb, 0);
569 fallthru = false;
571 break;
573 default:
574 gcc_unreachable ();
576 break;
578 default:
579 gcc_assert (!stmt_ends_bb_p (last));
580 fallthru = true;
583 else
584 fallthru = true;
586 if (fallthru)
587 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
590 if (root_omp_region)
591 free_omp_regions ();
593 /* Fold COND_EXPR_COND of each COND_EXPR. */
594 fold_cond_expr_cond ();
598 /* Create the edges for a COND_EXPR starting at block BB.
599 At this point, both clauses must contain only simple gotos. */
601 static void
602 make_cond_expr_edges (basic_block bb)
604 tree entry = last_stmt (bb);
605 basic_block then_bb, else_bb;
606 tree then_label, else_label;
607 edge e;
609 gcc_assert (entry);
610 gcc_assert (TREE_CODE (entry) == COND_EXPR);
612 /* Entry basic blocks for each component. */
613 then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
614 else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
615 then_bb = label_to_block (then_label);
616 else_bb = label_to_block (else_label);
618 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
619 #ifdef USE_MAPPED_LOCATION
620 e->goto_locus = EXPR_LOCATION (COND_EXPR_THEN (entry));
621 #else
622 e->goto_locus = EXPR_LOCUS (COND_EXPR_THEN (entry));
623 #endif
624 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
625 if (e)
627 #ifdef USE_MAPPED_LOCATION
628 e->goto_locus = EXPR_LOCATION (COND_EXPR_ELSE (entry));
629 #else
630 e->goto_locus = EXPR_LOCUS (COND_EXPR_ELSE (entry));
631 #endif
634 /* We do not need the gotos anymore. */
635 COND_EXPR_THEN (entry) = NULL_TREE;
636 COND_EXPR_ELSE (entry) = NULL_TREE;
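/* Illustrative example (label names made up): for a lowered COND_EXPR

     if (a) goto then_lab; else goto else_lab;

   this creates an EDGE_TRUE_VALUE edge from BB to the block holding
   then_lab and an EDGE_FALSE_VALUE edge to the block holding else_lab,
   then drops the two gotos, which are now redundant.  */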
640 /* Called for each element in the hash table (P) as we delete the
641 edge to cases hash table.
643 Clear all the TREE_CHAINs to prevent problems with copying of
644 SWITCH_EXPRs and structure sharing rules, then free the hash table
645 element. */
647 static bool
648 edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
649 void *data ATTRIBUTE_UNUSED)
651 tree t, next;
653 for (t = (tree) *value; t; t = next)
655 next = TREE_CHAIN (t);
656 TREE_CHAIN (t) = NULL;
659 *value = NULL;
660 return false;
663 /* Start recording information mapping edges to case labels. */
665 void
666 start_recording_case_labels (void)
668 gcc_assert (edge_to_cases == NULL);
669 edge_to_cases = pointer_map_create ();
672 /* Return nonzero if we are recording information for case labels. */
674 static bool
675 recording_case_labels_p (void)
677 return (edge_to_cases != NULL);
680 /* Stop recording information mapping edges to case labels and
681 remove any information we have recorded. */
682 void
683 end_recording_case_labels (void)
685 pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
686 pointer_map_destroy (edge_to_cases);
687 edge_to_cases = NULL;
690 /* If we are inside a {start,end}_recording_cases block, then return
691 a chain of CASE_LABEL_EXPRs from T which reference E.
693 Otherwise return NULL. */
695 static tree
696 get_cases_for_edge (edge e, tree t)
698 void **slot;
699 size_t i, n;
700 tree vec;
702 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
703 chains available. Return NULL so the caller can detect this case. */
704 if (!recording_case_labels_p ())
705 return NULL;
707 slot = pointer_map_contains (edge_to_cases, e);
708 if (slot)
709 return (tree) *slot;
711 /* If we did not find E in the hash table, then this must be the first
712 time we have been queried for information about E & T. Add all the
713 elements from T to the hash table, then perform the query again. */
715 vec = SWITCH_LABELS (t);
716 n = TREE_VEC_LENGTH (vec);
717 for (i = 0; i < n; i++)
719 tree elt = TREE_VEC_ELT (vec, i);
720 tree lab = CASE_LABEL (elt);
721 basic_block label_bb = label_to_block (lab);
722 edge this_edge = find_edge (e->src, label_bb);
724 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
725 a new chain. */
726 slot = pointer_map_insert (edge_to_cases, this_edge);
727 TREE_CHAIN (elt) = (tree) *slot;
728 *slot = elt;
731 return (tree) *pointer_map_contains (edge_to_cases, e);
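/* A sketch of the intended use of the recording machinery above
   (E, STMT and NEW_LABEL are placeholders): a pass redirecting a
   switch edge would do roughly

     start_recording_case_labels ();
     for (t = get_cases_for_edge (e, stmt); t; t = TREE_CHAIN (t))
       CASE_LABEL (t) = new_label;
     end_recording_case_labels ();

   instead of rescanning SWITCH_LABELS for every redirected edge.  */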
734 /* Create the edges for a SWITCH_EXPR starting at block BB.
735 At this point, the switch body has been lowered and the
736 SWITCH_LABELS filled in, so this is in effect a multi-way branch. */
738 static void
739 make_switch_expr_edges (basic_block bb)
741 tree entry = last_stmt (bb);
742 size_t i, n;
743 tree vec;
745 vec = SWITCH_LABELS (entry);
746 n = TREE_VEC_LENGTH (vec);
748 for (i = 0; i < n; ++i)
750 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
751 basic_block label_bb = label_to_block (lab);
752 make_edge (bb, label_bb, 0);
757 /* Return the basic block holding label DEST. */
759 basic_block
760 label_to_block_fn (struct function *ifun, tree dest)
762 int uid = LABEL_DECL_UID (dest);
764 /* We would die hard when faced by an undefined label. Emit a label to
765 the very first basic block. This will hopefully make even the dataflow
766 and undefined variable warnings quite right. */
767 if ((errorcount || sorrycount) && uid < 0)
769 block_stmt_iterator bsi =
770 bsi_start (BASIC_BLOCK (NUM_FIXED_BLOCKS));
771 tree stmt;
773 stmt = build1 (LABEL_EXPR, void_type_node, dest);
774 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
775 uid = LABEL_DECL_UID (dest);
777 if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
778 <= (unsigned int) uid)
779 return NULL;
780 return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
783 /* Create edges for an abnormal goto statement at block BB. If FOR_CALL
784 is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */
786 void
787 make_abnormal_goto_edges (basic_block bb, bool for_call)
789 basic_block target_bb;
790 block_stmt_iterator bsi;
792 FOR_EACH_BB (target_bb)
793 for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
795 tree target = bsi_stmt (bsi);
797 if (TREE_CODE (target) != LABEL_EXPR)
798 break;
800 target = LABEL_EXPR_LABEL (target);
802 /* Make an edge to every label block that has been marked as a
803 potential target for a computed goto or a non-local goto. */
804 if ((FORCED_LABEL (target) && !for_call)
805 || (DECL_NONLOCAL (target) && for_call))
807 make_edge (bb, target_bb, EDGE_ABNORMAL);
808 break;
813 /* Create edges for a goto statement at block BB. */
815 static void
816 make_goto_expr_edges (basic_block bb)
818 block_stmt_iterator last = bsi_last (bb);
819 tree goto_t = bsi_stmt (last);
821 /* A simple GOTO creates normal edges. */
822 if (simple_goto_p (goto_t))
824 tree dest = GOTO_DESTINATION (goto_t);
825 edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
826 #ifdef USE_MAPPED_LOCATION
827 e->goto_locus = EXPR_LOCATION (goto_t);
828 #else
829 e->goto_locus = EXPR_LOCUS (goto_t);
830 #endif
831 bsi_remove (&last, true);
832 return;
835 /* A computed GOTO creates abnormal edges. */
836 make_abnormal_goto_edges (bb, false);
840 /*---------------------------------------------------------------------------
841 Flowgraph analysis
842 ---------------------------------------------------------------------------*/
844 /* Cleanup useless labels in basic blocks. This is something we wish
845 to do early because it allows us to group case labels before creating
846 the edges for the CFG, and it speeds up block statement iterators in
847 all passes later on.
848 We rerun this pass after the CFG is created, to get rid of the labels that
849 are no longer referenced. After that we do not run it any more, since
850 (almost) no new labels should be created. */
852 /* A map from basic block index to the leading label of that block. */
853 static struct label_record
855 /* The label. */
856 tree label;
858 /* True if the label is referenced from somewhere. */
859 bool used;
860 } *label_for_bb;
862 /* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
863 static void
864 update_eh_label (struct eh_region *region)
866 tree old_label = get_eh_region_tree_label (region);
867 if (old_label)
869 tree new_label;
870 basic_block bb = label_to_block (old_label);
872 /* ??? After optimizing, there may be EH regions with labels
873 that have already been removed from the function body, so
874 there is no basic block for them. */
875 if (! bb)
876 return;
878 new_label = label_for_bb[bb->index].label;
879 label_for_bb[bb->index].used = true;
880 set_eh_region_tree_label (region, new_label);
884 /* Given LABEL return the first label in the same basic block. */
885 static tree
886 main_block_label (tree label)
888 basic_block bb = label_to_block (label);
889 tree main_label = label_for_bb[bb->index].label;
891 /* label_to_block may have inserted an undefined label into the chain. */
892 if (!main_label)
894 label_for_bb[bb->index].label = label;
895 main_label = label;
898 label_for_bb[bb->index].used = true;
899 return main_label;
902 /* Cleanup redundant labels. This is a three-step process:
903 1) Find the leading label for each block.
904 2) Redirect all references to labels to the leading labels.
905 3) Cleanup all useless labels. */
907 void
908 cleanup_dead_labels (void)
910 basic_block bb;
911 label_for_bb = XCNEWVEC (struct label_record, last_basic_block);
913 /* Find a suitable label for each block. We use the first user-defined
914 label if there is one, or otherwise just the first label we see. */
915 FOR_EACH_BB (bb)
917 block_stmt_iterator i;
919 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
921 tree label, stmt = bsi_stmt (i);
923 if (TREE_CODE (stmt) != LABEL_EXPR)
924 break;
926 label = LABEL_EXPR_LABEL (stmt);
928 /* If we have not yet seen a label for the current block,
929 remember this one and see if there are more labels. */
930 if (!label_for_bb[bb->index].label)
932 label_for_bb[bb->index].label = label;
933 continue;
936 /* If we did see a label for the current block already, but it
937 is an artificially created label, replace it if the current
938 label is a user defined label. */
939 if (!DECL_ARTIFICIAL (label)
940 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
942 label_for_bb[bb->index].label = label;
943 break;
948 /* Now redirect all jumps/branches to the selected label.
949 First do so for each block ending in a control statement. */
950 FOR_EACH_BB (bb)
952 tree stmt = last_stmt (bb);
953 if (!stmt)
954 continue;
956 switch (TREE_CODE (stmt))
958 case COND_EXPR:
960 tree true_branch, false_branch;
962 true_branch = COND_EXPR_THEN (stmt);
963 false_branch = COND_EXPR_ELSE (stmt);
965 if (true_branch)
966 GOTO_DESTINATION (true_branch)
967 = main_block_label (GOTO_DESTINATION (true_branch));
968 if (false_branch)
969 GOTO_DESTINATION (false_branch)
970 = main_block_label (GOTO_DESTINATION (false_branch));
972 break;
975 case SWITCH_EXPR:
977 size_t i;
978 tree vec = SWITCH_LABELS (stmt);
979 size_t n = TREE_VEC_LENGTH (vec);
981 /* Replace all destination labels. */
982 for (i = 0; i < n; ++i)
984 tree elt = TREE_VEC_ELT (vec, i);
985 tree label = main_block_label (CASE_LABEL (elt));
986 CASE_LABEL (elt) = label;
988 break;
991 /* We have to handle GOTO_EXPRs until they're removed, and we don't
992 remove them until after we've created the CFG edges. */
993 case GOTO_EXPR:
994 if (! computed_goto_p (stmt))
996 GOTO_DESTINATION (stmt)
997 = main_block_label (GOTO_DESTINATION (stmt));
998 break;
1001 default:
1002 break;
1006 for_each_eh_region (update_eh_label);
1008 /* Finally, purge dead labels. All user-defined labels and labels that
1009 can be the target of non-local gotos and labels which have their
1010 address taken are preserved. */
1011 FOR_EACH_BB (bb)
1013 block_stmt_iterator i;
1014 tree label_for_this_bb = label_for_bb[bb->index].label;
1016 if (!label_for_this_bb)
1017 continue;
1019 /* If the main label of the block is unused, we may still remove it. */
1020 if (!label_for_bb[bb->index].used)
1021 label_for_this_bb = NULL;
1023 for (i = bsi_start (bb); !bsi_end_p (i); )
1025 tree label, stmt = bsi_stmt (i);
1027 if (TREE_CODE (stmt) != LABEL_EXPR)
1028 break;
1030 label = LABEL_EXPR_LABEL (stmt);
1032 if (label == label_for_this_bb
1033 || ! DECL_ARTIFICIAL (label)
1034 || DECL_NONLOCAL (label)
1035 || FORCED_LABEL (label))
1036 bsi_next (&i);
1037 else
1038 bsi_remove (&i, true);
1042 free (label_for_bb);
1045 /* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1046 and scan the sorted vector of cases. Combine the ones jumping to the
1047 same label.
1048 E.g., three separate entries 1: 2: 3: become one entry 1..3: */
1050 void
1051 group_case_labels (void)
1053 basic_block bb;
1055 FOR_EACH_BB (bb)
1057 tree stmt = last_stmt (bb);
1058 if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
1060 tree labels = SWITCH_LABELS (stmt);
1061 int old_size = TREE_VEC_LENGTH (labels);
1062 int i, j, new_size = old_size;
1063 tree default_case = TREE_VEC_ELT (labels, old_size - 1);
1064 tree default_label;
1066 /* The default label is always the last case in a switch
1067 statement after gimplification. */
1068 default_label = CASE_LABEL (default_case);
1070 /* Look for possible opportunities to merge cases.
1071 Ignore the last element of the label vector because it
1072 must be the default case. */
1073 i = 0;
1074 while (i < old_size - 1)
1076 tree base_case, base_label, base_high;
1077 base_case = TREE_VEC_ELT (labels, i);
1079 gcc_assert (base_case);
1080 base_label = CASE_LABEL (base_case);
1082 /* Discard cases that have the same destination as the
1083 default case. */
1084 if (base_label == default_label)
1086 TREE_VEC_ELT (labels, i) = NULL_TREE;
1087 i++;
1088 new_size--;
1089 continue;
1092 base_high = CASE_HIGH (base_case) ?
1093 CASE_HIGH (base_case) : CASE_LOW (base_case);
1094 i++;
1095 /* Try to merge case labels. Break out when we reach the end
1096 of the label vector or when we cannot merge the next case
1097 label with the current one. */
1098 while (i < old_size - 1)
1100 tree merge_case = TREE_VEC_ELT (labels, i);
1101 tree merge_label = CASE_LABEL (merge_case);
1102 tree t = int_const_binop (PLUS_EXPR, base_high,
1103 integer_one_node, 1);
1105 /* Merge the cases if they jump to the same place,
1106 and their ranges are consecutive. */
1107 if (merge_label == base_label
1108 && tree_int_cst_equal (CASE_LOW (merge_case), t))
1110 base_high = CASE_HIGH (merge_case) ?
1111 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1112 CASE_HIGH (base_case) = base_high;
1113 TREE_VEC_ELT (labels, i) = NULL_TREE;
1114 new_size--;
1115 i++;
1117 else
1118 break;
1122 /* Compress the case labels in the label vector, and adjust the
1123 length of the vector. */
1124 for (i = 0, j = 0; i < new_size; i++)
1126 while (! TREE_VEC_ELT (labels, j))
1127 j++;
1128 TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
1130 TREE_VEC_LENGTH (labels) = new_size;
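/* A small worked example (case values made up): for

     switch (x) { case 1: case 2: case 3: goto A;
                  case 5: goto D; default: goto D; }

   the pass above drops the case 5 entry (same destination as the
   default), merges 1, 2 and 3 into a single 1 ... 3 range case, and
   then compresses the label vector to { 1 ... 3, default }.  */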
1135 /* Checks whether we can merge block B into block A. */
1137 static bool
1138 tree_can_merge_blocks_p (const_basic_block a, const_basic_block b)
1140 const_tree stmt;
1141 const_block_stmt_iterator bsi;
1142 tree phi;
1144 if (!single_succ_p (a))
1145 return false;
1147 if (single_succ_edge (a)->flags & EDGE_ABNORMAL)
1148 return false;
1150 if (single_succ (a) != b)
1151 return false;
1153 if (!single_pred_p (b))
1154 return false;
1156 if (b == EXIT_BLOCK_PTR)
1157 return false;
1159 /* If A ends with a statement that causes exceptions or something similar, we
1160 cannot merge the blocks. */
1161 stmt = const_last_stmt (a);
1162 if (stmt && stmt_ends_bb_p (stmt))
1163 return false;
1165 /* Do not allow a block with only a non-local label to be merged. */
1166 if (stmt && TREE_CODE (stmt) == LABEL_EXPR
1167 && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
1168 return false;
1170 /* It must be possible to eliminate all phi nodes in B. If ssa form
1171 is not up-to-date, we cannot eliminate any phis; however, if only
1172 some symbols as a whole are marked for renaming, this is not a problem,
1173 as phi nodes for those symbols are irrelevant in updating anyway. */
1174 phi = phi_nodes (b);
1175 if (phi)
1177 if (name_mappings_registered_p ())
1178 return false;
1180 for (; phi; phi = PHI_CHAIN (phi))
1181 if (!is_gimple_reg (PHI_RESULT (phi))
1182 && !may_propagate_copy (PHI_RESULT (phi), PHI_ARG_DEF (phi, 0)))
1183 return false;
1186 /* Do not remove user labels. */
1187 for (bsi = cbsi_start (b); !cbsi_end_p (bsi); cbsi_next (&bsi))
1189 stmt = cbsi_stmt (bsi);
1190 if (TREE_CODE (stmt) != LABEL_EXPR)
1191 break;
1192 if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
1193 return false;
1196 /* Protect the loop latches. */
1197 if (current_loops
1198 && b->loop_father->latch == b)
1199 return false;
1201 return true;
1204 /* Replaces all uses of NAME by VAL. */
1206 void
1207 replace_uses_by (tree name, tree val)
1209 imm_use_iterator imm_iter;
1210 use_operand_p use;
1211 tree stmt;
1212 edge e;
1214 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1216 if (TREE_CODE (stmt) != PHI_NODE)
1217 push_stmt_changes (&stmt);
1219 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1221 replace_exp (use, val);
1223 if (TREE_CODE (stmt) == PHI_NODE)
1225 e = PHI_ARG_EDGE (stmt, PHI_ARG_INDEX_FROM_USE (use));
1226 if (e->flags & EDGE_ABNORMAL)
1228 /* This can only occur for virtual operands, since
1229 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
1230 would prevent replacement. */
1231 gcc_assert (!is_gimple_reg (name));
1232 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1237 if (TREE_CODE (stmt) != PHI_NODE)
1239 tree rhs;
1241 fold_stmt_inplace (stmt);
1242 if (cfgcleanup_altered_bbs)
1243 bitmap_set_bit (cfgcleanup_altered_bbs, bb_for_stmt (stmt)->index);
1245 /* FIXME. This should go in pop_stmt_changes. */
1246 rhs = get_rhs (stmt);
1247 if (TREE_CODE (rhs) == ADDR_EXPR)
1248 recompute_tree_invariant_for_addr_expr (rhs);
1250 maybe_clean_or_replace_eh_stmt (stmt, stmt);
1252 pop_stmt_changes (&stmt);
1256 gcc_assert (has_zero_uses (name));
1258 /* Also update the trees stored in loop structures. */
1259 if (current_loops)
1261 struct loop *loop;
1262 loop_iterator li;
1264 FOR_EACH_LOOP (li, loop, 0)
1266 substitute_in_loop_info (loop, name, val);
1271 /* Merge block B into block A. */
1273 static void
1274 tree_merge_blocks (basic_block a, basic_block b)
1276 block_stmt_iterator bsi;
1277 tree_stmt_iterator last;
1278 tree phi;
1280 if (dump_file)
1281 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1283 /* Remove all single-valued PHI nodes from block B of the form
1284 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1285 bsi = bsi_last (a);
1286 for (phi = phi_nodes (b); phi; phi = phi_nodes (b))
1288 tree def = PHI_RESULT (phi), use = PHI_ARG_DEF (phi, 0);
1289 tree copy;
1290 bool may_replace_uses = may_propagate_copy (def, use);
1292 /* In case we maintain loop closed ssa form, do not propagate arguments
1293 of loop exit phi nodes. */
1294 if (current_loops
1295 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1296 && is_gimple_reg (def)
1297 && TREE_CODE (use) == SSA_NAME
1298 && a->loop_father != b->loop_father)
1299 may_replace_uses = false;
1301 if (!may_replace_uses)
1303 gcc_assert (is_gimple_reg (def));
1305 /* Note that just emitting the copies is fine -- there is no problem
1306 with ordering of phi nodes. This is because A is the single
1307 predecessor of B, therefore results of the phi nodes cannot
1308 appear as arguments of the phi nodes. */
1309 copy = build_gimple_modify_stmt (def, use);
1310 bsi_insert_after (&bsi, copy, BSI_NEW_STMT);
1311 SSA_NAME_DEF_STMT (def) = copy;
1312 remove_phi_node (phi, NULL, false);
1314 else
1316 replace_uses_by (def, use);
1317 remove_phi_node (phi, NULL, true);
1321 /* Ensure that B follows A. */
1322 move_block_after (b, a);
1324 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1325 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1327 /* Remove labels from B and set bb_for_stmt to A for other statements. */
1328 for (bsi = bsi_start (b); !bsi_end_p (bsi);)
1330 if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
1332 tree label = bsi_stmt (bsi);
1334 bsi_remove (&bsi, false);
1335 /* Now that we can thread computed gotos, we might have
1336 a situation where we have a forced label in block B.
1337 However, the label at the start of block B might still be
1338 used in other ways (think about the runtime checking for
1339 Fortran assigned gotos). So we cannot just delete the
1340 label. Instead we move the label to the start of block A. */
1341 if (FORCED_LABEL (LABEL_EXPR_LABEL (label)))
1343 block_stmt_iterator dest_bsi = bsi_start (a);
1344 bsi_insert_before (&dest_bsi, label, BSI_NEW_STMT);
1347 else
1349 change_bb_for_stmt (bsi_stmt (bsi), a);
1350 bsi_next (&bsi);
1354 /* Merge the chains. */
1355 last = tsi_last (bb_stmt_list (a));
1356 tsi_link_after (&last, bb_stmt_list (b), TSI_NEW_STMT);
1357 set_bb_stmt_list (b, NULL_TREE);
1359 if (cfgcleanup_altered_bbs)
1360 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
1364 /* Return the one of the two successors of BB that is not reached by a
1365 complex edge, if there is one. Else, return BB. We use
1366 this in optimizations that use post-dominators for their heuristics,
1367 to catch the cases in C++ where function calls are involved. */
1369 basic_block
1370 single_noncomplex_succ (basic_block bb)
1372 edge e0, e1;
1373 if (EDGE_COUNT (bb->succs) != 2)
1374 return bb;
1376 e0 = EDGE_SUCC (bb, 0);
1377 e1 = EDGE_SUCC (bb, 1);
1378 if (e0->flags & EDGE_COMPLEX)
1379 return e1->dest;
1380 if (e1->flags & EDGE_COMPLEX)
1381 return e0->dest;
1383 return bb;
1387 /* Walk the function tree removing unnecessary statements.
1389 * Empty statement nodes are removed
1391 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1393 * Unnecessary COND_EXPRs are removed
1395 * Some unnecessary BIND_EXPRs are removed
1397 Clearly more work could be done. The trick is doing the analysis
1398 and removal fast enough to be a net improvement in compile times.
1400 Note that when we remove a control structure such as a COND_EXPR,
1401 BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1402 to ensure we eliminate all the useless code. */
1404 struct rus_data
1406 tree *last_goto;
1407 bool repeat;
1408 bool may_throw;
1409 bool may_branch;
1410 bool has_label;
1413 static void remove_useless_stmts_1 (tree *, struct rus_data *);
1415 static bool
1416 remove_useless_stmts_warn_notreached (tree stmt)
1418 if (EXPR_HAS_LOCATION (stmt))
1420 location_t loc = EXPR_LOCATION (stmt);
1421 if (LOCATION_LINE (loc) > 0)
1423 warning (0, "%Hwill never be executed", &loc);
1424 return true;
1428 switch (TREE_CODE (stmt))
1430 case STATEMENT_LIST:
1432 tree_stmt_iterator i;
1433 for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
1434 if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
1435 return true;
1437 break;
1439 case COND_EXPR:
1440 if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
1441 return true;
1442 if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
1443 return true;
1444 if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
1445 return true;
1446 break;
1448 case TRY_FINALLY_EXPR:
1449 case TRY_CATCH_EXPR:
1450 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
1451 return true;
1452 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
1453 return true;
1454 break;
1456 case CATCH_EXPR:
1457 return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
1458 case EH_FILTER_EXPR:
1459 return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
1460 case BIND_EXPR:
1461 return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));
1463 default:
1464 /* Not a live container. */
1465 break;
1468 return false;
1471 static void
1472 remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
1474 tree then_clause, else_clause, cond;
1475 bool save_has_label, then_has_label, else_has_label;
1477 save_has_label = data->has_label;
1478 data->has_label = false;
1479 data->last_goto = NULL;
1481 remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);
1483 then_has_label = data->has_label;
1484 data->has_label = false;
1485 data->last_goto = NULL;
1487 remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);
1489 else_has_label = data->has_label;
1490 data->has_label = save_has_label | then_has_label | else_has_label;
1492 then_clause = COND_EXPR_THEN (*stmt_p);
1493 else_clause = COND_EXPR_ELSE (*stmt_p);
1494 cond = fold (COND_EXPR_COND (*stmt_p));
1496 /* If neither arm does anything at all, we can remove the whole IF. */
1497 if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
1499 *stmt_p = build_empty_stmt ();
1500 data->repeat = true;
1503 /* If there are no reachable statements in an arm, then we can
1504 zap the entire conditional. */
1505 else if (integer_nonzerop (cond) && !else_has_label)
1507 if (warn_notreached)
1508 remove_useless_stmts_warn_notreached (else_clause);
1509 *stmt_p = then_clause;
1510 data->repeat = true;
1512 else if (integer_zerop (cond) && !then_has_label)
1514 if (warn_notreached)
1515 remove_useless_stmts_warn_notreached (then_clause);
1516 *stmt_p = else_clause;
1517 data->repeat = true;
1520 /* Check a couple of simple things on then/else with single stmts. */
1521 else
1523 tree then_stmt = expr_only (then_clause);
1524 tree else_stmt = expr_only (else_clause);
1526 /* Notice branches to a common destination. */
1527 if (then_stmt && else_stmt
1528 && TREE_CODE (then_stmt) == GOTO_EXPR
1529 && TREE_CODE (else_stmt) == GOTO_EXPR
1530 && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
1532 *stmt_p = then_stmt;
1533 data->repeat = true;
1536 /* If the THEN/ELSE clause merely assigns a value to a variable or
1537 parameter which is already known to contain that value, then
1538 remove the useless THEN/ELSE clause. */
1539 else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1541 if (else_stmt
1542 && TREE_CODE (else_stmt) == GIMPLE_MODIFY_STMT
1543 && GIMPLE_STMT_OPERAND (else_stmt, 0) == cond
1544 && integer_zerop (GIMPLE_STMT_OPERAND (else_stmt, 1)))
1545 COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
1547 else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
1548 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1549 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1550 && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
1552 tree stmt = (TREE_CODE (cond) == EQ_EXPR
1553 ? then_stmt : else_stmt);
1554 tree *location = (TREE_CODE (cond) == EQ_EXPR
1555 ? &COND_EXPR_THEN (*stmt_p)
1556 : &COND_EXPR_ELSE (*stmt_p));
1558 if (stmt
1559 && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
1560 && GIMPLE_STMT_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
1561 && GIMPLE_STMT_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
1562 *location = alloc_stmt_list ();
1566 /* Protect GOTOs in the arm of COND_EXPRs from being removed. They
1567 would be re-introduced during lowering. */
1568 data->last_goto = NULL;
1572 static void
1573 remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
1575 bool save_may_branch, save_may_throw;
1576 bool this_may_branch, this_may_throw;
1578 /* Collect may_branch and may_throw information for the body only. */
1579 save_may_branch = data->may_branch;
1580 save_may_throw = data->may_throw;
1581 data->may_branch = false;
1582 data->may_throw = false;
1583 data->last_goto = NULL;
1585 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1587 this_may_branch = data->may_branch;
1588 this_may_throw = data->may_throw;
1589 data->may_branch |= save_may_branch;
1590 data->may_throw |= save_may_throw;
1591 data->last_goto = NULL;
1593 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1595 /* If the body is empty, then we can emit the FINALLY block without
1596 the enclosing TRY_FINALLY_EXPR. */
1597 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
1599 *stmt_p = TREE_OPERAND (*stmt_p, 1);
1600 data->repeat = true;
1603 /* If the handler is empty, then we can emit the TRY block without
1604 the enclosing TRY_FINALLY_EXPR. */
1605 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1607 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1608 data->repeat = true;
1611 /* If the body neither throws nor branches, then we can safely
1612 string the TRY and FINALLY blocks together. */
1613 else if (!this_may_branch && !this_may_throw)
1615 tree stmt = *stmt_p;
1616 *stmt_p = TREE_OPERAND (stmt, 0);
1617 append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
1618 data->repeat = true;
1623 static void
1624 remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
1626 bool save_may_throw, this_may_throw;
1627 tree_stmt_iterator i;
1628 tree stmt;
1630 /* Collect may_throw information for the body only. */
1631 save_may_throw = data->may_throw;
1632 data->may_throw = false;
1633 data->last_goto = NULL;
1635 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1637 this_may_throw = data->may_throw;
1638 data->may_throw = save_may_throw;
1640 /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
1641 if (!this_may_throw)
1643 if (warn_notreached)
1644 remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
1645 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1646 data->repeat = true;
1647 return;
1650 /* Process the catch clause specially. We may be able to tell that
1651 no exceptions propagate past this point. */
1653 this_may_throw = true;
1654 i = tsi_start (TREE_OPERAND (*stmt_p, 1));
1655 stmt = tsi_stmt (i);
1656 data->last_goto = NULL;
1658 switch (TREE_CODE (stmt))
1660 case CATCH_EXPR:
1661 for (; !tsi_end_p (i); tsi_next (&i))
1663 stmt = tsi_stmt (i);
1664 /* If we catch all exceptions, then the body does not
1665 propagate exceptions past this point. */
1666 if (CATCH_TYPES (stmt) == NULL)
1667 this_may_throw = false;
1668 data->last_goto = NULL;
1669 remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
1671 break;
1673 case EH_FILTER_EXPR:
1674 if (EH_FILTER_MUST_NOT_THROW (stmt))
1675 this_may_throw = false;
1676 else if (EH_FILTER_TYPES (stmt) == NULL)
1677 this_may_throw = false;
1678 remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
1679 break;
1681 default:
1682 /* Otherwise this is a cleanup. */
1683 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1685 /* If the cleanup is empty, then we can emit the TRY block without
1686 the enclosing TRY_CATCH_EXPR. */
1687 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1689 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1690 data->repeat = true;
1692 break;
1694 data->may_throw |= this_may_throw;
1698 static void
1699 remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
1701 tree block;
1703 /* First remove anything underneath the BIND_EXPR. */
1704 remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);
1706 /* If the BIND_EXPR has no variables, then we can pull everything
1707 up one level and remove the BIND_EXPR, unless this is the toplevel
1708 BIND_EXPR for the current function or an inlined function.
1710 When this situation occurs we will want to apply this
1711 optimization again. */
1712 block = BIND_EXPR_BLOCK (*stmt_p);
1713 if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
1714 && *stmt_p != DECL_SAVED_TREE (current_function_decl)
1715 && (! block
1716 || ! BLOCK_ABSTRACT_ORIGIN (block)
1717 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1718 != FUNCTION_DECL)))
1720 *stmt_p = BIND_EXPR_BODY (*stmt_p);
1721 data->repeat = true;
1726 static void
1727 remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
1729 tree dest = GOTO_DESTINATION (*stmt_p);
1731 data->may_branch = true;
1732 data->last_goto = NULL;
1734 /* Record the last goto expr, so that we can delete it if unnecessary. */
1735 if (TREE_CODE (dest) == LABEL_DECL)
1736 data->last_goto = stmt_p;
1740 static void
1741 remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
1743 tree label = LABEL_EXPR_LABEL (*stmt_p);
1745 data->has_label = true;
1747 /* We do want to jump across non-local label receiver code. */
1748 if (DECL_NONLOCAL (label))
1749 data->last_goto = NULL;
1751 else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
1753 *data->last_goto = build_empty_stmt ();
1754 data->repeat = true;
1757 /* ??? Add something here to delete unused labels. */
1761 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
1762 decl. This allows us to eliminate redundant or useless
1763 calls to "const" functions.
1765 The gimplifier already does the same operation, but we may notice functions
1766 being const and pure once their calls have been gimplified, so we need
1767 to update the flag. */
1769 static void
1770 update_call_expr_flags (tree call)
1772 tree decl = get_callee_fndecl (call);
1773 if (!decl)
1774 return;
1775 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1776 TREE_SIDE_EFFECTS (call) = 0;
1777 if (TREE_NOTHROW (decl))
1778 TREE_NOTHROW (call) = 1;
1782 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1784 void
1785 notice_special_calls (tree t)
1787 int flags = call_expr_flags (t);
1789 if (flags & ECF_MAY_BE_ALLOCA)
1790 current_function_calls_alloca = true;
1791 if (flags & ECF_RETURNS_TWICE)
1792 current_function_calls_setjmp = true;
1796 /* Clear flags set by notice_special_calls. Used by dead code removal
1797 to update the flags. */
1799 void
1800 clear_special_calls (void)
1802 current_function_calls_alloca = false;
1803 current_function_calls_setjmp = false;
1807 static void
1808 remove_useless_stmts_1 (tree *tp, struct rus_data *data)
1810 tree t = *tp, op;
1812 switch (TREE_CODE (t))
1814 case COND_EXPR:
1815 remove_useless_stmts_cond (tp, data);
1816 break;
1818 case TRY_FINALLY_EXPR:
1819 remove_useless_stmts_tf (tp, data);
1820 break;
1822 case TRY_CATCH_EXPR:
1823 remove_useless_stmts_tc (tp, data);
1824 break;
1826 case BIND_EXPR:
1827 remove_useless_stmts_bind (tp, data);
1828 break;
1830 case GOTO_EXPR:
1831 remove_useless_stmts_goto (tp, data);
1832 break;
1834 case LABEL_EXPR:
1835 remove_useless_stmts_label (tp, data);
1836 break;
1838 case RETURN_EXPR:
1839 fold_stmt (tp);
1840 data->last_goto = NULL;
1841 data->may_branch = true;
1842 break;
1844 case CALL_EXPR:
1845 fold_stmt (tp);
1846 data->last_goto = NULL;
1847 notice_special_calls (t);
1848 update_call_expr_flags (t);
1849 if (tree_could_throw_p (t))
1850 data->may_throw = true;
1851 break;
1853 case MODIFY_EXPR:
1854 gcc_unreachable ();
1856 case GIMPLE_MODIFY_STMT:
1857 data->last_goto = NULL;
1858 fold_stmt (tp);
1859 op = get_call_expr_in (t);
1860 if (op)
1862 update_call_expr_flags (op);
1863 notice_special_calls (op);
1865 if (tree_could_throw_p (t))
1866 data->may_throw = true;
1867 break;
1869 case STATEMENT_LIST:
1871 tree_stmt_iterator i = tsi_start (t);
1872 while (!tsi_end_p (i))
1874 t = tsi_stmt (i);
1875 if (IS_EMPTY_STMT (t))
1877 tsi_delink (&i);
1878 continue;
1881 remove_useless_stmts_1 (tsi_stmt_ptr (i), data);
1883 t = tsi_stmt (i);
1884 if (TREE_CODE (t) == STATEMENT_LIST)
1886 tsi_link_before (&i, t, TSI_SAME_STMT);
1887 tsi_delink (&i);
1889 else
1890 tsi_next (&i);
1893 break;
1894 case ASM_EXPR:
1895 fold_stmt (tp);
1896 data->last_goto = NULL;
1897 break;
1899 default:
1900 data->last_goto = NULL;
1901 break;
1905 static unsigned int
1906 remove_useless_stmts (void)
1908 struct rus_data data;
1910 clear_special_calls ();
1914 memset (&data, 0, sizeof (data));
1915 remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
1917 while (data.repeat);
1918 return 0;
1922 struct tree_opt_pass pass_remove_useless_stmts =
1924 "useless", /* name */
1925 NULL, /* gate */
1926 remove_useless_stmts, /* execute */
1927 NULL, /* sub */
1928 NULL, /* next */
1929 0, /* static_pass_number */
1930 0, /* tv_id */
1931 PROP_gimple_any, /* properties_required */
1932 0, /* properties_provided */
1933 0, /* properties_destroyed */
1934 0, /* todo_flags_start */
1935 TODO_dump_func, /* todo_flags_finish */
1936 0 /* letter */
1939 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1941 static void
1942 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1944 tree phi;
1946 /* Since this block is no longer reachable, we can just delete all
1947 of its PHI nodes. */
1948 phi = phi_nodes (bb);
1949 while (phi)
1951 tree next = PHI_CHAIN (phi);
1952 remove_phi_node (phi, NULL_TREE, true);
1953 phi = next;
1956 /* Remove edges to BB's successors. */
1957 while (EDGE_COUNT (bb->succs) > 0)
1958 remove_edge (EDGE_SUCC (bb, 0));
1962 /* Remove statements of basic block BB. */
1964 static void
1965 remove_bb (basic_block bb)
1967 block_stmt_iterator i;
1968 #ifdef USE_MAPPED_LOCATION
1969 source_location loc = UNKNOWN_LOCATION;
1970 #else
1971 source_locus loc = 0;
1972 #endif
1974 if (dump_file)
1976 fprintf (dump_file, "Removing basic block %d\n", bb->index);
1977 if (dump_flags & TDF_DETAILS)
1979 dump_bb (bb, dump_file, 0);
1980 fprintf (dump_file, "\n");
1984 if (current_loops)
1986 struct loop *loop = bb->loop_father;
1988 /* If a loop gets removed, clean up the information associated
1989 with it. */
1990 if (loop->latch == bb
1991 || loop->header == bb)
1992 free_numbers_of_iterations_estimates_loop (loop);
1995 /* Remove all the instructions in the block. */
1996 if (bb_stmt_list (bb) != NULL_TREE)
1998 for (i = bsi_start (bb); !bsi_end_p (i);)
2000 tree stmt = bsi_stmt (i);
2001 if (TREE_CODE (stmt) == LABEL_EXPR
2002 && (FORCED_LABEL (LABEL_EXPR_LABEL (stmt))
2003 || DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt))))
2005 basic_block new_bb;
2006 block_stmt_iterator new_bsi;
2008 /* A non-reachable non-local label may still be referenced.
2009 But it no longer needs to carry the extra semantics of
2010 non-locality. */
2011 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
2013 DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)) = 0;
2014 FORCED_LABEL (LABEL_EXPR_LABEL (stmt)) = 1;
2017 new_bb = bb->prev_bb;
2018 new_bsi = bsi_start (new_bb);
2019 bsi_remove (&i, false);
2020 bsi_insert_before (&new_bsi, stmt, BSI_NEW_STMT);
2022 else
2024 /* Release SSA definitions if we are in SSA. Note that we
2025 may be called when not in SSA. For example,
2026 final_cleanup calls this function via
2027 cleanup_tree_cfg. */
2028 if (gimple_in_ssa_p (cfun))
2029 release_defs (stmt);
2031 bsi_remove (&i, true);
2034 /* Don't warn for removed gotos. Gotos are often removed due to
2035 jump threading, thus resulting in bogus warnings. Not great,
2036 since this way we lose warnings for gotos in the original
2037 program that are indeed unreachable. */
2038 if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
2040 #ifdef USE_MAPPED_LOCATION
2041 if (EXPR_HAS_LOCATION (stmt))
2042 loc = EXPR_LOCATION (stmt);
2043 #else
2044 source_locus t;
2045 t = EXPR_LOCUS (stmt);
2046 if (t && LOCATION_LINE (*t) > 0)
2047 loc = t;
2048 #endif
2053 /* If requested, give a warning that the first statement in the
2054 block is unreachable. We walk statements backwards in the
2055 loop above, so the last statement we process is the first statement
2056 in the block. */
2057 #ifdef USE_MAPPED_LOCATION
2058 if (loc > BUILTINS_LOCATION)
2059 warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
2060 #else
2061 if (loc)
2062 warning (OPT_Wunreachable_code, "%Hwill never be executed", loc);
2063 #endif
2065 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2066 bb->il.tree = NULL;
2070 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2071 predicate VAL, return the edge that will be taken out of the block.
2072 If VAL does not match a unique edge, NULL is returned. */
2074 edge
2075 find_taken_edge (basic_block bb, tree val)
2077 tree stmt;
2079 stmt = last_stmt (bb);
2081 gcc_assert (stmt);
2082 gcc_assert (is_ctrl_stmt (stmt));
2083 gcc_assert (val);
2085 if (! is_gimple_min_invariant (val))
2086 return NULL;
2088 if (TREE_CODE (stmt) == COND_EXPR)
2089 return find_taken_edge_cond_expr (bb, val);
2091 if (TREE_CODE (stmt) == SWITCH_EXPR)
2092 return find_taken_edge_switch_expr (bb, val);
2094 if (computed_goto_p (stmt))
2096 /* Only optimize if the argument is a label; if the argument is
2097 not a label then we cannot construct a proper CFG.
2099 It may be the case that we only need to allow the LABEL_REF to
2100 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2101 appear inside a LABEL_EXPR just to be safe. */
2102 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2103 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2104 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2105 return NULL;
2108 gcc_unreachable ();
2111 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2112 statement, determine which of the outgoing edges will be taken out of the
2113 block. Return NULL if any edge may be taken. */
2115 static edge
2116 find_taken_edge_computed_goto (basic_block bb, tree val)
2118 basic_block dest;
2119 edge e = NULL;
2121 dest = label_to_block (val);
2122 if (dest)
2124 e = find_edge (bb, dest);
2125 gcc_assert (e != NULL);
2128 return e;
2131 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2132 statement, determine which of the two edges will be taken out of the
2133 block. Return NULL if either edge may be taken. */
2135 static edge
2136 find_taken_edge_cond_expr (basic_block bb, tree val)
2138 edge true_edge, false_edge;
2140 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2142 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2143 return (integer_zerop (val) ? false_edge : true_edge);
2146 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2147 statement, determine which edge will be taken out of the block. Return
2148 NULL if any edge may be taken. */
2150 static edge
2151 find_taken_edge_switch_expr (basic_block bb, tree val)
2153 tree switch_expr, taken_case;
2154 basic_block dest_bb;
2155 edge e;
2157 switch_expr = last_stmt (bb);
2158 taken_case = find_case_label_for_value (switch_expr, val);
2159 dest_bb = label_to_block (CASE_LABEL (taken_case));
2161 e = find_edge (bb, dest_bb);
2162 gcc_assert (e);
2163 return e;
2167 /* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
2168 We can make optimal use here of the fact that the case labels are
2169 sorted: We can do a binary search for a case matching VAL. */
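/* For illustration: with a label vector sorted by CASE_LOW such as
   { case 1:, case 5 ... 7:, default: }, a lookup for VAL == 6 narrows
   the search to the range label "case 5 ... 7" because CASE_LOW (5)
   is <= 6 and CASE_HIGH (7) is >= 6, while a lookup for VAL == 3 falls
   through to the default label, which is always stored as the last
   element of SWITCH_LABELS.  */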
2171 static tree
2172 find_case_label_for_value (tree switch_expr, tree val)
2174 tree vec = SWITCH_LABELS (switch_expr);
2175 size_t low, high, n = TREE_VEC_LENGTH (vec);
2176 tree default_case = TREE_VEC_ELT (vec, n - 1);
2178 for (low = -1, high = n - 1; high - low > 1; )
2180 size_t i = (high + low) / 2;
2181 tree t = TREE_VEC_ELT (vec, i);
2182 int cmp;
2184 /* Cache the result of comparing CASE_LOW and val. */
2185 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2187 if (cmp > 0)
2188 high = i;
2189 else
2190 low = i;
2192 if (CASE_HIGH (t) == NULL)
2194 /* A single-valued case label. */
2195 if (cmp == 0)
2196 return t;
2198 else
2200 /* A case range. We can only handle integer ranges. */
2201 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2202 return t;
2206 return default_case;
2212 /*---------------------------------------------------------------------------
2213 Debugging functions
2214 ---------------------------------------------------------------------------*/
2216 /* Dump tree-specific information of block BB to file OUTF. */
2218 void
2219 tree_dump_bb (basic_block bb, FILE *outf, int indent)
2221 dump_generic_bb (outf, bb, indent, TDF_VOPS|TDF_MEMSYMS);
2225 /* Dump a basic block on stderr. */
2227 void
2228 debug_tree_bb (basic_block bb)
2230 dump_bb (bb, stderr, 0);
2234 /* Dump basic block with index N on stderr. */
2236 basic_block
2237 debug_tree_bb_n (int n)
2239 debug_tree_bb (BASIC_BLOCK (n));
2240 return BASIC_BLOCK (n);
2244 /* Dump the CFG on stderr.
2246 FLAGS are the same used by the tree dumping functions
2247 (see TDF_* in tree-pass.h). */
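/* For example, calling debug_tree_cfg (TDF_DETAILS | TDF_STATS) from a
   debugger prints a per-function header and a brief CFG dump, the CFG
   statistics gathered so far, and finally the function body annotated
   with basic block boundaries.  */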
2249 void
2250 debug_tree_cfg (int flags)
2252 dump_tree_cfg (stderr, flags);
2256 /* Dump the program showing basic block boundaries on the given FILE.
2258 FLAGS are the same used by the tree dumping functions (see TDF_* in
2259 tree.h). */
2261 void
2262 dump_tree_cfg (FILE *file, int flags)
2264 if (flags & TDF_DETAILS)
2266 const char *funcname
2267 = lang_hooks.decl_printable_name (current_function_decl, 2);
2269 fputc ('\n', file);
2270 fprintf (file, ";; Function %s\n\n", funcname);
2271 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2272 n_basic_blocks, n_edges, last_basic_block);
2274 brief_dump_cfg (file);
2275 fprintf (file, "\n");
2278 if (flags & TDF_STATS)
2279 dump_cfg_stats (file);
2281 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2285 /* Dump CFG statistics on FILE. */
2287 void
2288 dump_cfg_stats (FILE *file)
2290 static long max_num_merged_labels = 0;
2291 unsigned long size, total = 0;
2292 long num_edges;
2293 basic_block bb;
2294 const char * const fmt_str = "%-30s%-13s%12s\n";
2295 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2296 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2297 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2298 const char *funcname
2299 = lang_hooks.decl_printable_name (current_function_decl, 2);
2302 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2304 fprintf (file, "---------------------------------------------------------\n");
2305 fprintf (file, fmt_str, "", " Number of ", "Memory");
2306 fprintf (file, fmt_str, "", " instances ", "used ");
2307 fprintf (file, "---------------------------------------------------------\n");
2309 size = n_basic_blocks * sizeof (struct basic_block_def);
2310 total += size;
2311 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2312 SCALE (size), LABEL (size));
2314 num_edges = 0;
2315 FOR_EACH_BB (bb)
2316 num_edges += EDGE_COUNT (bb->succs);
2317 size = num_edges * sizeof (struct edge_def);
2318 total += size;
2319 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2321 fprintf (file, "---------------------------------------------------------\n");
2322 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2323 LABEL (total));
2324 fprintf (file, "---------------------------------------------------------\n");
2325 fprintf (file, "\n");
2327 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2328 max_num_merged_labels = cfg_stats.num_merged_labels;
2330 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2331 cfg_stats.num_merged_labels, max_num_merged_labels);
2333 fprintf (file, "\n");
2337 /* Dump CFG statistics on stderr. Keep extern so that it's always
2338 linked in the final executable. */
2340 void
2341 debug_cfg_stats (void)
2343 dump_cfg_stats (stderr);
2347 /* Dump the flowgraph to a .vcg FILE. */
2349 static void
2350 tree_cfg2vcg (FILE *file)
2352 edge e;
2353 edge_iterator ei;
2354 basic_block bb;
2355 const char *funcname
2356 = lang_hooks.decl_printable_name (current_function_decl, 2);
2358 /* Write the file header. */
2359 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2360 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2361 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2363 /* Write blocks and edges. */
2364 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2366 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2367 e->dest->index);
2369 if (e->flags & EDGE_FAKE)
2370 fprintf (file, " linestyle: dotted priority: 10");
2371 else
2372 fprintf (file, " linestyle: solid priority: 100");
2374 fprintf (file, " }\n");
2376 fputc ('\n', file);
2378 FOR_EACH_BB (bb)
2380 enum tree_code head_code, end_code;
2381 const char *head_name, *end_name;
2382 int head_line = 0;
2383 int end_line = 0;
2384 tree first = first_stmt (bb);
2385 tree last = last_stmt (bb);
2387 if (first)
2389 head_code = TREE_CODE (first);
2390 head_name = tree_code_name[head_code];
2391 head_line = get_lineno (first);
2393 else
2394 head_name = "no-statement";
2396 if (last)
2398 end_code = TREE_CODE (last);
2399 end_name = tree_code_name[end_code];
2400 end_line = get_lineno (last);
2402 else
2403 end_name = "no-statement";
2405 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2406 bb->index, bb->index, head_name, head_line, end_name,
2407 end_line);
2409 FOR_EACH_EDGE (e, ei, bb->succs)
2411 if (e->dest == EXIT_BLOCK_PTR)
2412 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2413 else
2414 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2416 if (e->flags & EDGE_FAKE)
2417 fprintf (file, " priority: 10 linestyle: dotted");
2418 else
2419 fprintf (file, " priority: 100 linestyle: solid");
2421 fprintf (file, " }\n");
2424 if (bb->next_bb != EXIT_BLOCK_PTR)
2425 fputc ('\n', file);
2428 fputs ("}\n\n", file);
2433 /*---------------------------------------------------------------------------
2434 Miscellaneous helpers
2435 ---------------------------------------------------------------------------*/
2437 /* Return true if T represents a stmt that always transfers control. */
2439 bool
2440 is_ctrl_stmt (const_tree t)
2442 return (TREE_CODE (t) == COND_EXPR
2443 || TREE_CODE (t) == SWITCH_EXPR
2444 || TREE_CODE (t) == GOTO_EXPR
2445 || TREE_CODE (t) == RETURN_EXPR
2446 || TREE_CODE (t) == RESX_EXPR);
2450 /* Return true if T is a statement that may alter the flow of control
2451 (e.g., a call to a non-returning function). */
2453 bool
2454 is_ctrl_altering_stmt (const_tree t)
2456 const_tree call;
2458 gcc_assert (t);
2459 call = const_get_call_expr_in (t);
2460 if (call)
2462 /* A non-pure/const CALL_EXPR alters flow control if the current
2463 function has nonlocal labels. */
2464 if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
2465 return true;
2467 /* A CALL_EXPR also alters control flow if it does not return. */
2468 if (call_expr_flags (call) & ECF_NORETURN)
2469 return true;
2472 /* OpenMP directives alter control flow. */
2473 if (OMP_DIRECTIVE_P (t))
2474 return true;
2476 /* If a statement can throw, it alters control flow. */
2477 return tree_can_throw_internal (t);
2481 /* Return true if T is a computed goto. */
2483 bool
2484 computed_goto_p (const_tree t)
2486 return (TREE_CODE (t) == GOTO_EXPR
2487 && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
2491 /* Return true if T is a simple local goto. */
2493 bool
2494 simple_goto_p (const_tree t)
2496 return (TREE_CODE (t) == GOTO_EXPR
2497 && TREE_CODE (GOTO_DESTINATION (t)) == LABEL_DECL);
2501 /* Return true if T can make an abnormal transfer of control flow.
2502 Transfers of control flow associated with EH are excluded. */
2504 bool
2505 tree_can_make_abnormal_goto (const_tree t)
2507 if (computed_goto_p (t))
2508 return true;
2509 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
2510 t = GIMPLE_STMT_OPERAND (t, 1);
2511 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2512 t = TREE_OPERAND (t, 0);
2513 if (TREE_CODE (t) == CALL_EXPR)
2514 return TREE_SIDE_EFFECTS (t) && current_function_has_nonlocal_label;
2515 return false;
2519 /* Return true if T should start a new basic block. PREV_T is the
2520 statement preceding T. It is used when T is a label or a case label.
2521 Labels should only start a new basic block if their previous statement
2522 wasn't a label. Otherwise, a sequence of labels would generate
2523 unnecessary basic blocks that only contain a single label. */
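/* For instance, in a sequence such as

       L1:
       L2:
       x = 1;

   only L1 opens a new basic block; L2 is added to the same block (and
   counted in cfg_stats.num_merged_labels), since starting another block
   for it would create a block containing nothing but the label.  */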
2525 static inline bool
2526 stmt_starts_bb_p (const_tree t, const_tree prev_t)
2528 if (t == NULL_TREE)
2529 return false;
2531 /* LABEL_EXPRs start a new basic block only if the preceding
2532 statement wasn't a label of the same type. This prevents the
2533 creation of consecutive blocks that have nothing but a single
2534 label. */
2535 if (TREE_CODE (t) == LABEL_EXPR)
2537 /* Nonlocal and computed GOTO targets always start a new block. */
2538 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
2539 || FORCED_LABEL (LABEL_EXPR_LABEL (t)))
2540 return true;
2542 if (prev_t && TREE_CODE (prev_t) == LABEL_EXPR)
2544 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
2545 return true;
2547 cfg_stats.num_merged_labels++;
2548 return false;
2550 else
2551 return true;
2554 return false;
2558 /* Return true if T should end a basic block. */
2560 bool
2561 stmt_ends_bb_p (const_tree t)
2563 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2566 /* Remove block annotations and other data structures. */
2568 void
2569 delete_tree_cfg_annotations (void)
2571 basic_block bb;
2572 block_stmt_iterator bsi;
2574 /* Remove annotations from every tree in the function. */
2575 FOR_EACH_BB (bb)
2576 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
2578 tree stmt = bsi_stmt (bsi);
2579 ggc_free (stmt->base.ann);
2580 stmt->base.ann = NULL;
2582 label_to_block_map = NULL;
2586 /* Return the first statement in basic block BB. */
2588 tree
2589 first_stmt (basic_block bb)
2591 block_stmt_iterator i = bsi_start (bb);
2592 return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
2595 const_tree
2596 const_first_stmt (const_basic_block bb)
2598 const_block_stmt_iterator i = cbsi_start (bb);
2599 return !cbsi_end_p (i) ? cbsi_stmt (i) : NULL_TREE;
2602 /* Return the last statement in basic block BB. */
2604 tree
2605 last_stmt (basic_block bb)
2607 block_stmt_iterator b = bsi_last (bb);
2608 return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
2611 const_tree
2612 const_last_stmt (const_basic_block bb)
2614 const_block_stmt_iterator b = cbsi_last (bb);
2615 return !cbsi_end_p (b) ? cbsi_stmt (b) : NULL_TREE;
2618 /* Return the last statement of an otherwise empty block. Return NULL
2619 if the block is totally empty, or if it contains more than one
2620 statement. */
2622 tree
2623 last_and_only_stmt (basic_block bb)
2625 block_stmt_iterator i = bsi_last (bb);
2626 tree last, prev;
2628 if (bsi_end_p (i))
2629 return NULL_TREE;
2631 last = bsi_stmt (i);
2632 bsi_prev (&i);
2633 if (bsi_end_p (i))
2634 return last;
2636 /* Empty statements should no longer appear in the instruction stream.
2637 Everything that might have appeared before should be deleted by
2638 remove_useless_stmts, and the optimizers should just bsi_remove
2639 instead of smashing with build_empty_stmt.
2641 Thus the only thing that should appear here in a block containing
2642 one executable statement is a label. */
2643 prev = bsi_stmt (i);
2644 if (TREE_CODE (prev) == LABEL_EXPR)
2645 return last;
2646 else
2647 return NULL_TREE;
2651 /* Mark BB as the basic block holding statement T. */
2653 void
2654 set_bb_for_stmt (tree t, basic_block bb)
2656 if (TREE_CODE (t) == PHI_NODE)
2657 PHI_BB (t) = bb;
2658 else if (TREE_CODE (t) == STATEMENT_LIST)
2660 tree_stmt_iterator i;
2661 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2662 set_bb_for_stmt (tsi_stmt (i), bb);
2664 else
2666 stmt_ann_t ann = get_stmt_ann (t);
2667 ann->bb = bb;
2669 /* If the statement is a label, add the label to block-to-labels map
2670 so that we can speed up edge creation for GOTO_EXPRs. */
2671 if (TREE_CODE (t) == LABEL_EXPR)
2673 int uid;
2675 t = LABEL_EXPR_LABEL (t);
2676 uid = LABEL_DECL_UID (t);
2677 if (uid == -1)
2679 unsigned old_len = VEC_length (basic_block, label_to_block_map);
2680 LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
2681 if (old_len <= (unsigned) uid)
2683 unsigned new_len = 3 * uid / 2;
2685 VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
2686 new_len);
2689 else
2690 /* We're moving an existing label. Make sure that we've
2691 removed it from the old block. */
2692 gcc_assert (!bb
2693 || !VEC_index (basic_block, label_to_block_map, uid));
2694 VEC_replace (basic_block, label_to_block_map, uid, bb);
2699 /* Faster version of set_bb_for_stmt that assumes the statement is being moved
2700 from one basic block to another.
2701 For BB splitting we can run into a quadratic case, so performance is quite
2702 important; knowing that the tables are big enough, change_bb_for_stmt
2703 can be inlined as a leaf function. */
2704 static inline void
2705 change_bb_for_stmt (tree t, basic_block bb)
2707 get_stmt_ann (t)->bb = bb;
2708 if (TREE_CODE (t) == LABEL_EXPR)
2709 VEC_replace (basic_block, label_to_block_map,
2710 LABEL_DECL_UID (LABEL_EXPR_LABEL (t)), bb);
2713 /* Finds iterator for STMT. */
2715 extern block_stmt_iterator
2716 bsi_for_stmt (tree stmt)
2718 block_stmt_iterator bsi;
2720 for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
2721 if (bsi_stmt (bsi) == stmt)
2722 return bsi;
2724 gcc_unreachable ();
2727 /* Mark statement T as modified, and update it. */
2728 static inline void
2729 update_modified_stmts (tree t)
2731 if (!ssa_operands_active ())
2732 return;
2733 if (TREE_CODE (t) == STATEMENT_LIST)
2735 tree_stmt_iterator i;
2736 tree stmt;
2737 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2739 stmt = tsi_stmt (i);
2740 update_stmt_if_modified (stmt);
2743 else
2744 update_stmt_if_modified (t);
2747 /* Insert statement (or statement list) T before the statement
2748 pointed-to by iterator I. M specifies how to update iterator I
2749 after insertion (see enum bsi_iterator_update). */
2751 void
2752 bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2754 set_bb_for_stmt (t, i->bb);
2755 update_modified_stmts (t);
2756 tsi_link_before (&i->tsi, t, m);
2760 /* Insert statement (or statement list) T after the statement
2761 pointed-to by iterator I. M specifies how to update iterator I
2762 after insertion (see enum bsi_iterator_update). */
2764 void
2765 bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2767 set_bb_for_stmt (t, i->bb);
2768 update_modified_stmts (t);
2769 tsi_link_after (&i->tsi, t, m);
2773 /* Remove the statement pointed to by iterator I. The iterator is updated
2774 to the next statement.
2776 When REMOVE_EH_INFO is true we remove the statement pointed to by
2777 iterator I from the EH tables. Otherwise we do not modify the EH
2778 tables.
2780 Generally, REMOVE_EH_INFO should be true when the statement is going to
2781 be removed from the IL and not reinserted elsewhere. */
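/* For example, bsi_move_after and bsi_move_before below call bsi_remove
   with REMOVE_EH_INFO false because the statement is immediately
   reinserted elsewhere, whereas remove_bb above passes true since the
   statement is gone for good and must also be dropped from the EH
   tables.  */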
2783 void
2784 bsi_remove (block_stmt_iterator *i, bool remove_eh_info)
2786 tree t = bsi_stmt (*i);
2787 set_bb_for_stmt (t, NULL);
2788 delink_stmt_imm_use (t);
2789 tsi_delink (&i->tsi);
2790 mark_stmt_modified (t);
2791 if (remove_eh_info)
2793 remove_stmt_from_eh_region (t);
2794 gimple_remove_stmt_histograms (cfun, t);
2799 /* Move the statement at FROM so it comes right after the statement at TO. */
2801 void
2802 bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
2804 tree stmt = bsi_stmt (*from);
2805 bsi_remove (from, false);
2806 /* We must have BSI_NEW_STMT here, as bsi_move_after is sometimes used to
2807 move statements to an empty block. */
2808 bsi_insert_after (to, stmt, BSI_NEW_STMT);
2812 /* Move the statement at FROM so it comes right before the statement at TO. */
2814 void
2815 bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
2817 tree stmt = bsi_stmt (*from);
2818 bsi_remove (from, false);
2819 /* For consistency with bsi_move_after, it might be better to have
2820 BSI_NEW_STMT here; however, that breaks several places that expect
2821 that TO does not change. */
2822 bsi_insert_before (to, stmt, BSI_SAME_STMT);
2826 /* Move the statement at FROM to the end of basic block BB. */
2828 void
2829 bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
2831 block_stmt_iterator last = bsi_last (bb);
2833 /* Have to check bsi_end_p because it could be an empty block. */
2834 if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
2835 bsi_move_before (from, &last);
2836 else
2837 bsi_move_after (from, &last);
2841 /* Replace the contents of the statement pointed to by iterator BSI
2842 with STMT. If UPDATE_EH_INFO is true, the exception handling
2843 information of the original statement is moved to the new statement. */
2845 void
2846 bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool update_eh_info)
2848 int eh_region;
2849 tree orig_stmt = bsi_stmt (*bsi);
2851 if (stmt == orig_stmt)
2852 return;
2853 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
2854 set_bb_for_stmt (stmt, bsi->bb);
2856 /* Preserve EH region information from the original statement, if
2857 requested by the caller. */
2858 if (update_eh_info)
2860 eh_region = lookup_stmt_eh_region (orig_stmt);
2861 if (eh_region >= 0)
2863 remove_stmt_from_eh_region (orig_stmt);
2864 add_stmt_to_eh_region (stmt, eh_region);
2868 gimple_duplicate_stmt_histograms (cfun, stmt, cfun, orig_stmt);
2869 gimple_remove_stmt_histograms (cfun, orig_stmt);
2870 delink_stmt_imm_use (orig_stmt);
2871 *bsi_stmt_ptr (*bsi) = stmt;
2872 mark_stmt_modified (stmt);
2873 update_modified_stmts (stmt);
2877 /* Insert the statement pointed-to by BSI into edge E. Every attempt
2878 is made to place the statement in an existing basic block, but
2879 sometimes that isn't possible. When it isn't possible, the edge is
2880 split and the statement is added to the new block.
2882 In all cases, the returned *BSI points to the correct location. The
2883 return value is true if insertion should be done after the location,
2884 or false if it should be done before the location. If new basic block
2885 has to be created, it is stored in *NEW_BB. */
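/* For example, when E runs from a block that ends in a COND_EXPR into a
   join block that starts with PHI nodes, neither endpoint can absorb the
   new statement: the source already ends in a control statement and the
   destination's PHIs might use the value being set.  In that case the
   edge is split and the statement goes into the fresh block returned in
   *NEW_BB.  */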
2887 static bool
2888 tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
2889 basic_block *new_bb)
2891 basic_block dest, src;
2892 tree tmp;
2894 dest = e->dest;
2895 restart:
2897 /* If the destination has one predecessor which has no PHI nodes,
2898 insert there. Except for the exit block.
2900 The requirement for no PHI nodes could be relaxed. Basically we
2901 would have to examine the PHIs to prove that none of them used
2902 the value set by the statement we want to insert on E. That
2903 hardly seems worth the effort. */
2904 if (single_pred_p (dest)
2905 && ! phi_nodes (dest)
2906 && dest != EXIT_BLOCK_PTR)
2908 *bsi = bsi_start (dest);
2909 if (bsi_end_p (*bsi))
2910 return true;
2912 /* Make sure we insert after any leading labels. */
2913 tmp = bsi_stmt (*bsi);
2914 while (TREE_CODE (tmp) == LABEL_EXPR)
2916 bsi_next (bsi);
2917 if (bsi_end_p (*bsi))
2918 break;
2919 tmp = bsi_stmt (*bsi);
2922 if (bsi_end_p (*bsi))
2924 *bsi = bsi_last (dest);
2925 return true;
2927 else
2928 return false;
2931 /* If the source has one successor, the edge is not abnormal and
2932 the last statement does not end a basic block, insert there.
2933 Except for the entry block. */
2934 src = e->src;
2935 if ((e->flags & EDGE_ABNORMAL) == 0
2936 && single_succ_p (src)
2937 && src != ENTRY_BLOCK_PTR)
2939 *bsi = bsi_last (src);
2940 if (bsi_end_p (*bsi))
2941 return true;
2943 tmp = bsi_stmt (*bsi);
2944 if (!stmt_ends_bb_p (tmp))
2945 return true;
2947 /* Insert code just before returning the value. We may need to decompose
2948 the return in case it contains a non-trivial operand. */
2949 if (TREE_CODE (tmp) == RETURN_EXPR)
2951 tree op = TREE_OPERAND (tmp, 0);
2952 if (op && !is_gimple_val (op))
2954 gcc_assert (TREE_CODE (op) == GIMPLE_MODIFY_STMT);
2955 bsi_insert_before (bsi, op, BSI_NEW_STMT);
2956 TREE_OPERAND (tmp, 0) = GIMPLE_STMT_OPERAND (op, 0);
2958 bsi_prev (bsi);
2959 return true;
2963 /* Otherwise, create a new basic block, and split this edge. */
2964 dest = split_edge (e);
2965 if (new_bb)
2966 *new_bb = dest;
2967 e = single_pred_edge (dest);
2968 goto restart;
2972 /* This routine will commit all pending edge insertions, creating any new
2973 basic blocks which are necessary. */
2975 void
2976 bsi_commit_edge_inserts (void)
2978 basic_block bb;
2979 edge e;
2980 edge_iterator ei;
2982 bsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR), NULL);
2984 FOR_EACH_BB (bb)
2985 FOR_EACH_EDGE (e, ei, bb->succs)
2986 bsi_commit_one_edge_insert (e, NULL);
2990 /* Commit insertions pending at edge E. If a new block is created, set NEW_BB
2991 to this block, otherwise set it to NULL. */
2993 void
2994 bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
2996 if (new_bb)
2997 *new_bb = NULL;
2998 if (PENDING_STMT (e))
3000 block_stmt_iterator bsi;
3001 tree stmt = PENDING_STMT (e);
3003 PENDING_STMT (e) = NULL_TREE;
3005 if (tree_find_edge_insert_loc (e, &bsi, new_bb))
3006 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3007 else
3008 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3013 /* Add STMT to the pending list of edge E. No actual insertion is
3014 made until a call to bsi_commit_edge_inserts () is made. */
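/* A typical usage pattern is to queue one or more statements on an edge
   and flush them all at the end of a pass, e.g.:

       bsi_insert_on_edge (e, stmt1);
       bsi_insert_on_edge (e, stmt2);
       ...
       bsi_commit_edge_inserts ();

   or to use bsi_insert_on_edge_immediate below when the insertion (and
   any required edge split) has to happen right away.  */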
3016 void
3017 bsi_insert_on_edge (edge e, tree stmt)
3019 append_to_statement_list (stmt, &PENDING_STMT (e));
3022 /* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts. If a new
3023 block has to be created, it is returned. */
3025 basic_block
3026 bsi_insert_on_edge_immediate (edge e, tree stmt)
3028 block_stmt_iterator bsi;
3029 basic_block new_bb = NULL;
3031 gcc_assert (!PENDING_STMT (e));
3033 if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
3034 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3035 else
3036 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3038 return new_bb;
3041 /*---------------------------------------------------------------------------
3042 Tree specific functions for CFG manipulation
3043 ---------------------------------------------------------------------------*/
3045 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
3047 static void
3048 reinstall_phi_args (edge new_edge, edge old_edge)
3050 tree var, phi;
3052 if (!PENDING_STMT (old_edge))
3053 return;
3055 for (var = PENDING_STMT (old_edge), phi = phi_nodes (new_edge->dest);
3056 var && phi;
3057 var = TREE_CHAIN (var), phi = PHI_CHAIN (phi))
3059 tree result = TREE_PURPOSE (var);
3060 tree arg = TREE_VALUE (var);
3062 gcc_assert (result == PHI_RESULT (phi));
3064 add_phi_arg (phi, arg, new_edge);
3067 PENDING_STMT (old_edge) = NULL;
3070 /* Returns the basic block after which the new basic block created
3071 by splitting edge EDGE_IN should be placed. Tries to keep the new block
3072 near its "logical" location. This is of most help to humans looking
3073 at debugging dumps. */
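/* For instance, if DEST's layout predecessor already has an edge to
   DEST, wedging the new block between the two would only obscure the
   dump, so the new block is placed after EDGE_IN->src instead;
   otherwise it can safely sit directly in front of DEST.  */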
3075 static basic_block
3076 split_edge_bb_loc (edge edge_in)
3078 basic_block dest = edge_in->dest;
3080 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
3081 return edge_in->src;
3082 else
3083 return dest->prev_bb;
3086 /* Split a (typically critical) edge EDGE_IN. Return the new block.
3087 Abort on abnormal edges. */
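/* Schematically, splitting the edge A->B turns

       A --e--> B        into        A --> N --> B

   where N is a new empty block that inherits e's count and frequency,
   falls through to B, and becomes the new target of the redirected
   edge; any PHI arguments queued on the old edge are reinstalled on
   the N->B edge.  */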
3089 static basic_block
3090 tree_split_edge (edge edge_in)
3092 basic_block new_bb, after_bb, dest;
3093 edge new_edge, e;
3095 /* Abnormal edges cannot be split. */
3096 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
3098 dest = edge_in->dest;
3100 after_bb = split_edge_bb_loc (edge_in);
3102 new_bb = create_empty_bb (after_bb);
3103 new_bb->frequency = EDGE_FREQUENCY (edge_in);
3104 new_bb->count = edge_in->count;
3105 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
3106 new_edge->probability = REG_BR_PROB_BASE;
3107 new_edge->count = edge_in->count;
3109 e = redirect_edge_and_branch (edge_in, new_bb);
3110 gcc_assert (e == edge_in);
3111 reinstall_phi_args (new_edge, e);
3113 return new_bb;
3116 /* Callback for walk_tree; check that all elements with address taken are
3117 properly noticed as such. DATA is nonnull when TP was reached from
3118 within a PHI node. */
3120 static tree
3121 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3123 tree t = *tp, x;
3124 bool in_phi = (data != NULL);
3126 if (TYPE_P (t))
3127 *walk_subtrees = 0;
3129 /* Check operand N for being valid GIMPLE and give error MSG if not. */
3130 #define CHECK_OP(N, MSG) \
3131 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
3132 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
3134 switch (TREE_CODE (t))
3136 case SSA_NAME:
3137 if (SSA_NAME_IN_FREE_LIST (t))
3139 error ("SSA name in freelist but still referenced");
3140 return *tp;
3142 break;
3144 case ASSERT_EXPR:
3145 x = fold (ASSERT_EXPR_COND (t));
3146 if (x == boolean_false_node)
3148 error ("ASSERT_EXPR with an always-false condition");
3149 return *tp;
3151 break;
3153 case MODIFY_EXPR:
3154 gcc_unreachable ();
3156 case GIMPLE_MODIFY_STMT:
3157 x = GIMPLE_STMT_OPERAND (t, 0);
3158 if (TREE_CODE (x) == BIT_FIELD_REF
3159 && is_gimple_reg (TREE_OPERAND (x, 0)))
3161 error ("GIMPLE register modified with BIT_FIELD_REF");
3162 return t;
3164 break;
3166 case ADDR_EXPR:
3168 bool old_invariant;
3169 bool old_constant;
3170 bool old_side_effects;
3171 bool new_invariant;
3172 bool new_constant;
3173 bool new_side_effects;
3175 /* ??? tree-ssa-alias.c may have overlooked dead PHI nodes, missing
3176 dead PHIs that take the address of something. But if the PHI
3177 result is dead, the fact that it takes the address of anything
3178 is irrelevant. Because we cannot tell from here if a PHI result
3179 is dead, we just skip this check for PHIs altogether. This means
3180 we may be missing "valid" checks, but what can you do?
3181 This was PR19217. */
3182 if (in_phi)
3183 break;
3185 old_invariant = TREE_INVARIANT (t);
3186 old_constant = TREE_CONSTANT (t);
3187 old_side_effects = TREE_SIDE_EFFECTS (t);
3189 recompute_tree_invariant_for_addr_expr (t);
3190 new_invariant = TREE_INVARIANT (t);
3191 new_side_effects = TREE_SIDE_EFFECTS (t);
3192 new_constant = TREE_CONSTANT (t);
3194 if (old_invariant != new_invariant)
3196 error ("invariant not recomputed when ADDR_EXPR changed");
3197 return t;
3200 if (old_constant != new_constant)
3202 error ("constant not recomputed when ADDR_EXPR changed");
3203 return t;
3205 if (old_side_effects != new_side_effects)
3207 error ("side effects not recomputed when ADDR_EXPR changed");
3208 return t;
3211 /* Skip any references (they will be checked when we recurse down the
3212 tree) and ensure that any variable used as a prefix is marked
3213 addressable. */
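/* For example, for ADDR_EXPR <&a.b[i].c> the loop below strips the
   COMPONENT_REFs and the ARRAY_REF until it reaches the base VAR_DECL
   "a", which must have TREE_ADDRESSABLE set because its address
   escapes.  */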
3214 for (x = TREE_OPERAND (t, 0);
3215 handled_component_p (x);
3216 x = TREE_OPERAND (x, 0))
3219 if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
3220 return NULL;
3221 if (!TREE_ADDRESSABLE (x))
3223 error ("address taken, but ADDRESSABLE bit not set");
3224 return x;
3226 break;
3229 case COND_EXPR:
3230 x = COND_EXPR_COND (t);
3231 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
3233 error ("non-integral used in condition");
3234 return x;
3236 if (!is_gimple_condexpr (x))
3238 error ("invalid conditional operand");
3239 return x;
3241 break;
3243 case NOP_EXPR:
3244 case CONVERT_EXPR:
3245 case FIX_TRUNC_EXPR:
3246 case FLOAT_EXPR:
3247 case NEGATE_EXPR:
3248 case ABS_EXPR:
3249 case BIT_NOT_EXPR:
3250 case NON_LVALUE_EXPR:
3251 case TRUTH_NOT_EXPR:
3252 CHECK_OP (0, "invalid operand to unary operator");
3253 break;
3255 case REALPART_EXPR:
3256 case IMAGPART_EXPR:
3257 case COMPONENT_REF:
3258 case ARRAY_REF:
3259 case ARRAY_RANGE_REF:
3260 case BIT_FIELD_REF:
3261 case VIEW_CONVERT_EXPR:
3262 /* We have a nest of references. Verify that each of the operands
3263 that determine where to reference is either a constant or a variable,
3264 verify that the base is valid, and then show we've already checked
3265 the subtrees. */
3266 while (handled_component_p (t))
3268 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3269 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3270 else if (TREE_CODE (t) == ARRAY_REF
3271 || TREE_CODE (t) == ARRAY_RANGE_REF)
3273 CHECK_OP (1, "invalid array index");
3274 if (TREE_OPERAND (t, 2))
3275 CHECK_OP (2, "invalid array lower bound");
3276 if (TREE_OPERAND (t, 3))
3277 CHECK_OP (3, "invalid array stride");
3279 else if (TREE_CODE (t) == BIT_FIELD_REF)
3281 CHECK_OP (1, "invalid operand to BIT_FIELD_REF");
3282 CHECK_OP (2, "invalid operand to BIT_FIELD_REF");
3285 t = TREE_OPERAND (t, 0);
3288 if (!CONSTANT_CLASS_P (t) && !is_gimple_lvalue (t))
3290 error ("invalid reference prefix");
3291 return t;
3293 *walk_subtrees = 0;
3294 break;
3295 case PLUS_EXPR:
3296 case MINUS_EXPR:
3297 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer arithmetic
3298 should be done using POINTER_PLUS_EXPR. */
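/* For example, assuming a 4-byte int, the C expression "p + 1" for an
   "int *p" must appear in GIMPLE as POINTER_PLUS_EXPR <p, 4> with the
   offset operand of type sizetype, never as a PLUS_EXPR on the
   pointer.  */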
3299 if (POINTER_TYPE_P (TREE_TYPE (t)))
3301 error ("invalid operand to plus/minus, type is a pointer");
3302 return t;
3304 CHECK_OP (0, "invalid operand to binary operator");
3305 CHECK_OP (1, "invalid operand to binary operator");
3306 break;
3308 case POINTER_PLUS_EXPR:
3309 /* Check to make sure the first operand is a pointer or reference type. */
3310 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3312 error ("invalid operand to pointer plus, first operand is not a pointer");
3313 return t;
3315 /* Check to make sure the second operand is an integer with type of
3316 sizetype. */
3317 if (!useless_type_conversion_p (sizetype,
3318 TREE_TYPE (TREE_OPERAND (t, 1))))
3320 error ("invalid operand to pointer plus, second operand is not an "
3321 "integer with type of sizetype.");
3322 return t;
3324 /* FALLTHROUGH */
3325 case LT_EXPR:
3326 case LE_EXPR:
3327 case GT_EXPR:
3328 case GE_EXPR:
3329 case EQ_EXPR:
3330 case NE_EXPR:
3331 case UNORDERED_EXPR:
3332 case ORDERED_EXPR:
3333 case UNLT_EXPR:
3334 case UNLE_EXPR:
3335 case UNGT_EXPR:
3336 case UNGE_EXPR:
3337 case UNEQ_EXPR:
3338 case LTGT_EXPR:
3339 case MULT_EXPR:
3340 case TRUNC_DIV_EXPR:
3341 case CEIL_DIV_EXPR:
3342 case FLOOR_DIV_EXPR:
3343 case ROUND_DIV_EXPR:
3344 case TRUNC_MOD_EXPR:
3345 case CEIL_MOD_EXPR:
3346 case FLOOR_MOD_EXPR:
3347 case ROUND_MOD_EXPR:
3348 case RDIV_EXPR:
3349 case EXACT_DIV_EXPR:
3350 case MIN_EXPR:
3351 case MAX_EXPR:
3352 case LSHIFT_EXPR:
3353 case RSHIFT_EXPR:
3354 case LROTATE_EXPR:
3355 case RROTATE_EXPR:
3356 case BIT_IOR_EXPR:
3357 case BIT_XOR_EXPR:
3358 case BIT_AND_EXPR:
3359 CHECK_OP (0, "invalid operand to binary operator");
3360 CHECK_OP (1, "invalid operand to binary operator");
3361 break;
3363 case CONSTRUCTOR:
3364 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3365 *walk_subtrees = 0;
3366 break;
3368 default:
3369 break;
3371 return NULL;
3373 #undef CHECK_OP
3376 /* Verifies if EXPR is a valid GIMPLE unary expression. Returns true
3377 if there is an error, otherwise false. */
3379 static bool
3380 verify_gimple_unary_expr (const_tree expr)
3382 tree op = TREE_OPERAND (expr, 0);
3383 tree type = TREE_TYPE (expr);
3385 if (!is_gimple_val (op))
3387 error ("invalid operand in unary expression");
3388 return true;
3391 /* For general unary expressions we have the operation's type
3392 as the effective type the operation is carried out on. So all
3393 we need to require is that the operand is trivially convertible
3394 to that type. */
3395 if (!useless_type_conversion_p (type, TREE_TYPE (op)))
3397 error ("type mismatch in unary expression");
3398 debug_generic_expr (type);
3399 debug_generic_expr (TREE_TYPE (op));
3400 return true;
3403 return false;
3406 /* Verifies if EXPR is a valid GIMPLE binary expression. Returns true
3407 if there is an error, otherwise false. */
3409 static bool
3410 verify_gimple_binary_expr (const_tree expr)
3412 tree op0 = TREE_OPERAND (expr, 0);
3413 tree op1 = TREE_OPERAND (expr, 1);
3414 tree type = TREE_TYPE (expr);
3416 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3418 error ("invalid operands in binary expression");
3419 return true;
3422 /* For general binary expressions we have the operation's type
3423 as the effective type the operation is carried out on. So all
3424 we need to require is that both operands are trivially convertible
3425 to that type. */
3426 if (!useless_type_conversion_p (type, TREE_TYPE (op0))
3427 || !useless_type_conversion_p (type, TREE_TYPE (op1)))
3429 error ("type mismatch in binary expression");
3430 debug_generic_stmt (type);
3431 debug_generic_stmt (TREE_TYPE (op0));
3432 debug_generic_stmt (TREE_TYPE (op1));
3433 return true;
3436 return false;
3439 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3440 Returns true if there is an error, otherwise false. */
3442 static bool
3443 verify_gimple_min_lval (tree expr)
3445 tree op;
3447 if (is_gimple_id (expr))
3448 return false;
3450 if (TREE_CODE (expr) != INDIRECT_REF
3451 && TREE_CODE (expr) != ALIGN_INDIRECT_REF
3452 && TREE_CODE (expr) != MISALIGNED_INDIRECT_REF)
3454 error ("invalid expression for min lvalue");
3455 return true;
3458 op = TREE_OPERAND (expr, 0);
3459 if (!is_gimple_val (op))
3461 error ("invalid operand in indirect reference");
3462 debug_generic_stmt (op);
3463 return true;
3465 if (!useless_type_conversion_p (TREE_TYPE (expr),
3466 TREE_TYPE (TREE_TYPE (op))))
3468 error ("type mismatch in indirect reference");
3469 debug_generic_stmt (TREE_TYPE (expr));
3470 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3471 return true;
3474 return false;
3477 /* Verify if EXPR is a valid GIMPLE reference expression. Returns true
3478 if there is an error, otherwise false. */
3480 static bool
3481 verify_gimple_reference (tree expr)
3483 while (handled_component_p (expr))
3485 tree op = TREE_OPERAND (expr, 0);
3487 if (TREE_CODE (expr) == ARRAY_REF
3488 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3490 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3491 || (TREE_OPERAND (expr, 2)
3492 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3493 || (TREE_OPERAND (expr, 3)
3494 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3496 error ("invalid operands to array reference");
3497 debug_generic_stmt (expr);
3498 return true;
3502 /* Verify if the reference array element types are compatible. */
3503 if (TREE_CODE (expr) == ARRAY_REF
3504 && !useless_type_conversion_p (TREE_TYPE (expr),
3505 TREE_TYPE (TREE_TYPE (op))))
3507 error ("type mismatch in array reference");
3508 debug_generic_stmt (TREE_TYPE (expr));
3509 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3510 return true;
3512 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3513 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3514 TREE_TYPE (TREE_TYPE (op))))
3516 error ("type mismatch in array range reference");
3517 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3518 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3519 return true;
3522 if ((TREE_CODE (expr) == REALPART_EXPR
3523 || TREE_CODE (expr) == IMAGPART_EXPR)
3524 && !useless_type_conversion_p (TREE_TYPE (expr),
3525 TREE_TYPE (TREE_TYPE (op))))
3527 error ("type mismatch in real/imagpart reference");
3528 debug_generic_stmt (TREE_TYPE (expr));
3529 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3530 return true;
3533 if (TREE_CODE (expr) == COMPONENT_REF
3534 && !useless_type_conversion_p (TREE_TYPE (expr),
3535 TREE_TYPE (TREE_OPERAND (expr, 1))))
3537 error ("type mismatch in component reference");
3538 debug_generic_stmt (TREE_TYPE (expr));
3539 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3540 return true;
3543 /* For VIEW_CONVERT_EXPRs which are allowed here, too, there
3544 is nothing to verify. Gross mismatches at most invoke
3545 undefined behavior. */
3547 expr = op;
3550 return verify_gimple_min_lval (expr);
3553 /* Verify the GIMPLE expression EXPR. Returns true if there is an
3554 error, otherwise false. */
3556 static bool
3557 verify_gimple_expr (tree expr)
3559 tree type = TREE_TYPE (expr);
3561 if (is_gimple_val (expr))
3562 return false;
3564 /* Special codes we cannot handle via their class. */
3565 switch (TREE_CODE (expr))
3567 case NOP_EXPR:
3568 case CONVERT_EXPR:
3570 tree op = TREE_OPERAND (expr, 0);
3571 if (!is_gimple_val (op))
3573 error ("invalid operand in conversion");
3574 return true;
3577 /* Allow conversions between integral types. */
3578 if (INTEGRAL_TYPE_P (type) == INTEGRAL_TYPE_P (TREE_TYPE (op)))
3579 return false;
3581 /* Allow conversions between integral types and pointers only if
3582 there is no sign or zero extension involved. */
3583 if (((POINTER_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (op)))
3584 || (POINTER_TYPE_P (TREE_TYPE (op)) && INTEGRAL_TYPE_P (type)))
3585 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op)))
3586 return false;
3588 /* Allow conversion from integer to offset type and vice versa. */
3589 if ((TREE_CODE (type) == OFFSET_TYPE
3590 && TREE_CODE (TREE_TYPE (op)) == INTEGER_TYPE)
3591 || (TREE_CODE (type) == INTEGER_TYPE
3592 && TREE_CODE (TREE_TYPE (op)) == OFFSET_TYPE))
3593 return false;
3595 /* Otherwise assert we are converting between types of the
3596 same kind. */
3597 if (TREE_CODE (type) != TREE_CODE (TREE_TYPE (op)))
3599 error ("invalid types in nop conversion");
3600 debug_generic_expr (type);
3601 debug_generic_expr (TREE_TYPE (op));
3602 return true;
3605 return false;
3608 case FLOAT_EXPR:
3610 tree op = TREE_OPERAND (expr, 0);
3611 if (!is_gimple_val (op))
3613 error ("invalid operand in int to float conversion");
3614 return true;
3616 if (!INTEGRAL_TYPE_P (TREE_TYPE (op))
3617 || !SCALAR_FLOAT_TYPE_P (type))
3619 error ("invalid types in conversion to floating point");
3620 debug_generic_expr (type);
3621 debug_generic_expr (TREE_TYPE (op));
3622 return true;
3624 return false;
3627 case FIX_TRUNC_EXPR:
3629 tree op = TREE_OPERAND (expr, 0);
3630 if (!is_gimple_val (op))
3632 error ("invalid operand in float to int conversion");
3633 return true;
3635 if (!INTEGRAL_TYPE_P (type)
3636 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (op)))
3638 error ("invalid types in conversion to integer");
3639 debug_generic_expr (type);
3640 debug_generic_expr (TREE_TYPE (op));
3641 return true;
3643 return false;
3646 case COMPLEX_EXPR:
3648 tree op0 = TREE_OPERAND (expr, 0);
3649 tree op1 = TREE_OPERAND (expr, 1);
3650 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3652 error ("invalid operands in complex expression");
3653 return true;
3655 if (TREE_CODE (type) != COMPLEX_TYPE
3656 || !(TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
3657 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (op0)))
3658 || !(TREE_CODE (TREE_TYPE (op1)) == INTEGER_TYPE
3659 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (op1)))
3660 || !useless_type_conversion_p (TREE_TYPE (type),
3661 TREE_TYPE (op0))
3662 || !useless_type_conversion_p (TREE_TYPE (type),
3663 TREE_TYPE (op1)))
3665 error ("type mismatch in complex expression");
3666 debug_generic_stmt (TREE_TYPE (expr));
3667 debug_generic_stmt (TREE_TYPE (op0));
3668 debug_generic_stmt (TREE_TYPE (op1));
3669 return true;
3671 return false;
3674 case CONSTRUCTOR:
3676 /* This is used like COMPLEX_EXPR but for vectors. */
3677 if (TREE_CODE (type) != VECTOR_TYPE)
3679 error ("constructor not allowed for non-vector types");
3680 debug_generic_stmt (type);
3681 return true;
3683 /* FIXME: verify constructor arguments. */
3684 return false;
3687 case LSHIFT_EXPR:
3688 case RSHIFT_EXPR:
3689 case LROTATE_EXPR:
3690 case RROTATE_EXPR:
3692 tree op0 = TREE_OPERAND (expr, 0);
3693 tree op1 = TREE_OPERAND (expr, 1);
3694 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3696 error ("invalid operands in shift expression");
3697 return true;
3699 if (TREE_CODE (TREE_TYPE (op1)) != INTEGER_TYPE
3700 || !useless_type_conversion_p (type, TREE_TYPE (op0)))
3702 error ("type mismatch in shift expression");
3703 debug_generic_stmt (TREE_TYPE (expr));
3704 debug_generic_stmt (TREE_TYPE (op0));
3705 debug_generic_stmt (TREE_TYPE (op1));
3706 return true;
3708 return false;
3711 case PLUS_EXPR:
3712 case MINUS_EXPR:
3714 tree op0 = TREE_OPERAND (expr, 0);
3715 tree op1 = TREE_OPERAND (expr, 1);
3716 if (POINTER_TYPE_P (type)
3717 || POINTER_TYPE_P (TREE_TYPE (op0))
3718 || POINTER_TYPE_P (TREE_TYPE (op1)))
3720 error ("invalid (pointer) operands to plus/minus");
3721 return true;
3723 /* Continue with generic binary expression handling. */
3724 break;
3727 case POINTER_PLUS_EXPR:
3729 tree op0 = TREE_OPERAND (expr, 0);
3730 tree op1 = TREE_OPERAND (expr, 1);
3731 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3733 error ("invalid operands in pointer plus expression");
3734 return true;
3736 if (!POINTER_TYPE_P (TREE_TYPE (op0))
3737 || TREE_CODE (TREE_TYPE (op1)) != INTEGER_TYPE
3738 || !useless_type_conversion_p (type, TREE_TYPE (op0))
3739 || !useless_type_conversion_p (sizetype, TREE_TYPE (op1)))
3741 error ("type mismatch in pointer plus expression");
3742 debug_generic_stmt (type);
3743 debug_generic_stmt (TREE_TYPE (op0));
3744 debug_generic_stmt (TREE_TYPE (op1));
3745 return true;
3747 return false;
3750 case COND_EXPR:
3752 tree op0 = TREE_OPERAND (expr, 0);
3753 tree op1 = TREE_OPERAND (expr, 1);
3754 tree op2 = TREE_OPERAND (expr, 2);
3755 if ((!is_gimple_val (op1)
3756 && TREE_CODE (TREE_TYPE (op1)) != VOID_TYPE)
3757 || (!is_gimple_val (op2)
3758 && TREE_CODE (TREE_TYPE (op2)) != VOID_TYPE))
3760 error ("invalid operands in conditional expression");
3761 return true;
3763 if (!INTEGRAL_TYPE_P (TREE_TYPE (op0))
3764 || (TREE_CODE (TREE_TYPE (op1)) != VOID_TYPE
3765 && !useless_type_conversion_p (type, TREE_TYPE (op1)))
3766 || (TREE_CODE (TREE_TYPE (op2)) != VOID_TYPE
3767 && !useless_type_conversion_p (type, TREE_TYPE (op2))))
3769 error ("type mismatch in conditional expression");
3770 debug_generic_stmt (type);
3771 debug_generic_stmt (TREE_TYPE (op0));
3772 debug_generic_stmt (TREE_TYPE (op1));
3773 debug_generic_stmt (TREE_TYPE (op2));
3774 return true;
3776 return verify_gimple_expr (op0);
3779 case ADDR_EXPR:
3781 tree op = TREE_OPERAND (expr, 0);
3782 tree ptr_type;
3783 if (!is_gimple_addressable (op))
3785 error ("invalid operand in unary expression");
3786 return true;
3788 ptr_type = build_pointer_type (TREE_TYPE (op));
3789 if (!useless_type_conversion_p (type, ptr_type)
3790 /* FIXME: a longstanding wart, &a == &a[0]. */
3791 && (TREE_CODE (TREE_TYPE (op)) != ARRAY_TYPE
3792 || !useless_type_conversion_p (type,
3793 build_pointer_type (TREE_TYPE (TREE_TYPE (op))))))
3795 error ("type mismatch in address expression");
3796 debug_generic_stmt (TREE_TYPE (expr));
3797 debug_generic_stmt (ptr_type);
3798 return true;
3801 return verify_gimple_reference (op);
3804 case TRUTH_ANDIF_EXPR:
3805 case TRUTH_ORIF_EXPR:
3806 case TRUTH_AND_EXPR:
3807 case TRUTH_OR_EXPR:
3808 case TRUTH_XOR_EXPR:
3810 tree op0 = TREE_OPERAND (expr, 0);
3811 tree op1 = TREE_OPERAND (expr, 1);
3813 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3815 error ("invalid operands in truth expression");
3816 return true;
3819 /* We allow any kind of integral typed argument and result. */
3820 if (!INTEGRAL_TYPE_P (TREE_TYPE (op0))
3821 || !INTEGRAL_TYPE_P (TREE_TYPE (op1))
3822 || !INTEGRAL_TYPE_P (type))
3824 error ("type mismatch in binary truth expression");
3825 debug_generic_stmt (type);
3826 debug_generic_stmt (TREE_TYPE (op0));
3827 debug_generic_stmt (TREE_TYPE (op1));
3828 return true;
3831 return false;
3834 case TRUTH_NOT_EXPR:
3836 tree op = TREE_OPERAND (expr, 0);
3838 if (!is_gimple_val (op))
3840 error ("invalid operand in unary not");
3841 return true;
3844 /* For TRUTH_NOT_EXPR we can have any kind of integral
3845 typed arguments and results. */
3846 if (!INTEGRAL_TYPE_P (TREE_TYPE (op))
3847 || !INTEGRAL_TYPE_P (type))
3849 error ("type mismatch in not expression");
3850 debug_generic_expr (TREE_TYPE (expr));
3851 debug_generic_expr (TREE_TYPE (op));
3852 return true;
3855 return false;
3858 case CALL_EXPR:
3859 /* FIXME. The C frontend passes unpromoted arguments in case it
3860 didn't see a function declaration before the call. */
3861 return false;
3863 default:;
3866 /* Generic handling via classes. */
3867 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3869 case tcc_unary:
3870 return verify_gimple_unary_expr (expr);
3872 case tcc_binary:
3873 return verify_gimple_binary_expr (expr);
3875 case tcc_reference:
3876 return verify_gimple_reference (expr);
3878 case tcc_comparison:
3880 tree op0 = TREE_OPERAND (expr, 0);
3881 tree op1 = TREE_OPERAND (expr, 1);
3882 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3884 error ("invalid operands in comparison expression");
3885 return true;
3887 /* For comparisons we do not have the operation's type as the
3888 effective type the comparison is carried out in. Instead
3889 we require that either the first operand is trivially
3890 convertible into the second, or the other way around.
3891 The resulting type of a comparison may be any integral type.
3892 Because we special-case pointers to void we allow
3893 comparisons of pointers with the same mode as well. */
3894 if ((!useless_type_conversion_p (TREE_TYPE (op0), TREE_TYPE (op1))
3895 && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0))
3896 && (!POINTER_TYPE_P (TREE_TYPE (op0))
3897 || !POINTER_TYPE_P (TREE_TYPE (op1))
3898 || TYPE_MODE (TREE_TYPE (op0)) != TYPE_MODE (TREE_TYPE (op1))))
3899 || !INTEGRAL_TYPE_P (type))
3901 error ("type mismatch in comparison expression");
3902 debug_generic_stmt (TREE_TYPE (expr));
3903 debug_generic_stmt (TREE_TYPE (op0));
3904 debug_generic_stmt (TREE_TYPE (op1));
3905 return true;
3907 break;
3910 default:
3911 gcc_unreachable ();
3914 return false;
3917 /* Verify the GIMPLE assignment statement STMT. Returns true if there
3918 is an error, otherwise false. */
3920 static bool
3921 verify_gimple_modify_stmt (const_tree stmt)
3923 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
3924 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
3926 gcc_assert (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT);
3928 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3929 TREE_TYPE (rhs)))
3931 error ("non-trivial conversion at assignment");
3932 debug_generic_expr (TREE_TYPE (lhs));
3933 debug_generic_expr (TREE_TYPE (rhs));
3934 return true;
3937 /* Loads/stores from/to a variable are ok. */
3938 if ((is_gimple_val (lhs)
3939 && is_gimple_variable (rhs))
3940 || (is_gimple_val (rhs)
3941 && is_gimple_variable (lhs)))
3942 return false;
3944 /* Aggregate copies are ok. */
3945 if (!is_gimple_reg_type (TREE_TYPE (lhs))
3946 && !is_gimple_reg_type (TREE_TYPE (rhs)))
3947 return false;
3949 /* We might get 'loads' from a parameter which is not a gimple value. */
3950 if (TREE_CODE (rhs) == PARM_DECL)
3951 return verify_gimple_expr (lhs);
3953 if (!is_gimple_variable (lhs)
3954 && verify_gimple_expr (lhs))
3955 return true;
3957 if (!is_gimple_variable (rhs)
3958 && verify_gimple_expr (rhs))
3959 return true;
3961 return false;
3964 /* Verify the GIMPLE statement STMT. Returns true if there is an
3965 error, otherwise false. */
3967 static bool
3968 verify_gimple_stmt (tree stmt)
3970 if (!is_gimple_stmt (stmt))
3972 error ("is not a valid GIMPLE statement");
3973 return true;
3976 if (OMP_DIRECTIVE_P (stmt))
3978 /* OpenMP directives are validated by the FE and never operated
3979 on by the optimizers. Furthermore, OMP_FOR may contain
3980 non-gimple expressions when the main index variable has had
3981 its address taken. This does not affect the loop itself
3982 because the header of an OMP_FOR is merely used to determine
3983 how to set up the parallel iteration. */
3984 return false;
3987 switch (TREE_CODE (stmt))
3989 case GIMPLE_MODIFY_STMT:
3990 return verify_gimple_modify_stmt (stmt);
3992 case GOTO_EXPR:
3993 case LABEL_EXPR:
3994 return false;
3996 case SWITCH_EXPR:
3997 if (!is_gimple_val (TREE_OPERAND (stmt, 0)))
3999 error ("invalid operand to switch statement");
4000 debug_generic_expr (TREE_OPERAND (stmt, 0));
4002 return false;
4004 case RETURN_EXPR:
4006 tree op = TREE_OPERAND (stmt, 0);
4008 if (TREE_CODE (TREE_TYPE (stmt)) != VOID_TYPE)
4010 error ("type error in return expression");
4011 return true;
4014 if (op == NULL_TREE
4015 || TREE_CODE (op) == RESULT_DECL)
4016 return false;
4018 return verify_gimple_modify_stmt (op);
4021 case CALL_EXPR:
4022 case COND_EXPR:
4023 return verify_gimple_expr (stmt);
4025 case NOP_EXPR:
4026 case CHANGE_DYNAMIC_TYPE_EXPR:
4027 case ASM_EXPR:
4028 return false;
4030 default:
4031 gcc_unreachable ();
4035 /* Verify the GIMPLE statements inside the statement list STMTS. */
4037 void
4038 verify_gimple_1 (tree stmts)
4040 tree_stmt_iterator tsi;
4042 for (tsi = tsi_start (stmts); !tsi_end_p (tsi); tsi_next (&tsi))
4044 tree stmt = tsi_stmt (tsi);
4046 switch (TREE_CODE (stmt))
4048 case BIND_EXPR:
4049 verify_gimple_1 (BIND_EXPR_BODY (stmt));
4050 break;
4052 case TRY_CATCH_EXPR:
4053 case TRY_FINALLY_EXPR:
4054 verify_gimple_1 (TREE_OPERAND (stmt, 0));
4055 verify_gimple_1 (TREE_OPERAND (stmt, 1));
4056 break;
4058 case CATCH_EXPR:
4059 verify_gimple_1 (CATCH_BODY (stmt));
4060 break;
4062 case EH_FILTER_EXPR:
4063 verify_gimple_1 (EH_FILTER_FAILURE (stmt));
4064 break;
4066 default:
4067 if (verify_gimple_stmt (stmt))
4068 debug_generic_expr (stmt);
4073 /* Verify the GIMPLE statements inside the current function. */
4075 void
4076 verify_gimple (void)
4078 verify_gimple_1 (BIND_EXPR_BODY (DECL_SAVED_TREE (cfun->decl)));
4081 /* Verify STMT, return true if STMT is not in GIMPLE form.
4082 TODO: Implement type checking. */
4084 static bool
4085 verify_stmt (tree stmt, bool last_in_block)
4087 tree addr;
4089 if (OMP_DIRECTIVE_P (stmt))
4091 /* OpenMP directives are validated by the FE and never operated
4092 on by the optimizers. Furthermore, OMP_FOR may contain
4093 non-gimple expressions when the main index variable has had
4094 its address taken. This does not affect the loop itself
4095 because the header of an OMP_FOR is merely used to determine
4096 how to set up the parallel iteration. */
4097 return false;
4100 if (!is_gimple_stmt (stmt))
4102 error ("is not a valid GIMPLE statement");
4103 goto fail;
4106 addr = walk_tree (&stmt, verify_expr, NULL, NULL);
4107 if (addr)
4109 debug_generic_stmt (addr);
4110 return true;
4113 /* If the statement is marked as part of an EH region, then it is
4114 expected that the statement could throw. Verify that when we
4115 have optimizations that simplify statements such that we prove
4116 that they cannot throw, that we update other data structures
4117 to match. */
4118 if (lookup_stmt_eh_region (stmt) >= 0)
4120 if (!tree_could_throw_p (stmt))
4122 error ("statement marked for throw, but doesn%'t");
4123 goto fail;
4125 if (!last_in_block && tree_can_throw_internal (stmt))
4127 error ("statement marked for throw in middle of block");
4128 goto fail;
4132 return false;
4134 fail:
4135 debug_generic_stmt (stmt);
4136 return true;
4142 /* Return true when T can be shared. */
4142 static bool
4143 tree_node_can_be_shared (tree t)
4145 if (IS_TYPE_OR_DECL_P (t)
4146 || is_gimple_min_invariant (t)
4147 || TREE_CODE (t) == SSA_NAME
4148 || t == error_mark_node
4149 || TREE_CODE (t) == IDENTIFIER_NODE)
4150 return true;
4152 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4153 return true;
4155 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4156 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
4157 || TREE_CODE (t) == COMPONENT_REF
4158 || TREE_CODE (t) == REALPART_EXPR
4159 || TREE_CODE (t) == IMAGPART_EXPR)
4160 t = TREE_OPERAND (t, 0);
4162 if (DECL_P (t))
4163 return true;
4165 return false;
4169 /* Called via walk_tree. Verify tree sharing. */
4171 static tree
4172 verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
4174 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4176 if (tree_node_can_be_shared (*tp))
4178 *walk_subtrees = false;
4179 return NULL;
4182 if (pointer_set_insert (visited, *tp))
4183 return *tp;
4185 return NULL;
4189 /* Helper function for verify_gimple_tuples. */
4191 static tree
4192 verify_gimple_tuples_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
4193 void *data ATTRIBUTE_UNUSED)
4195 switch (TREE_CODE (*tp))
4197 case MODIFY_EXPR:
4198 error ("unexpected non-tuple");
4199 debug_tree (*tp);
4200 gcc_unreachable ();
4201 return NULL_TREE;
4203 default:
4204 return NULL_TREE;
4208 /* Verify that there are no trees that should have been converted to
4209 gimple tuples. Return true if T contains a node that should have
4210 been converted to a gimple tuple, but hasn't. */
4212 static bool
4213 verify_gimple_tuples (tree t)
4215 return walk_tree (&t, verify_gimple_tuples_1, NULL, NULL) != NULL;
4218 static bool eh_error_found;
4219 static int
4220 verify_eh_throw_stmt_node (void **slot, void *data)
4222 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4223 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4225 if (!pointer_set_contains (visited, node->stmt))
4227 error ("Dead STMT in EH table");
4228 debug_generic_stmt (node->stmt);
4229 eh_error_found = true;
4231 return 0;
4234 /* Verify the GIMPLE statement chain. */
4236 void
4237 verify_stmts (void)
4239 basic_block bb;
4240 block_stmt_iterator bsi;
4241 bool err = false;
4242 struct pointer_set_t *visited, *visited_stmts;
4243 tree addr;
4245 timevar_push (TV_TREE_STMT_VERIFY);
4246 visited = pointer_set_create ();
4247 visited_stmts = pointer_set_create ();
4249 FOR_EACH_BB (bb)
4251 tree phi;
4252 int i;
4254 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4256 int phi_num_args = PHI_NUM_ARGS (phi);
4258 pointer_set_insert (visited_stmts, phi);
4259 if (bb_for_stmt (phi) != bb)
4261 error ("bb_for_stmt (phi) is set to a wrong basic block");
4262 err |= true;
4265 for (i = 0; i < phi_num_args; i++)
4267 tree t = PHI_ARG_DEF (phi, i);
4268 tree addr;
4270 /* Addressable variables do have SSA_NAMEs but they
4271 are not considered gimple values. */
4272 if (TREE_CODE (t) != SSA_NAME
4273 && TREE_CODE (t) != FUNCTION_DECL
4274 && !is_gimple_val (t))
4276 error ("PHI def is not a GIMPLE value");
4277 debug_generic_stmt (phi);
4278 debug_generic_stmt (t);
4279 err |= true;
4282 addr = walk_tree (&t, verify_expr, (void *) 1, NULL);
4283 if (addr)
4285 debug_generic_stmt (addr);
4286 err |= true;
4289 addr = walk_tree (&t, verify_node_sharing, visited, NULL);
4290 if (addr)
4292 error ("incorrect sharing of tree nodes");
4293 debug_generic_stmt (phi);
4294 debug_generic_stmt (addr);
4295 err |= true;
4300 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
4302 tree stmt = bsi_stmt (bsi);
4304 pointer_set_insert (visited_stmts, stmt);
4305 err |= verify_gimple_tuples (stmt);
4307 if (bb_for_stmt (stmt) != bb)
4309 error ("bb_for_stmt (stmt) is set to a wrong basic block");
4310 err |= true;
4313 bsi_next (&bsi);
4314 err |= verify_stmt (stmt, bsi_end_p (bsi));
4315 addr = walk_tree (&stmt, verify_node_sharing, visited, NULL);
4316 if (addr)
4318 error ("incorrect sharing of tree nodes");
4319 debug_generic_stmt (stmt);
4320 debug_generic_stmt (addr);
4321 err |= true;
4325 eh_error_found = false;
4326 if (get_eh_throw_stmt_table (cfun))
4327 htab_traverse (get_eh_throw_stmt_table (cfun),
4328 verify_eh_throw_stmt_node,
4329 visited_stmts);
4331 if (err | eh_error_found)
4332 internal_error ("verify_stmts failed");
4334 pointer_set_destroy (visited);
4335 pointer_set_destroy (visited_stmts);
4336 verify_histograms ();
4337 timevar_pop (TV_TREE_STMT_VERIFY);
4341 /* Verifies that the flow information is OK. */
4343 static int
4344 tree_verify_flow_info (void)
4346 int err = 0;
4347 basic_block bb;
4348 block_stmt_iterator bsi;
4349 tree stmt;
4350 edge e;
4351 edge_iterator ei;
4353 if (ENTRY_BLOCK_PTR->il.tree)
4355 error ("ENTRY_BLOCK has IL associated with it");
4356 err = 1;
4359 if (EXIT_BLOCK_PTR->il.tree)
4361 error ("EXIT_BLOCK has IL associated with it");
4362 err = 1;
4365 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4366 if (e->flags & EDGE_FALLTHRU)
4368 error ("fallthru to exit from bb %d", e->src->index);
4369 err = 1;
4372 FOR_EACH_BB (bb)
4374 bool found_ctrl_stmt = false;
4376 stmt = NULL_TREE;
4378 /* Skip labels at the start of the basic block. */
4379 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4381 tree prev_stmt = stmt;
4383 stmt = bsi_stmt (bsi);
4385 if (TREE_CODE (stmt) != LABEL_EXPR)
4386 break;
4388 if (prev_stmt && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
4390 error ("nonlocal label ");
4391 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
4392 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4393 bb->index);
4394 err = 1;
4397 if (label_to_block (LABEL_EXPR_LABEL (stmt)) != bb)
4399 error ("label ");
4400 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
4401 fprintf (stderr, " to block does not match in bb %d",
4402 bb->index);
4403 err = 1;
4406 if (decl_function_context (LABEL_EXPR_LABEL (stmt))
4407 != current_function_decl)
4409 error ("label ");
4410 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
4411 fprintf (stderr, " has incorrect context in bb %d",
4412 bb->index);
4413 err = 1;
4417 /* Verify that the body of basic block BB is free of control flow. */
4418 for (; !bsi_end_p (bsi); bsi_next (&bsi))
4420 tree stmt = bsi_stmt (bsi);
4422 if (found_ctrl_stmt)
4424 error ("control flow in the middle of basic block %d",
4425 bb->index);
4426 err = 1;
4429 if (stmt_ends_bb_p (stmt))
4430 found_ctrl_stmt = true;
4432 if (TREE_CODE (stmt) == LABEL_EXPR)
4434 error ("label ");
4435 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
4436 fprintf (stderr, " in the middle of basic block %d", bb->index);
4437 err = 1;
4441 bsi = bsi_last (bb);
4442 if (bsi_end_p (bsi))
4443 continue;
4445 stmt = bsi_stmt (bsi);
4447 err |= verify_eh_edges (stmt);
4449 if (is_ctrl_stmt (stmt))
4451 FOR_EACH_EDGE (e, ei, bb->succs)
4452 if (e->flags & EDGE_FALLTHRU)
4454 error ("fallthru edge after a control statement in bb %d",
4455 bb->index);
4456 err = 1;
4460 if (TREE_CODE (stmt) != COND_EXPR)
4462 /* Verify that no edge has EDGE_TRUE_VALUE or EDGE_FALSE_VALUE set
4463 after anything other than a COND_EXPR. */
4464 FOR_EACH_EDGE (e, ei, bb->succs)
4465 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4467 error ("true/false edge after a non-COND_EXPR in bb %d",
4468 bb->index);
4469 err = 1;
4473 switch (TREE_CODE (stmt))
4475 case COND_EXPR:
4477 edge true_edge;
4478 edge false_edge;
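	  /* At this point the THEN and ELSE arms must be empty; the control
	     flow is represented only by the outgoing true/false edges.  */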
4480 if (COND_EXPR_THEN (stmt) != NULL_TREE
4481 || COND_EXPR_ELSE (stmt) != NULL_TREE)
4483 error ("COND_EXPR with code in branches at the end of bb %d",
4484 bb->index);
4485 err = 1;
4488 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4490 if (!true_edge || !false_edge
4491 || !(true_edge->flags & EDGE_TRUE_VALUE)
4492 || !(false_edge->flags & EDGE_FALSE_VALUE)
4493 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4494 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4495 || EDGE_COUNT (bb->succs) >= 3)
4497 error ("wrong outgoing edge flags at end of bb %d",
4498 bb->index);
4499 err = 1;
4502 break;
4504 case GOTO_EXPR:
4505 if (simple_goto_p (stmt))
4507 error ("explicit goto at end of bb %d", bb->index);
4508 err = 1;
4510 else
4512 /* FIXME. We should double check that the labels in the
4513 destination blocks have their address taken. */
4514 FOR_EACH_EDGE (e, ei, bb->succs)
4515 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
4516 | EDGE_FALSE_VALUE))
4517 || !(e->flags & EDGE_ABNORMAL))
4519 error ("wrong outgoing edge flags at end of bb %d",
4520 bb->index);
4521 err = 1;
4524 break;
4526 case RETURN_EXPR:
4527 if (!single_succ_p (bb)
4528 || (single_succ_edge (bb)->flags
4529 & (EDGE_FALLTHRU | EDGE_ABNORMAL
4530 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4532 error ("wrong outgoing edge flags at end of bb %d", bb->index);
4533 err = 1;
4535 if (single_succ (bb) != EXIT_BLOCK_PTR)
4537 error ("return edge does not point to exit in bb %d",
4538 bb->index);
4539 err = 1;
4541 break;
4543 case SWITCH_EXPR:
4545 tree prev;
4546 edge e;
4547 size_t i, n;
4548 tree vec;
4550 vec = SWITCH_LABELS (stmt);
4551 n = TREE_VEC_LENGTH (vec);
4553 /* Mark all the destination basic blocks. */
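	/* The AUX field of each destination block is used as a marker:
	   (void *)1 = named by some case label, (void *)2 = also reached by
	   an outgoing edge; it is cleared back to 0 after the checks.  */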
4554 for (i = 0; i < n; ++i)
4556 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
4557 basic_block label_bb = label_to_block (lab);
4559 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
4560 label_bb->aux = (void *)1;
4563 /* Verify that the case labels are sorted. */
4564 prev = TREE_VEC_ELT (vec, 0);
4565 for (i = 1; i < n - 1; ++i)
4567 tree c = TREE_VEC_ELT (vec, i);
4568 if (! CASE_LOW (c))
4570 error ("found default case not at end of case vector");
4571 err = 1;
4572 continue;
4574 if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
4576 error ("case labels not sorted: ");
4577 print_generic_expr (stderr, prev, 0);
4578 fprintf (stderr," is greater than ");
4579 print_generic_expr (stderr, c, 0);
4580 fprintf (stderr," but comes before it.\n");
4581 err = 1;
4583 prev = c;
4585 if (CASE_LOW (TREE_VEC_ELT (vec, n - 1)))
4587 error ("no default case found at end of case vector");
4588 err = 1;
4591 FOR_EACH_EDGE (e, ei, bb->succs)
4593 if (!e->dest->aux)
4595 error ("extra outgoing edge %d->%d",
4596 bb->index, e->dest->index);
4597 err = 1;
4599 e->dest->aux = (void *)2;
4600 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
4601 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4603 error ("wrong outgoing edge flags at end of bb %d",
4604 bb->index);
4605 err = 1;
4609 /* Check that we have all of them. */
4610 for (i = 0; i < n; ++i)
4612 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
4613 basic_block label_bb = label_to_block (lab);
4615 if (label_bb->aux != (void *)2)
4617 error ("missing edge %i->%i",
4618 bb->index, label_bb->index);
4619 err = 1;
4623 FOR_EACH_EDGE (e, ei, bb->succs)
4624 e->dest->aux = (void *)0;
4627 default: ;
4631 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
4632 verify_dominators (CDI_DOMINATORS);
4634 return err;
4638 /* Updates phi nodes after creating a forwarder block joined
4639 by edge FALLTHRU. */
4641 static void
4642 tree_make_forwarder_block (edge fallthru)
4644 edge e;
4645 edge_iterator ei;
4646 basic_block dummy, bb;
4647 tree phi, new_phi, var;
4649 dummy = fallthru->src;
4650 bb = fallthru->dest;
4652 if (single_pred_p (bb))
4653 return;
4655 /* If we redirected a branch we must create new PHI nodes at the
4656 start of BB. */
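  /* For each PHI node on DUMMY: move its result to a new PHI node in BB,
     give the PHI on DUMMY a fresh SSA name as its result, and feed that
     fresh name to the new PHI along the FALLTHRU edge.  */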
4657 for (phi = phi_nodes (dummy); phi; phi = PHI_CHAIN (phi))
4659 var = PHI_RESULT (phi);
4660 new_phi = create_phi_node (var, bb);
4661 SSA_NAME_DEF_STMT (var) = new_phi;
4662 SET_PHI_RESULT (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
4663 add_phi_arg (new_phi, PHI_RESULT (phi), fallthru);
4666 /* Ensure that the PHI node chain is in the same order. */
4667 set_phi_nodes (bb, phi_reverse (phi_nodes (bb)));
4669 /* Add the arguments we have stored on edges. */
4670 FOR_EACH_EDGE (e, ei, bb->preds)
4672 if (e == fallthru)
4673 continue;
4675 flush_pending_stmts (e);
4680 /* Return a non-special label at the head of basic block BB.
4681 Create one if it doesn't exist. */
4683 tree
4684 tree_block_label (basic_block bb)
4686 block_stmt_iterator i, s = bsi_start (bb);
4687 bool first = true;
4688 tree label, stmt;
4690 for (i = s; !bsi_end_p (i); first = false, bsi_next (&i))
4692 stmt = bsi_stmt (i);
4693 if (TREE_CODE (stmt) != LABEL_EXPR)
4694 break;
4695 label = LABEL_EXPR_LABEL (stmt);
4696 if (!DECL_NONLOCAL (label))
4698 if (!first)
4699 bsi_move_before (&i, &s);
4700 return label;
4704 label = create_artificial_label ();
4705 stmt = build1 (LABEL_EXPR, void_type_node, label);
4706 bsi_insert_before (&s, stmt, BSI_NEW_STMT);
4707 return label;
4711 /* Attempt to perform edge redirection by replacing a possibly complex
4712 jump instruction by a goto or by removing the jump completely.
4713 This can apply only if all edges now point to the same block. The
4714 parameters and return values are equivalent to
4715 redirect_edge_and_branch. */
4717 static edge
4718 tree_try_redirect_by_replacing_jump (edge e, basic_block target)
4720 basic_block src = e->src;
4721 block_stmt_iterator b;
4722 tree stmt;
4724 /* We can replace or remove a complex jump only when we have exactly
4725 two edges. */
4726 if (EDGE_COUNT (src->succs) != 2
4727 /* Verify that all targets will be TARGET. Specifically, the
4728 edge that is not E must also go to TARGET. */
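      /* (EDGE_SUCC (src, 0) == e) evaluates to 0 or 1, so the lookup below
	 picks the successor edge that is not E.  */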
4729 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4730 return NULL;
4732 b = bsi_last (src);
4733 if (bsi_end_p (b))
4734 return NULL;
4735 stmt = bsi_stmt (b);
4737 if (TREE_CODE (stmt) == COND_EXPR
4738 || TREE_CODE (stmt) == SWITCH_EXPR)
4740 bsi_remove (&b, true);
4741 e = ssa_redirect_edge (e, target);
4742 e->flags = EDGE_FALLTHRU;
4743 return e;
4746 return NULL;
4750 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4751 edge representing the redirected branch. */
4753 static edge
4754 tree_redirect_edge_and_branch (edge e, basic_block dest)
4756 basic_block bb = e->src;
4757 block_stmt_iterator bsi;
4758 edge ret;
4759 tree stmt;
4761 if (e->flags & EDGE_ABNORMAL)
4762 return NULL;
4764 if (e->src != ENTRY_BLOCK_PTR
4765 && (ret = tree_try_redirect_by_replacing_jump (e, dest)))
4766 return ret;
4768 if (e->dest == dest)
4769 return NULL;
4771 bsi = bsi_last (bb);
4772 stmt = bsi_end_p (bsi) ? NULL : bsi_stmt (bsi);
4774 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
4776 case COND_EXPR:
4777 /* For COND_EXPR, we only need to redirect the edge. */
4778 break;
4780 case GOTO_EXPR:
4781 /* No non-abnormal edges should lead from a non-simple goto, and
4782 simple ones should be represented implicitly. */
4783 gcc_unreachable ();
4785 case SWITCH_EXPR:
4787 tree cases = get_cases_for_edge (e, stmt);
4788 tree label = tree_block_label (dest);
4790 /* If we have a list of cases associated with E, then use it
4791 as it's a lot faster than walking the entire case vector. */
4792 if (cases)
4794 edge e2 = find_edge (e->src, dest);
4795 tree last, first;
4797 first = cases;
4798 while (cases)
4800 last = cases;
4801 CASE_LABEL (cases) = label;
4802 cases = TREE_CHAIN (cases);
4805 /* If there was already an edge in the CFG, then we need
4806 to move all the cases associated with E to E2. */
4807 if (e2)
4809 tree cases2 = get_cases_for_edge (e2, stmt);
4811 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4812 TREE_CHAIN (cases2) = first;
4815 else
4817 tree vec = SWITCH_LABELS (stmt);
4818 size_t i, n = TREE_VEC_LENGTH (vec);
4820 for (i = 0; i < n; i++)
4822 tree elt = TREE_VEC_ELT (vec, i);
4824 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4825 CASE_LABEL (elt) = label;
4829 break;
4832 case RETURN_EXPR:
4833 bsi_remove (&bsi, true);
4834 e->flags |= EDGE_FALLTHRU;
4835 break;
4837 case OMP_RETURN:
4838 case OMP_CONTINUE:
4839 case OMP_SECTIONS_SWITCH:
4840 case OMP_FOR:
4841 /* The edges from OMP constructs can be simply redirected. */
4842 break;
4844 default:
4845 /* Otherwise it must be a fallthru edge, and we don't need to
4846 do anything besides redirecting it. */
4847 gcc_assert (e->flags & EDGE_FALLTHRU);
4848 break;
4851 /* Update/insert PHI nodes as necessary. */
4853 /* Now update the edges in the CFG. */
4854 e = ssa_redirect_edge (e, dest);
4856 return e;
4859 /* Returns true if it is possible to remove edge E by redirecting
4860 it to the destination of the other edge from E->src. */
4862 static bool
4863 tree_can_remove_branch_p (const_edge e)
4865 if (e->flags & EDGE_ABNORMAL)
4866 return false;
4868 return true;
4871 /* Simple wrapper, as we can always redirect fallthru edges. */
4873 static basic_block
4874 tree_redirect_edge_and_branch_force (edge e, basic_block dest)
4876 e = tree_redirect_edge_and_branch (e, dest);
4877 gcc_assert (e);
4879 return NULL;
4883 /* Splits basic block BB after statement STMT (but at least after the
4884 labels). If STMT is NULL, BB is split just after the labels. */
4886 static basic_block
4887 tree_split_block (basic_block bb, void *stmt)
4889 block_stmt_iterator bsi;
4890 tree_stmt_iterator tsi_tgt;
4891 tree act, list;
4892 basic_block new_bb;
4893 edge e;
4894 edge_iterator ei;
4896 new_bb = create_empty_bb (bb);
4898 /* Redirect the outgoing edges. */
4899 new_bb->succs = bb->succs;
4900 bb->succs = NULL;
4901 FOR_EACH_EDGE (e, ei, new_bb->succs)
4902 e->src = new_bb;
4904 if (stmt && TREE_CODE ((tree) stmt) == LABEL_EXPR)
4905 stmt = NULL;
4907 /* Move everything from BSI to the new basic block. */
4908 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4910 act = bsi_stmt (bsi);
4911 if (TREE_CODE (act) == LABEL_EXPR)
4912 continue;
4914 if (!stmt)
4915 break;
4917 if (stmt == act)
4919 bsi_next (&bsi);
4920 break;
4924 if (bsi_end_p (bsi))
4925 return new_bb;
4927 /* Split the statement list - avoid re-creating new containers as this
4928 brings ugly quadratic memory consumption in the inliner.
4929 (We are still quadratic since we need to update stmt BB pointers,
4930 sadly.) */
4931 list = tsi_split_statement_list_before (&bsi.tsi);
4932 set_bb_stmt_list (new_bb, list);
4933 for (tsi_tgt = tsi_start (list);
4934 !tsi_end_p (tsi_tgt); tsi_next (&tsi_tgt))
4935 change_bb_for_stmt (tsi_stmt (tsi_tgt), new_bb);
4937 return new_bb;
4941 /* Moves basic block BB after block AFTER. */
4943 static bool
4944 tree_move_block_after (basic_block bb, basic_block after)
4946 if (bb->prev_bb == after)
4947 return true;
4949 unlink_block (bb);
4950 link_block (bb, after);
4952 return true;
4956 /* Return true if basic_block can be duplicated. */
4958 static bool
4959 tree_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
4961 return true;
4965 /* Create a duplicate of the basic block BB. NOTE: This does not
4966 preserve SSA form. */
4968 static basic_block
4969 tree_duplicate_bb (basic_block bb)
4971 basic_block new_bb;
4972 block_stmt_iterator bsi, bsi_tgt;
4973 tree phi;
4975 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
4977 /* Copy the PHI nodes. We ignore PHI node arguments here because
4978 the incoming edges have not been setup yet. */
4979 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4981 tree copy = create_phi_node (PHI_RESULT (phi), new_bb);
4982 create_new_def_for (PHI_RESULT (copy), copy, PHI_RESULT_PTR (copy));
4985 /* Keep the chain of PHI nodes in the same order so that they can be
4986 updated by ssa_redirect_edge. */
4987 set_phi_nodes (new_bb, phi_reverse (phi_nodes (new_bb)));
4989 bsi_tgt = bsi_start (new_bb);
4990 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4992 def_operand_p def_p;
4993 ssa_op_iter op_iter;
4994 tree stmt, copy;
4995 int region;
4997 stmt = bsi_stmt (bsi);
4998 if (TREE_CODE (stmt) == LABEL_EXPR)
4999 continue;
5001 /* Create a new copy of STMT and duplicate STMT's virtual
5002 operands. */
5003 copy = unshare_expr (stmt);
5004 bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
5005 copy_virtual_operands (copy, stmt);
5006 region = lookup_stmt_eh_region (stmt);
5007 if (region >= 0)
5008 add_stmt_to_eh_region (copy, region);
5009 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5011 /* Create new names for all the definitions created by COPY and
5012 add replacement mappings for each new name. */
5013 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5014 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5017 return new_bb;
5021 /* Basic block BB_COPY was created by code duplication. Add phi node
5022 arguments for edges going out of BB_COPY. The blocks that were
5023 duplicated have BB_DUPLICATED set. */
5025 void
5026 add_phi_args_after_copy_bb (basic_block bb_copy)
5028 basic_block bb, dest;
5029 edge e, e_copy;
5030 edge_iterator ei;
5031 tree phi, phi_copy, phi_next, def;
5033 bb = get_bb_original (bb_copy);
5035 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5037 if (!phi_nodes (e_copy->dest))
5038 continue;
5040 if (e_copy->dest->flags & BB_DUPLICATED)
5041 dest = get_bb_original (e_copy->dest);
5042 else
5043 dest = e_copy->dest;
5045 e = find_edge (bb, dest);
5046 if (!e)
5048 /* During loop unrolling the target of the latch edge is copied.
5049 In this case we are not looking for the edge to DEST, but for the
5050 edge to the duplicated block whose original was DEST. */
5051 FOR_EACH_EDGE (e, ei, bb->succs)
5052 if ((e->dest->flags & BB_DUPLICATED)
5053 && get_bb_original (e->dest) == dest)
5054 break;
5056 gcc_assert (e != NULL);
5059 for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
5060 phi;
5061 phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
5063 phi_next = PHI_CHAIN (phi);
5064 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5065 add_phi_arg (phi_copy, def, e_copy);
5070 /* Blocks in REGION_COPY array of length N_REGION were created by
5071 duplication of basic blocks. Add phi node arguments for edges
5072 going from these blocks. */
5074 void
5075 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region)
5077 unsigned i;
5079 for (i = 0; i < n_region; i++)
5080 region_copy[i]->flags |= BB_DUPLICATED;
5082 for (i = 0; i < n_region; i++)
5083 add_phi_args_after_copy_bb (region_copy[i]);
5085 for (i = 0; i < n_region; i++)
5086 region_copy[i]->flags &= ~BB_DUPLICATED;
5089 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5090 important exit edge EXIT. By important we mean that no SSA name defined
5091 inside region is live over the other exit edges of the region. All entry
5092 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5093 to the duplicate of the region. SSA form, dominance and loop information
5094 is updated. The new basic blocks are stored to REGION_COPY in the same
5095 order as they had in REGION, provided that REGION_COPY is not NULL.
5096 The function returns false if it is unable to copy the region,
5097 true otherwise. */
5099 bool
5100 tree_duplicate_sese_region (edge entry, edge exit,
5101 basic_block *region, unsigned n_region,
5102 basic_block *region_copy)
5104 unsigned i;
5105 bool free_region_copy = false, copying_header = false;
5106 struct loop *loop = entry->dest->loop_father;
5107 edge exit_copy;
5108 VEC (basic_block, heap) *doms;
5109 edge redirected;
5110 int total_freq = 0, entry_freq = 0;
5111 gcov_type total_count = 0, entry_count = 0;
5113 if (!can_copy_bbs_p (region, n_region))
5114 return false;
5116 /* Some sanity checking. Note that we do not check for all possible
5117 misuses of the function. That is, if you ask to copy something weird,
5118 it will work, but the state of structures probably will not be
5119 correct. */
5120 for (i = 0; i < n_region; i++)
5122 /* We do not handle subloops, i.e. all the blocks must belong to the
5123 same loop. */
5124 if (region[i]->loop_father != loop)
5125 return false;
5127 if (region[i] != entry->dest
5128 && region[i] == loop->header)
5129 return false;
5132 set_loop_copy (loop, loop);
5134 /* In case the function is used for loop header copying (which is the primary
5135 use), ensure that EXIT and its copy will be the new latch and entry edges. */
5136 if (loop->header == entry->dest)
5138 copying_header = true;
5139 set_loop_copy (loop, loop_outer (loop));
5141 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5142 return false;
5144 for (i = 0; i < n_region; i++)
5145 if (region[i] != exit->src
5146 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5147 return false;
5150 if (!region_copy)
5152 region_copy = XNEWVEC (basic_block, n_region);
5153 free_region_copy = true;
5156 gcc_assert (!need_ssa_update_p ());
5158 /* Record blocks outside the region that are dominated by something
5159 inside. */
5160 doms = NULL;
5161 initialize_original_copy_tables ();
5163 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5165 if (entry->dest->count)
5167 total_count = entry->dest->count;
5168 entry_count = entry->count;
5169 /* Fix up corner cases, to avoid division by zero or creation of negative
5170 frequencies. */
5171 if (entry_count > total_count)
5172 entry_count = total_count;
5174 else
5176 total_freq = entry->dest->frequency;
5177 entry_freq = EDGE_FREQUENCY (entry);
5178 /* Fix up corner cases, to avoid division by zero or creation of negative
5179 frequencies. */
5180 if (total_freq == 0)
5181 total_freq = 1;
5182 else if (entry_freq > total_freq)
5183 entry_freq = total_freq;
5186 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5187 split_edge_bb_loc (entry));
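  /* Scale the profile: the original region keeps the share of the counts
     that does not enter through ENTRY, and the copy receives ENTRY's
     share.  */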
5188 if (total_count)
5190 scale_bbs_frequencies_gcov_type (region, n_region,
5191 total_count - entry_count,
5192 total_count);
5193 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
5194 total_count);
5196 else
5198 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5199 total_freq);
5200 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
5203 if (copying_header)
5205 loop->header = exit->dest;
5206 loop->latch = exit->src;
5209 /* Redirect the entry and add the phi node arguments. */
5210 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
5211 gcc_assert (redirected != NULL);
5212 flush_pending_stmts (entry);
5214 /* Concerning updating of dominators: We must recount dominators
5215 for entry block and its copy. Anything that is outside of the
5216 region, but was dominated by something inside needs recounting as
5217 well. */
5218 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5219 VEC_safe_push (basic_block, heap, doms, get_bb_original (entry->dest));
5220 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5221 free (doms);
5223 /* Add the other PHI node arguments. */
5224 add_phi_args_after_copy (region_copy, n_region);
5226 /* Update the SSA web. */
5227 update_ssa (TODO_update_ssa);
5229 if (free_region_copy)
5230 free (region_copy);
5232 free_original_copy_tables ();
5233 return true;
5237 DEF_VEC_P(basic_block);
5238 DEF_VEC_ALLOC_P(basic_block,heap);
5241 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
5242 adding blocks when the dominator traversal reaches EXIT. This
5243 function silently assumes that ENTRY strictly dominates EXIT. */
5245 static void
5246 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
5247 VEC(basic_block,heap) **bbs_p)
5249 basic_block son;
5251 for (son = first_dom_son (CDI_DOMINATORS, entry);
5252 son;
5253 son = next_dom_son (CDI_DOMINATORS, son))
5255 VEC_safe_push (basic_block, heap, *bbs_p, son);
5256 if (son != exit)
5257 gather_blocks_in_sese_region (son, exit, bbs_p);
5262 struct move_stmt_d
5264 tree block;
5265 tree from_context;
5266 tree to_context;
5267 bitmap vars_to_remove;
5268 htab_t new_label_map;
5269 bool remap_decls_p;
5272 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
5273 contained in *TP and change the DECL_CONTEXT of every local
5274 variable referenced in *TP. */
5276 static tree
5277 move_stmt_r (tree *tp, int *walk_subtrees, void *data)
5279 struct move_stmt_d *p = (struct move_stmt_d *) data;
5280 tree t = *tp;
5282 if (p->block
5283 && (EXPR_P (t) || GIMPLE_STMT_P (t)))
5284 TREE_BLOCK (t) = p->block;
5286 if (OMP_DIRECTIVE_P (t)
5287 && TREE_CODE (t) != OMP_RETURN
5288 && TREE_CODE (t) != OMP_CONTINUE)
5290 /* Do not remap variables inside OMP directives. Variables
5291 referenced in clauses and directive header belong to the
5292 parent function and should not be moved into the child
5293 function. */
5294 bool save_remap_decls_p = p->remap_decls_p;
5295 p->remap_decls_p = false;
5296 *walk_subtrees = 0;
5298 walk_tree (&OMP_BODY (t), move_stmt_r, p, NULL);
5300 p->remap_decls_p = save_remap_decls_p;
5302 else if (DECL_P (t) && DECL_CONTEXT (t) == p->from_context)
5304 if (TREE_CODE (t) == LABEL_DECL)
5306 if (p->new_label_map)
5308 struct tree_map in, *out;
5309 in.base.from = t;
5310 out = htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
5311 if (out)
5312 *tp = t = out->to;
5315 DECL_CONTEXT (t) = p->to_context;
5317 else if (p->remap_decls_p)
5319 DECL_CONTEXT (t) = p->to_context;
5321 if (TREE_CODE (t) == VAR_DECL)
5323 struct function *f = DECL_STRUCT_FUNCTION (p->to_context);
5324 f->unexpanded_var_list
5325 = tree_cons (0, t, f->unexpanded_var_list);
5327 /* Mark T to be removed from the original function,
5328 otherwise it will be given a DECL_RTL when the
5329 original function is expanded. */
5330 bitmap_set_bit (p->vars_to_remove, DECL_UID (t));
5334 else if (TYPE_P (t))
5335 *walk_subtrees = 0;
5337 return NULL_TREE;
5341 /* Move basic block BB from function CFUN to function DEST_FN. The
5342 block is moved out of the original linked list and placed after
5343 block AFTER in the new list. Also, the block is removed from the
5344 original array of blocks and placed in DEST_FN's array of blocks.
5345 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
5346 updated to reflect the moved edges.
5348 On exit, local variables that need to be removed from
5349 CFUN->UNEXPANDED_VAR_LIST will have been added to VARS_TO_REMOVE. */
5351 static void
5352 move_block_to_fn (struct function *dest_cfun, basic_block bb,
5353 basic_block after, bool update_edge_count_p,
5354 bitmap vars_to_remove, htab_t new_label_map, int eh_offset)
5356 struct control_flow_graph *cfg;
5357 edge_iterator ei;
5358 edge e;
5359 block_stmt_iterator si;
5360 struct move_stmt_d d;
5361 unsigned old_len, new_len;
5363 /* Remove BB from dominance structures. */
5364 delete_from_dominance_info (CDI_DOMINATORS, bb);
5366 /* Link BB to the new linked list. */
5367 move_block_after (bb, after);
5369 /* Update the edge count in the corresponding flowgraphs. */
5370 if (update_edge_count_p)
5371 FOR_EACH_EDGE (e, ei, bb->succs)
5373 cfun->cfg->x_n_edges--;
5374 dest_cfun->cfg->x_n_edges++;
5377 /* Remove BB from the original basic block array. */
5378 VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
5379 cfun->cfg->x_n_basic_blocks--;
5381 /* Grow DEST_CFUN's basic block array if needed. */
5382 cfg = dest_cfun->cfg;
5383 cfg->x_n_basic_blocks++;
5384 if (bb->index >= cfg->x_last_basic_block)
5385 cfg->x_last_basic_block = bb->index + 1;
5387 old_len = VEC_length (basic_block, cfg->x_basic_block_info);
5388 if ((unsigned) cfg->x_last_basic_block >= old_len)
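	/* Grow to about 5/4 of the required length to amortize later moves.  */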
5390 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
5391 VEC_safe_grow_cleared (basic_block, gc, cfg->x_basic_block_info,
5392 new_len);
5395 VEC_replace (basic_block, cfg->x_basic_block_info,
5396 bb->index, bb);
5398 /* The statements in BB need to be associated with a new TREE_BLOCK.
5399 Labels need to be associated with a new label-to-block map. */
5400 memset (&d, 0, sizeof (d));
5401 d.vars_to_remove = vars_to_remove;
5403 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
5405 tree stmt = bsi_stmt (si);
5406 int region;
5408 d.from_context = cfun->decl;
5409 d.to_context = dest_cfun->decl;
5410 d.remap_decls_p = true;
5411 d.new_label_map = new_label_map;
5412 if (TREE_BLOCK (stmt))
5413 d.block = DECL_INITIAL (dest_cfun->decl);
5415 walk_tree (&stmt, move_stmt_r, &d, NULL);
5417 if (TREE_CODE (stmt) == LABEL_EXPR)
5419 tree label = LABEL_EXPR_LABEL (stmt);
5420 int uid = LABEL_DECL_UID (label);
5422 gcc_assert (uid > -1);
5424 old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
5425 if (old_len <= (unsigned) uid)
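	  /* Grow to 3/2 of the required length to amortize later growth.  */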
5427 new_len = 3 * uid / 2;
5428 VEC_safe_grow_cleared (basic_block, gc,
5429 cfg->x_label_to_block_map, new_len);
5432 VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
5433 VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
5435 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
5437 if (uid >= dest_cfun->last_label_uid)
5438 dest_cfun->last_label_uid = uid + 1;
5440 else if (TREE_CODE (stmt) == RESX_EXPR && eh_offset != 0)
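	/* The operand of a RESX_EXPR is an EH region number; rebase it into
	   the destination function's numbering by adding EH_OFFSET.  */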
5441 TREE_OPERAND (stmt, 0) =
5442 build_int_cst (NULL_TREE,
5443 TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0))
5444 + eh_offset);
5446 region = lookup_stmt_eh_region (stmt);
5447 if (region >= 0)
5449 add_stmt_to_eh_region_fn (dest_cfun, stmt, region + eh_offset);
5450 remove_stmt_from_eh_region (stmt);
5451 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
5452 gimple_remove_stmt_histograms (cfun, stmt);
5457 /* Examine the statements in BB (which is in SRC_CFUN); find and return
5458 the outermost EH region. Use REGION as the incoming base EH region. */
5460 static int
5461 find_outermost_region_in_block (struct function *src_cfun,
5462 basic_block bb, int region)
5464 block_stmt_iterator si;
5466 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
5468 tree stmt = bsi_stmt (si);
5469 int stmt_region;
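      /* A RESX_EXPR carries its EH region number directly in operand 0;
	 any other statement is looked up in SRC_CFUN's EH table.  */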
5471 if (TREE_CODE (stmt) == RESX_EXPR)
5472 stmt_region = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
5473 else
5474 stmt_region = lookup_stmt_eh_region_fn (src_cfun, stmt);
5475 if (stmt_region > 0)
5477 if (region < 0)
5478 region = stmt_region;
5479 else if (stmt_region != region)
5481 region = eh_region_outermost (src_cfun, stmt_region, region);
5482 gcc_assert (region != -1);
5487 return region;
5490 static tree
5491 new_label_mapper (tree decl, void *data)
5493 htab_t hash = (htab_t) data;
5494 struct tree_map *m;
5495 void **slot;
5497 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
5499 m = xmalloc (sizeof (struct tree_map));
5500 m->hash = DECL_UID (decl);
5501 m->base.from = decl;
5502 m->to = create_artificial_label ();
5503 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
5505 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
5506 gcc_assert (*slot == NULL);
5508 *slot = m;
5510 return m->to;
5513 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
5514 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
5515 single basic block in the original CFG and the new basic block is
5516 returned. DEST_CFUN must not have a CFG yet.
5518 Note that the region need not be a pure SESE region. Blocks inside
5519 the region may contain calls to abort/exit. The only restriction
5520 is that ENTRY_BB should be the only entry point and it must
5521 dominate EXIT_BB.
5523 All local variables referenced in the region are assumed to be in
5524 the corresponding BLOCK_VARS and unexpanded variable lists
5525 associated with DEST_CFUN. */
5527 basic_block
5528 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
5529 basic_block exit_bb)
5531 VEC(basic_block,heap) *bbs;
5532 basic_block after, bb, *entry_pred, *exit_succ;
5533 struct function *saved_cfun;
5534 int *entry_flag, *exit_flag, eh_offset;
5535 unsigned i, num_entry_edges, num_exit_edges;
5536 edge e;
5537 edge_iterator ei;
5538 bitmap vars_to_remove;
5539 htab_t new_label_map;
5541 saved_cfun = cfun;
5543 /* Collect all the blocks in the region. Manually add ENTRY_BB
5544 because it won't be added by dfs_enumerate_from. */
5545 calculate_dominance_info (CDI_DOMINATORS);
5547 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
5548 region. */
5549 gcc_assert (entry_bb != exit_bb
5550 && (!exit_bb
5551 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
5553 bbs = NULL;
5554 VEC_safe_push (basic_block, heap, bbs, entry_bb);
5555 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
5557 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
5558 the predecessor edges to ENTRY_BB and the successor edges to
5559 EXIT_BB so that we can re-attach them to the new basic block that
5560 will replace the region. */
5561 num_entry_edges = EDGE_COUNT (entry_bb->preds);
5562 entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
5563 entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
5564 i = 0;
5565 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
5567 entry_flag[i] = e->flags;
5568 entry_pred[i++] = e->src;
5569 remove_edge (e);
5572 if (exit_bb)
5574 num_exit_edges = EDGE_COUNT (exit_bb->succs);
5575 exit_succ = (basic_block *) xcalloc (num_exit_edges,
5576 sizeof (basic_block));
5577 exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
5578 i = 0;
5579 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
5581 exit_flag[i] = e->flags;
5582 exit_succ[i++] = e->dest;
5583 remove_edge (e);
5586 else
5588 num_exit_edges = 0;
5589 exit_succ = NULL;
5590 exit_flag = NULL;
5593 /* Switch context to the child function to initialize DEST_FN's CFG. */
5594 gcc_assert (dest_cfun->cfg == NULL);
5595 cfun = dest_cfun;
5597 init_empty_tree_cfg ();
5599 /* Initialize EH information for the new function. */
5600 eh_offset = 0;
5601 new_label_map = NULL;
5602 if (saved_cfun->eh)
5604 int region = -1;
5606 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
5607 region = find_outermost_region_in_block (saved_cfun, bb, region);
5609 init_eh_for_function ();
5610 if (region != -1)
5612 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
5613 eh_offset = duplicate_eh_regions (saved_cfun, new_label_mapper,
5614 new_label_map, region, 0);
5618 cfun = saved_cfun;
5620 /* Move blocks from BBS into DEST_CFUN. */
5621 gcc_assert (VEC_length (basic_block, bbs) >= 2);
5622 after = dest_cfun->cfg->x_entry_block_ptr;
5623 vars_to_remove = BITMAP_ALLOC (NULL);
5624 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
5626 /* No need to update edge counts on the last block. It has
5627 already been updated earlier when we detached the region from
5628 the original CFG. */
5629 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, vars_to_remove,
5630 new_label_map, eh_offset);
5631 after = bb;
5634 if (new_label_map)
5635 htab_delete (new_label_map);
5637 /* Remove the variables marked in VARS_TO_REMOVE from
5638 CFUN->UNEXPANDED_VAR_LIST. Otherwise, they will be given a
5639 DECL_RTL in the context of CFUN. */
5640 if (!bitmap_empty_p (vars_to_remove))
5642 tree *p;
5644 for (p = &cfun->unexpanded_var_list; *p; )
5646 tree var = TREE_VALUE (*p);
5647 if (bitmap_bit_p (vars_to_remove, DECL_UID (var)))
5649 *p = TREE_CHAIN (*p);
5650 continue;
5653 p = &TREE_CHAIN (*p);
5657 BITMAP_FREE (vars_to_remove);
5659 /* Rewire the entry and exit blocks. The successor to the entry
5660 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
5661 the child function. Similarly, the predecessor of DEST_FN's
5662 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
5663 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
5664 various CFG manipulation functions get to the right CFG.
5666 FIXME, this is silly. The CFG ought to become a parameter to
5667 these helpers. */
5668 cfun = dest_cfun;
5669 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
5670 if (exit_bb)
5671 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
5672 cfun = saved_cfun;
5674 /* Back in the original function, the SESE region has disappeared,
5675 create a new basic block in its place. */
5676 bb = create_empty_bb (entry_pred[0]);
5677 for (i = 0; i < num_entry_edges; i++)
5678 make_edge (entry_pred[i], bb, entry_flag[i]);
5680 for (i = 0; i < num_exit_edges; i++)
5681 make_edge (bb, exit_succ[i], exit_flag[i]);
5683 if (exit_bb)
5685 free (exit_flag);
5686 free (exit_succ);
5688 free (entry_flag);
5689 free (entry_pred);
5690 free_dominance_info (CDI_DOMINATORS);
5691 free_dominance_info (CDI_POST_DOMINATORS);
5692 VEC_free (basic_block, heap, bbs);
5694 return bb;
5698 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h) */
5700 void
5701 dump_function_to_file (tree fn, FILE *file, int flags)
5703 tree arg, vars, var;
5704 struct function *dsf;
5705 bool ignore_topmost_bind = false, any_var = false;
5706 basic_block bb;
5707 tree chain;
5708 struct function *saved_cfun;
5710 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
5712 arg = DECL_ARGUMENTS (fn);
5713 while (arg)
5715 print_generic_expr (file, arg, dump_flags);
5716 if (TREE_CHAIN (arg))
5717 fprintf (file, ", ");
5718 arg = TREE_CHAIN (arg);
5720 fprintf (file, ")\n");
5722 dsf = DECL_STRUCT_FUNCTION (fn);
5723 if (dsf && (flags & TDF_DETAILS))
5724 dump_eh_tree (file, dsf);
5726 if (flags & TDF_RAW)
5728 dump_node (fn, TDF_SLIM | flags, file);
5729 return;
5732 /* Switch CFUN to point to FN. */
5733 saved_cfun = cfun;
5734 cfun = DECL_STRUCT_FUNCTION (fn);
5736 /* When GIMPLE is lowered, the variables are no longer available in
5737 BIND_EXPRs, so display them separately. */
5738 if (cfun && cfun->decl == fn && cfun->unexpanded_var_list)
5740 ignore_topmost_bind = true;
5742 fprintf (file, "{\n");
5743 for (vars = cfun->unexpanded_var_list; vars; vars = TREE_CHAIN (vars))
5745 var = TREE_VALUE (vars);
5747 print_generic_decl (file, var, flags);
5748 fprintf (file, "\n");
5750 any_var = true;
5754 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
5756 /* Make a CFG based dump. */
5757 check_bb_profile (ENTRY_BLOCK_PTR, file);
5758 if (!ignore_topmost_bind)
5759 fprintf (file, "{\n");
5761 if (any_var && n_basic_blocks)
5762 fprintf (file, "\n");
5764 FOR_EACH_BB (bb)
5765 dump_generic_bb (file, bb, 2, flags);
5767 fprintf (file, "}\n");
5768 check_bb_profile (EXIT_BLOCK_PTR, file);
5770 else
5772 int indent;
5774 /* Make a tree based dump. */
5775 chain = DECL_SAVED_TREE (fn);
5777 if (chain && TREE_CODE (chain) == BIND_EXPR)
5779 if (ignore_topmost_bind)
5781 chain = BIND_EXPR_BODY (chain);
5782 indent = 2;
5784 else
5785 indent = 0;
5787 else
5789 if (!ignore_topmost_bind)
5790 fprintf (file, "{\n");
5791 indent = 2;
5794 if (any_var)
5795 fprintf (file, "\n");
5797 print_generic_stmt_indented (file, chain, flags, indent);
5798 if (ignore_topmost_bind)
5799 fprintf (file, "}\n");
5802 fprintf (file, "\n\n");
5804 /* Restore CFUN. */
5805 cfun = saved_cfun;
5809 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
5811 void
5812 debug_function (tree fn, int flags)
5814 dump_function_to_file (fn, stderr, flags);
5818 /* Pretty printing of the loop intermediate representation. */
5819 static void print_loop (FILE *, struct loop *, int);
5820 static void print_pred_bbs (FILE *, basic_block bb);
5821 static void print_succ_bbs (FILE *, basic_block bb);
5824 /* Print on FILE the indexes for the predecessors of basic_block BB. */
5826 static void
5827 print_pred_bbs (FILE *file, basic_block bb)
5829 edge e;
5830 edge_iterator ei;
5832 FOR_EACH_EDGE (e, ei, bb->preds)
5833 fprintf (file, "bb_%d ", e->src->index);
5837 /* Print on FILE the indexes for the successors of basic_block BB. */
5839 static void
5840 print_succ_bbs (FILE *file, basic_block bb)
5842 edge e;
5843 edge_iterator ei;
5845 FOR_EACH_EDGE (e, ei, bb->succs)
5846 fprintf (file, "bb_%d ", e->dest->index);
5850 /* Pretty print LOOP on FILE, indented INDENT spaces. */
5852 static void
5853 print_loop (FILE *file, struct loop *loop, int indent)
5855 char *s_indent;
5856 basic_block bb;
5858 if (loop == NULL)
5859 return;
5861 s_indent = (char *) alloca ((size_t) indent + 1);
5862 memset ((void *) s_indent, ' ', (size_t) indent);
5863 s_indent[indent] = '\0';
5865 /* Print the loop's header. */
5866 fprintf (file, "%sloop_%d\n", s_indent, loop->num);
5868 /* Print the loop's body. */
5869 fprintf (file, "%s{\n", s_indent);
5870 FOR_EACH_BB (bb)
5871 if (bb->loop_father == loop)
5873 /* Print the basic_block's header. */
5874 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
5875 print_pred_bbs (file, bb);
5876 fprintf (file, "}, succs = {");
5877 print_succ_bbs (file, bb);
5878 fprintf (file, "})\n");
5880 /* Print the basic_block's body. */
5881 fprintf (file, "%s {\n", s_indent);
5882 tree_dump_bb (bb, file, indent + 4);
5883 fprintf (file, "%s }\n", s_indent);
5886 print_loop (file, loop->inner, indent + 2);
5887 fprintf (file, "%s}\n", s_indent);
5888 print_loop (file, loop->next, indent);
5892 /* Follow a CFG edge from the entry point of the program, and on entry
5893 of a loop, pretty print the loop structure on FILE. */
5895 void
5896 print_loop_ir (FILE *file)
5898 basic_block bb;
5900 bb = BASIC_BLOCK (NUM_FIXED_BLOCKS);
5901 if (bb && bb->loop_father)
5902 print_loop (file, bb->loop_father, 0);
5906 /* Debug the loop structure at the tree level. */
5908 void
5909 debug_loop_ir (void)
5911 print_loop_ir (stderr);
5915 /* Return true if BB ends with a call, possibly followed by some
5916 instructions that must stay with the call. Return false
5917 otherwise. */
5919 static bool
5920 tree_block_ends_with_call_p (const_basic_block bb)
5922 const_block_stmt_iterator bsi = cbsi_last (bb);
5923 return const_get_call_expr_in (cbsi_stmt (bsi)) != NULL;
5927 /* Return true if BB ends with a conditional branch. Return false
5928 otherwise. */
5930 static bool
5931 tree_block_ends_with_condjump_p (const_basic_block bb)
5933 const_tree stmt = const_last_stmt (bb);
5934 return (stmt && TREE_CODE (stmt) == COND_EXPR);
5938 /* Return true if we need to add fake edge to exit at statement T.
5939 Helper function for tree_flow_call_edges_add. */
5941 static bool
5942 need_fake_edge_p (tree t)
5944 tree call;
5946 /* NORETURN and LONGJMP calls already have an edge to exit.
5947 CONST and PURE calls do not need one.
5948 We don't currently check for CONST and PURE here, although
5949 it would be a good idea, because those attributes are
5950 figured out from the RTL in mark_constant_function, and
5951 the counter incrementation code from -fprofile-arcs
5952 leads to different results from -fbranch-probabilities. */
5953 call = get_call_expr_in (t);
5954 if (call
5955 && !(call_expr_flags (call) & ECF_NORETURN))
5956 return true;
5958 if (TREE_CODE (t) == ASM_EXPR
5959 && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
5960 return true;
5962 return false;
5966 /* Add fake edges to the function exit for any non-constant and
5967 non-noreturn calls, volatile inline assembly in the bitmap of blocks
5968 specified by BLOCKS or to the whole CFG if BLOCKS is zero. Return
5969 the number of blocks that were split.
5971 The goal is to expose cases in which entering a basic block does
5972 not imply that all subsequent instructions must be executed. */
5974 static int
5975 tree_flow_call_edges_add (sbitmap blocks)
5977 int i;
5978 int blocks_split = 0;
5979 int last_bb = last_basic_block;
5980 bool check_last_block = false;
5982 if (n_basic_blocks == NUM_FIXED_BLOCKS)
5983 return 0;
5985 if (! blocks)
5986 check_last_block = true;
5987 else
5988 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
5990 /* In the last basic block, before epilogue generation, there will be
5991 a fallthru edge to EXIT. Special care is required if the last insn
5992 of the last basic block is a call because make_edge folds duplicate
5993 edges, which would result in the fallthru edge also being marked
5994 fake, which would result in the fallthru edge being removed by
5995 remove_fake_edges, which would result in an invalid CFG.
5997 Moreover, we can't elide the outgoing fake edge, since the block
5998 profiler needs to take this into account in order to solve the minimal
5999 spanning tree in the case that the call doesn't return.
6001 Handle this by adding a dummy instruction in a new last basic block. */
6002 if (check_last_block)
6004 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
6005 block_stmt_iterator bsi = bsi_last (bb);
6006 tree t = NULL_TREE;
6007 if (!bsi_end_p (bsi))
6008 t = bsi_stmt (bsi);
6010 if (t && need_fake_edge_p (t))
6012 edge e;
6014 e = find_edge (bb, EXIT_BLOCK_PTR);
6015 if (e)
6017 bsi_insert_on_edge (e, build_empty_stmt ());
6018 bsi_commit_edge_inserts ();
6023 /* Now add fake edges to the function exit for any non-constant
6024 calls since there is no way that we can determine if they will
6025 return or not... */
6026 for (i = 0; i < last_bb; i++)
6028 basic_block bb = BASIC_BLOCK (i);
6029 block_stmt_iterator bsi;
6030 tree stmt, last_stmt;
6032 if (!bb)
6033 continue;
6035 if (blocks && !TEST_BIT (blocks, i))
6036 continue;
6038 bsi = bsi_last (bb);
6039 if (!bsi_end_p (bsi))
6041 last_stmt = bsi_stmt (bsi);
6044 stmt = bsi_stmt (bsi);
6045 if (need_fake_edge_p (stmt))
6047 edge e;
6048 /* The handling above of the final block before the
6049 epilogue should be enough to verify that there is
6050 no edge to the exit block in CFG already.
6051 Calling make_edge in such case would cause us to
6052 mark that edge as fake and remove it later. */
6053 #ifdef ENABLE_CHECKING
6054 if (stmt == last_stmt)
6056 e = find_edge (bb, EXIT_BLOCK_PTR);
6057 gcc_assert (e == NULL);
6059 #endif
6061 /* Note that the following may create a new basic block
6062 and renumber the existing basic blocks. */
6063 if (stmt != last_stmt)
6065 e = split_block (bb, stmt);
6066 if (e)
6067 blocks_split++;
6069 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
6071 bsi_prev (&bsi);
6073 while (!bsi_end_p (bsi));
6077 if (blocks_split)
6078 verify_flow_info ();
6080 return blocks_split;
6083 /* Purge dead abnormal call edges from basic block BB. */
6085 bool
6086 tree_purge_dead_abnormal_call_edges (basic_block bb)
6088 bool changed = tree_purge_dead_eh_edges (bb);
6090 if (current_function_has_nonlocal_label)
6092 tree stmt = last_stmt (bb);
6093 edge_iterator ei;
6094 edge e;
6096 if (!(stmt && tree_can_make_abnormal_goto (stmt)))
6097 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6099 if (e->flags & EDGE_ABNORMAL)
6101 remove_edge (e);
6102 changed = true;
6104 else
6105 ei_next (&ei);
6108 /* See tree_purge_dead_eh_edges below. */
6109 if (changed)
6110 free_dominance_info (CDI_DOMINATORS);
6113 return changed;
6116 /* Stores all basic blocks dominated by BB to DOM_BBS. */
6118 static void
6119 get_all_dominated_blocks (basic_block bb, VEC (basic_block, heap) **dom_bbs)
6121 basic_block son;
6123 VEC_safe_push (basic_block, heap, *dom_bbs, bb);
6124 for (son = first_dom_son (CDI_DOMINATORS, bb);
6125 son;
6126 son = next_dom_son (CDI_DOMINATORS, son))
6127 get_all_dominated_blocks (son, dom_bbs);
6130 /* Removes edge E and all the blocks dominated by it, and updates dominance
6131 information. The IL in E->src needs to be updated separately.
6132 If dominance info is not available, only the edge E is removed. */
6134 void
6135 remove_edge_and_dominated_blocks (edge e)
6137 VEC (basic_block, heap) *bbs_to_remove = NULL;
6138 VEC (basic_block, heap) *bbs_to_fix_dom = NULL;
6139 bitmap df, df_idom;
6140 edge f;
6141 edge_iterator ei;
6142 bool none_removed = false;
6143 unsigned i;
6144 basic_block bb, dbb;
6145 bitmap_iterator bi;
6147 if (!dom_info_available_p (CDI_DOMINATORS))
6149 remove_edge (e);
6150 return;
6153 /* No updating is needed for edges to exit. */
6154 if (e->dest == EXIT_BLOCK_PTR)
6156 if (cfgcleanup_altered_bbs)
6157 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
6158 remove_edge (e);
6159 return;
6162 /* First, we find the basic blocks to remove. If E->dest has a predecessor
6163 that is not dominated by E->dest, then this set is empty. Otherwise,
6164 all the basic blocks dominated by E->dest are removed.
6166 Also, to DF_IDOM we store the immediate dominators of the blocks in
6167 the dominance frontier of E (i.e., of the successors of the
6168 removed blocks, if there are any, and of E->dest otherwise). */
6169 FOR_EACH_EDGE (f, ei, e->dest->preds)
6171 if (f == e)
6172 continue;
6174 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
6176 none_removed = true;
6177 break;
6181 df = BITMAP_ALLOC (NULL);
6182 df_idom = BITMAP_ALLOC (NULL);
6184 if (none_removed)
6185 bitmap_set_bit (df_idom,
6186 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
6187 else
6189 get_all_dominated_blocks (e->dest, &bbs_to_remove);
6190 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6192 FOR_EACH_EDGE (f, ei, bb->succs)
6194 if (f->dest != EXIT_BLOCK_PTR)
6195 bitmap_set_bit (df, f->dest->index);
6198 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6199 bitmap_clear_bit (df, bb->index);
6201 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
6203 bb = BASIC_BLOCK (i);
6204 bitmap_set_bit (df_idom,
6205 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
6209 if (cfgcleanup_altered_bbs)
6211 /* Record the set of the altered basic blocks. */
6212 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
6213 bitmap_ior_into (cfgcleanup_altered_bbs, df);
6216 /* Remove E and the cancelled blocks. */
6217 if (none_removed)
6218 remove_edge (e);
6219 else
6221 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6222 delete_basic_block (bb);
6225 /* Update the dominance information. The immediate dominator may change only
6226 for blocks whose immediate dominator belongs to DF_IDOM:
6228 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
6229 removal. Let Z be an arbitrary block such that idom(Z) = Y and
6230 Z dominates X after the removal. Before removal, there exists a path P
6231 from Y to X that avoids Z. Let F be the last edge on P that is
6232 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
6233 dominates W, and because of P, Z does not dominate W), and W belongs to
6234 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
6235 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
6237 bb = BASIC_BLOCK (i);
6238 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
6239 dbb;
6240 dbb = next_dom_son (CDI_DOMINATORS, dbb))
6241 VEC_safe_push (basic_block, heap, bbs_to_fix_dom, dbb);
6244 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
6246 BITMAP_FREE (df);
6247 BITMAP_FREE (df_idom);
6248 VEC_free (basic_block, heap, bbs_to_remove);
6249 VEC_free (basic_block, heap, bbs_to_fix_dom);
6252 /* Purge dead EH edges from basic block BB. */
6254 bool
6255 tree_purge_dead_eh_edges (basic_block bb)
6257 bool changed = false;
6258 edge e;
6259 edge_iterator ei;
6260 tree stmt = last_stmt (bb);
6262 if (stmt && tree_can_throw_internal (stmt))
6263 return false;
6265 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6267 if (e->flags & EDGE_EH)
6269 remove_edge_and_dominated_blocks (e);
6270 changed = true;
6272 else
6273 ei_next (&ei);
6276 return changed;
6279 bool
6280 tree_purge_all_dead_eh_edges (const_bitmap blocks)
6282 bool changed = false;
6283 unsigned i;
6284 bitmap_iterator bi;
6286 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
6288 changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
6291 return changed;
6294 /* This function is called whenever a new edge is created or
6295 redirected. */
6297 static void
6298 tree_execute_on_growing_pred (edge e)
6300 basic_block bb = e->dest;
6302 if (phi_nodes (bb))
6303 reserve_phi_args_for_new_edge (bb);
6306 /* This function is called immediately before edge E is removed from
6307 the edge vector E->dest->preds. */
6309 static void
6310 tree_execute_on_shrinking_pred (edge e)
6312 if (phi_nodes (e->dest))
6313 remove_phi_args (e);
6316 /*---------------------------------------------------------------------------
6317 Helper functions for Loop versioning
6318 ---------------------------------------------------------------------------*/
6320 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
6321 of 'first'. Both of them are dominated by 'new_head' basic block. When
6322 'new_head' was created by 'second's incoming edge it received phi arguments
6323 on the edge by split_edge(). Later, additional edge 'e' was created to
6324 connect 'new_head' and 'first'. Now this routine adds phi args on this
6325 additional edge 'e', copying the args that the 'new_head' to 'second'
6326 edge received as part of edge splitting.
6329 static void
6330 tree_lv_adjust_loop_header_phi (basic_block first, basic_block second,
6331 basic_block new_head, edge e)
6333 tree phi1, phi2;
6334 edge e2 = find_edge (new_head, second);
6336 /* Because NEW_HEAD has been created by splitting SECOND's incoming
6337 edge, we should always have an edge from NEW_HEAD to SECOND. */
6338 gcc_assert (e2 != NULL);
6340 /* Browse all 'second' basic block phi nodes and add phi args to
6341 edge 'e' for 'first' head. PHI args are always in correct order. */
6343 for (phi2 = phi_nodes (second), phi1 = phi_nodes (first);
6344 phi2 && phi1;
6345 phi2 = PHI_CHAIN (phi2), phi1 = PHI_CHAIN (phi1))
6347 tree def = PHI_ARG_DEF (phi2, e2->dest_idx);
6348 add_phi_arg (phi1, def, e);
6352 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
6353 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
6354 the destination of the ELSE part. */
6355 static void
6356 tree_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
6357 basic_block second_head ATTRIBUTE_UNUSED,
6358 basic_block cond_bb, void *cond_e)
6360 block_stmt_iterator bsi;
6361 tree new_cond_expr = NULL_TREE;
6362 tree cond_expr = (tree) cond_e;
6363 edge e0;
6365 /* Build new conditional expr */
6366 new_cond_expr = build3 (COND_EXPR, void_type_node, cond_expr,
6367 NULL_TREE, NULL_TREE);
6369 /* Add new cond in cond_bb. */
6370 bsi = bsi_start (cond_bb);
6371 bsi_insert_after (&bsi, new_cond_expr, BSI_NEW_STMT);
6372 /* Adjust edges appropriately to connect new head with first head
6373 as well as second head. */
6374 e0 = single_succ_edge (cond_bb);
6375 e0->flags &= ~EDGE_FALLTHRU;
6376 e0->flags |= EDGE_FALSE_VALUE;
6379 struct cfg_hooks tree_cfg_hooks = {
6380 "tree",
6381 tree_verify_flow_info,
6382 tree_dump_bb, /* dump_bb */
6383 create_bb, /* create_basic_block */
6384 tree_redirect_edge_and_branch,/* redirect_edge_and_branch */
6385 tree_redirect_edge_and_branch_force,/* redirect_edge_and_branch_force */
6386 tree_can_remove_branch_p, /* can_remove_branch_p */
6387 remove_bb, /* delete_basic_block */
6388 tree_split_block, /* split_block */
6389 tree_move_block_after, /* move_block_after */
6390 tree_can_merge_blocks_p, /* can_merge_blocks_p */
6391 tree_merge_blocks, /* merge_blocks */
6392 tree_predict_edge, /* predict_edge */
6393 tree_predicted_by_p, /* predicted_by_p */
6394 tree_can_duplicate_bb_p, /* can_duplicate_block_p */
6395 tree_duplicate_bb, /* duplicate_block */
6396 tree_split_edge, /* split_edge */
6397 tree_make_forwarder_block, /* make_forward_block */
6398 NULL, /* tidy_fallthru_edge */
6399 tree_block_ends_with_call_p, /* block_ends_with_call_p */
6400 tree_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
6401 tree_flow_call_edges_add, /* flow_call_edges_add */
6402 tree_execute_on_growing_pred, /* execute_on_growing_pred */
6403 tree_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
6404 tree_duplicate_loop_to_header_edge, /* duplicate loop for trees */
6405 tree_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
6406 tree_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
6407 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
6408 flush_pending_stmts /* flush_pending_stmts */
6412 /* Split all critical edges. */
6414 static unsigned int
6415 split_critical_edges (void)
6417 basic_block bb;
6418 edge e;
6419 edge_iterator ei;
6421 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
6422 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
6423 mappings around the calls to split_edge. */
6424 start_recording_case_labels ();
6425 FOR_ALL_BB (bb)
6427 FOR_EACH_EDGE (e, ei, bb->succs)
6428 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
6430 split_edge (e);
6433 end_recording_case_labels ();
6434 return 0;
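/* Added commentary (illustrative): an edge is critical when its source has
   more than one successor and its destination has more than one predecessor.
   In a hypothetical function containing

     if (p) x = 1;
     use (x);

   the edge that goes from the condition block straight to the block holding
   use (x) is critical, because that join block is also reached from the
   "x = 1" block.  Splitting the edge inserts an empty block on it, giving
   later passes a safe place to emit code for that path alone.  */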
6437 struct tree_opt_pass pass_split_crit_edges =
6439 "crited", /* name */
6440 NULL, /* gate */
6441 split_critical_edges, /* execute */
6442 NULL, /* sub */
6443 NULL, /* next */
6444 0, /* static_pass_number */
6445 TV_TREE_SPLIT_EDGES, /* tv_id */
6446 PROP_cfg, /* properties_required */
6447 PROP_no_crit_edges, /* properties_provided */
6448 0, /* properties_destroyed */
6449 0, /* todo_flags_start */
6450 TODO_dump_func, /* todo_flags_finish */
6451 0 /* letter */
6455 /* Return EXP if it is a valid GIMPLE rvalue; otherwise gimplify it into
6456 a temporary, register that temporary to be renamed if necessary,
6457 and finally return the temporary. Insert the statements that compute
6458 EXP before the current statement in BSI. */
6460 tree
6461 gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
6463 tree t, new_stmt, orig_stmt;
6465 if (is_gimple_val (exp))
6466 return exp;
6468 t = make_rename_temp (type, NULL);
6469 new_stmt = build_gimple_modify_stmt (t, exp);
6471 orig_stmt = bsi_stmt (*bsi);
6472 SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
6473 TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
6475 bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
6476 if (gimple_in_ssa_p (cfun))
6477 mark_symbols_for_renaming (new_stmt);
6479 return t;
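/* Usage sketch (added commentary; the caller shown is hypothetical): with a
   block_stmt_iterator BSI positioned on the statement that will consume the
   value, something like

     tree sum = gimplify_val (&bsi, type,
			      fold_build2 (PLUS_EXPR, type, a, b));

   either returns the folded expression directly, if it is already a GIMPLE
   value, or returns a fresh temporary assigned from it just before *BSI.  */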
6482 /* Build a ternary operation and gimplify it. Emit code before BSI.
6483 Return the gimple_val holding the result. */
6485 tree
6486 gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
6487 tree type, tree a, tree b, tree c)
6489 tree ret;
6491 ret = fold_build3 (code, type, a, b, c);
6492 STRIP_NOPS (ret);
6494 return gimplify_val (bsi, type, ret);
6497 /* Build a binary operation and gimplify it. Emit code before BSI.
6498 Return the gimple_val holding the result. */
6500 tree
6501 gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
6502 tree type, tree a, tree b)
6504 tree ret;
6506 ret = fold_build2 (code, type, a, b);
6507 STRIP_NOPS (ret);
6509 return gimplify_val (bsi, type, ret);
6512 /* Build a unary operation and gimplify it. Emit code before BSI.
6513 Return the gimple_val holding the result. */
6515 tree
6516 gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
6517 tree a)
6519 tree ret;
6521 ret = fold_build1 (code, type, a);
6522 STRIP_NOPS (ret);
6524 return gimplify_val (bsi, type, ret);
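/* Usage sketch for the gimplify_build[123] family (added commentary; A, B
   and TYPE are hypothetical names).  To materialize A * 4 + B as a GIMPLE
   value before the statement at BSI, a pass could write

     tree scaled = gimplify_build2 (&bsi, MULT_EXPR, type, a,
				    build_int_cst (type, 4));
     tree sum = gimplify_build2 (&bsi, PLUS_EXPR, type, scaled, b);

   Each call folds its operands, and gimplify_val then forces any non-trivial
   result into a renamed temporary, so SUM is safe to use as an operand of a
   following statement.  */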
6529 /* Emit warnings about returns: a noreturn function that returns, or control reaching the end of a non-void function without a value. */
6531 static unsigned int
6532 execute_warn_function_return (void)
6534 #ifdef USE_MAPPED_LOCATION
6535 source_location location;
6536 #else
6537 location_t *locus;
6538 #endif
6539 tree last;
6540 edge e;
6541 edge_iterator ei;
6543 /* If we have a path to EXIT, then we do return. */
6544 if (TREE_THIS_VOLATILE (cfun->decl)
6545 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
6547 #ifdef USE_MAPPED_LOCATION
6548 location = UNKNOWN_LOCATION;
6549 #else
6550 locus = NULL;
6551 #endif
6552 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6554 last = last_stmt (e->src);
6555 if (TREE_CODE (last) == RETURN_EXPR
6556 #ifdef USE_MAPPED_LOCATION
6557 && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
6558 #else
6559 && (locus = EXPR_LOCUS (last)) != NULL)
6560 #endif
6561 break;
6563 #ifdef USE_MAPPED_LOCATION
6564 if (location == UNKNOWN_LOCATION)
6565 location = cfun->function_end_locus;
6566 warning (0, "%H%<noreturn%> function does return", &location);
6567 #else
6568 if (!locus)
6569 locus = &cfun->function_end_locus;
6570 warning (0, "%H%<noreturn%> function does return", locus);
6571 #endif
6574 /* If we see "return;" in some basic block, then we do reach the end
6575 without returning a value. */
6576 else if (warn_return_type
6577 && !TREE_NO_WARNING (cfun->decl)
6578 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
6579 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
6581 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6583 tree last = last_stmt (e->src);
6584 if (TREE_CODE (last) == RETURN_EXPR
6585 && TREE_OPERAND (last, 0) == NULL
6586 && !TREE_NO_WARNING (last))
6588 #ifdef USE_MAPPED_LOCATION
6589 location = EXPR_LOCATION (last);
6590 if (location == UNKNOWN_LOCATION)
6591 location = cfun->function_end_locus;
6592 warning (0, "%Hcontrol reaches end of non-void function", &location);
6593 #else
6594 locus = EXPR_LOCUS (last);
6595 if (!locus)
6596 locus = &cfun->function_end_locus;
6597 warning (0, "%Hcontrol reaches end of non-void function", locus);
6598 #endif
6599 TREE_NO_WARNING (cfun->decl) = 1;
6600 break;
6604 return 0;
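/* Added commentary (illustrative examples of what each branch diagnoses;
   TREE_THIS_VOLATILE on a FUNCTION_DECL encodes the noreturn attribute):

     void fatal (void) __attribute__ ((noreturn));
     void fatal (void) { }            -> warning: 'noreturn' function does return

     int sign (int x) { if (x > 0) return 1; }
				      -> warning under -Wreturn-type: control
					 reaches end of non-void function  */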
6608 /* Given a basic block B which ends with a conditional and has
6609 precisely two successors, determine which of the edges is taken if
6610 the conditional is true and which is taken if the conditional is
6611 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
6613 void
6614 extract_true_false_edges_from_block (basic_block b,
6615 edge *true_edge,
6616 edge *false_edge)
6618 edge e = EDGE_SUCC (b, 0);
6620 if (e->flags & EDGE_TRUE_VALUE)
6622 *true_edge = e;
6623 *false_edge = EDGE_SUCC (b, 1);
6625 else
6627 *false_edge = e;
6628 *true_edge = EDGE_SUCC (b, 1);
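/* Usage sketch (added commentary; BB is a hypothetical conditional block):

     edge true_edge, false_edge;
     extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

   Callers must already know that BB ends in a COND_EXPR with exactly two
   successors, since EDGE_SUCC (b, 0) and EDGE_SUCC (b, 1) are accessed
   unconditionally.  */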
6632 struct tree_opt_pass pass_warn_function_return =
6634 NULL, /* name */
6635 NULL, /* gate */
6636 execute_warn_function_return, /* execute */
6637 NULL, /* sub */
6638 NULL, /* next */
6639 0, /* static_pass_number */
6640 0, /* tv_id */
6641 PROP_cfg, /* properties_required */
6642 0, /* properties_provided */
6643 0, /* properties_destroyed */
6644 0, /* todo_flags_start */
6645 0, /* todo_flags_finish */
6646 0 /* letter */
6649 /* Warn about functions that can never return but are not declared noreturn. */
6651 static unsigned int
6652 execute_warn_function_noreturn (void)
6654 if (warn_missing_noreturn
6655 && !TREE_THIS_VOLATILE (cfun->decl)
6656 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
6657 && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl))
6658 warning (OPT_Wmissing_noreturn, "%Jfunction might be possible candidate "
6659 "for attribute %<noreturn%>",
6660 cfun->decl);
6661 return 0;
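/* Added commentary (illustrative): the warning fires for a function whose
   CFG never reaches EXIT yet lacks the attribute, e.g.

     void die (const char *msg)
     {
       fprintf (stderr, "%s\n", msg);
       exit (1);
     }

   Declaring it __attribute__ ((noreturn)) both silences -Wmissing-noreturn
   and lets callers discard any code that would follow a call to it.  */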
6664 struct tree_opt_pass pass_warn_function_noreturn =
6666 NULL, /* name */
6667 NULL, /* gate */
6668 execute_warn_function_noreturn, /* execute */
6669 NULL, /* sub */
6670 NULL, /* next */
6671 0, /* static_pass_number */
6672 0, /* tv_id */
6673 PROP_cfg, /* properties_required */
6674 0, /* properties_provided */
6675 0, /* properties_destroyed */
6676 0, /* todo_flags_start */
6677 0, /* todo_flags_finish */
6678 0 /* letter */