/* Source: gcc/tree-cfg.c (official-gcc.git, blob e9f315c53a2b8ab99199f87064ecfe3f7173bf9e).  */
/* Control flow functions for trees.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "ggc.h"
#include "langhooks.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "toplev.h"
#include "except.h"
#include "cfgloop.h"
#include "cfglayout.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "pointer-set.h"
#include "tree-inline.h"
51 /* This file contains functions for building the Control Flow Graph (CFG)
52 for a function tree. */
54 /* Local declarations. */
56 /* Initial capacity for the basic block array. */
57 static const int initial_cfg_capacity = 20;
59 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
60 which use a particular edge. The CASE_LABEL_EXPRs are chained together
61 via their TREE_CHAIN field, which we clear after we're done with the
62 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
64 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
65 update the case vector in response to edge redirections.
67 Right now this table is set up and torn down at key points in the
68 compilation process. It would be nice if we could make the table
69 more persistent. The key is getting notification of changes to
70 the CFG (particularly edge removal, creation and redirection). */
72 static struct pointer_map_t *edge_to_cases;
74 /* CFG statistics. */
75 struct cfg_stats_d
77 long num_merged_labels;
80 static struct cfg_stats_d cfg_stats;
82 /* Nonzero if we found a computed goto while building basic blocks. */
83 static bool found_computed_goto;
85 /* Basic blocks and flowgraphs. */
86 static void make_blocks (gimple_seq);
87 static void factor_computed_gotos (void);
89 /* Edges. */
90 static void make_edges (void);
91 static void make_cond_expr_edges (basic_block);
92 static void make_gimple_switch_edges (basic_block);
93 static void make_goto_expr_edges (basic_block);
94 static edge gimple_redirect_edge_and_branch (edge, basic_block);
95 static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
96 static unsigned int split_critical_edges (void);
98 /* Various helpers. */
99 static inline bool stmt_starts_bb_p (gimple, gimple);
100 static int gimple_verify_flow_info (void);
101 static void gimple_make_forwarder_block (edge);
102 static void gimple_cfg2vcg (FILE *);
104 /* Flowgraph optimization and cleanup. */
105 static void gimple_merge_blocks (basic_block, basic_block);
106 static bool gimple_can_merge_blocks_p (basic_block, basic_block);
107 static void remove_bb (basic_block);
108 static edge find_taken_edge_computed_goto (basic_block, tree);
109 static edge find_taken_edge_cond_expr (basic_block, tree);
110 static edge find_taken_edge_switch_expr (basic_block, tree);
111 static tree find_case_label_for_value (gimple, tree);
113 void
114 init_empty_tree_cfg_for_function (struct function *fn)
116 /* Initialize the basic block array. */
117 init_flow (fn);
118 profile_status_for_function (fn) = PROFILE_ABSENT;
119 n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
120 last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
121 basic_block_info_for_function (fn)
122 = VEC_alloc (basic_block, gc, initial_cfg_capacity);
123 VEC_safe_grow_cleared (basic_block, gc,
124 basic_block_info_for_function (fn),
125 initial_cfg_capacity);
127 /* Build a mapping of labels to their associated blocks. */
128 label_to_block_map_for_function (fn)
129 = VEC_alloc (basic_block, gc, initial_cfg_capacity);
130 VEC_safe_grow_cleared (basic_block, gc,
131 label_to_block_map_for_function (fn),
132 initial_cfg_capacity);
134 SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
135 ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
136 SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
137 EXIT_BLOCK_PTR_FOR_FUNCTION (fn));
139 ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
140 = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
141 EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
142 = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
145 void
146 init_empty_tree_cfg (void)
148 init_empty_tree_cfg_for_function (cfun);
151 /*---------------------------------------------------------------------------
152 Create basic blocks
153 ---------------------------------------------------------------------------*/
155 /* Entry point to the CFG builder for trees. SEQ is the sequence of
156 statements to be added to the flowgraph. */
158 static void
159 build_gimple_cfg (gimple_seq seq)
161 /* Register specific gimple functions. */
162 gimple_register_cfg_hooks ();
164 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
166 init_empty_tree_cfg ();
168 found_computed_goto = 0;
169 make_blocks (seq);
171 /* Computed gotos are hell to deal with, especially if there are
172 lots of them with a large number of destinations. So we factor
173 them to a common computed goto location before we build the
174 edge list. After we convert back to normal form, we will un-factor
175 the computed gotos since factoring introduces an unwanted jump. */
176 if (found_computed_goto)
177 factor_computed_gotos ();
179 /* Make sure there is always at least one block, even if it's empty. */
180 if (n_basic_blocks == NUM_FIXED_BLOCKS)
181 create_empty_bb (ENTRY_BLOCK_PTR);
183 /* Adjust the size of the array. */
184 if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
185 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);
187 /* To speed up statement iterator walks, we first purge dead labels. */
188 cleanup_dead_labels ();
190 /* Group case nodes to reduce the number of edges.
191 We do this after cleaning up dead labels because otherwise we miss
192 a lot of obvious case merging opportunities. */
193 group_case_labels ();
195 /* Create the edges of the flowgraph. */
196 make_edges ();
197 cleanup_dead_labels ();
199 /* Debugging dumps. */
201 /* Write the flowgraph to a VCG file. */
203 int local_dump_flags;
204 FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
205 if (vcg_file)
207 gimple_cfg2vcg (vcg_file);
208 dump_end (TDI_vcg, vcg_file);
212 #ifdef ENABLE_CHECKING
213 verify_stmts ();
214 #endif
217 static unsigned int
218 execute_build_cfg (void)
220 gimple_seq body = gimple_body (current_function_decl);
222 build_gimple_cfg (body);
223 gimple_set_body (current_function_decl, NULL);
224 return 0;
227 struct gimple_opt_pass pass_build_cfg =
230 GIMPLE_PASS,
231 "cfg", /* name */
232 NULL, /* gate */
233 execute_build_cfg, /* execute */
234 NULL, /* sub */
235 NULL, /* next */
236 0, /* static_pass_number */
237 TV_TREE_CFG, /* tv_id */
238 PROP_gimple_leh, /* properties_required */
239 PROP_cfg, /* properties_provided */
240 0, /* properties_destroyed */
241 0, /* todo_flags_start */
242 TODO_verify_stmts | TODO_cleanup_cfg
243 | TODO_dump_func /* todo_flags_finish */
248 /* Return true if T is a computed goto. */
250 static bool
251 computed_goto_p (gimple t)
253 return (gimple_code (t) == GIMPLE_GOTO
254 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
258 /* Search the CFG for any computed gotos. If found, factor them to a
259 common computed goto site. Also record the location of that site so
260 that we can un-factor the gotos after we have converted back to
261 normal form. */
263 static void
264 factor_computed_gotos (void)
266 basic_block bb;
267 tree factored_label_decl = NULL;
268 tree var = NULL;
269 gimple factored_computed_goto_label = NULL;
270 gimple factored_computed_goto = NULL;
272 /* We know there are one or more computed gotos in this function.
273 Examine the last statement in each basic block to see if the block
274 ends with a computed goto. */
276 FOR_EACH_BB (bb)
278 gimple_stmt_iterator gsi = gsi_last_bb (bb);
279 gimple last;
281 if (gsi_end_p (gsi))
282 continue;
284 last = gsi_stmt (gsi);
286 /* Ignore the computed goto we create when we factor the original
287 computed gotos. */
288 if (last == factored_computed_goto)
289 continue;
291 /* If the last statement is a computed goto, factor it. */
292 if (computed_goto_p (last))
294 gimple assignment;
296 /* The first time we find a computed goto we need to create
297 the factored goto block and the variable each original
298 computed goto will use for their goto destination. */
299 if (!factored_computed_goto)
301 basic_block new_bb = create_empty_bb (bb);
302 gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);
304 /* Create the destination of the factored goto. Each original
305 computed goto will put its desired destination into this
306 variable and jump to the label we create immediately
307 below. */
308 var = create_tmp_var (ptr_type_node, "gotovar");
310 /* Build a label for the new block which will contain the
311 factored computed goto. */
312 factored_label_decl = create_artificial_label ();
313 factored_computed_goto_label
314 = gimple_build_label (factored_label_decl);
315 gsi_insert_after (&new_gsi, factored_computed_goto_label,
316 GSI_NEW_STMT);
318 /* Build our new computed goto. */
319 factored_computed_goto = gimple_build_goto (var);
320 gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
323 /* Copy the original computed goto's destination into VAR. */
324 assignment = gimple_build_assign (var, gimple_goto_dest (last));
325 gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
327 /* And re-vector the computed goto to the new destination. */
328 gimple_goto_set_dest (last, factored_label_decl);
334 /* Build a flowgraph for the sequence of stmts SEQ. */
336 static void
337 make_blocks (gimple_seq seq)
339 gimple_stmt_iterator i = gsi_start (seq);
340 gimple stmt = NULL;
341 bool start_new_block = true;
342 bool first_stmt_of_seq = true;
343 basic_block bb = ENTRY_BLOCK_PTR;
345 while (!gsi_end_p (i))
347 gimple prev_stmt;
349 prev_stmt = stmt;
350 stmt = gsi_stmt (i);
352 /* If the statement starts a new basic block or if we have determined
353 in a previous pass that we need to create a new block for STMT, do
354 so now. */
355 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
357 if (!first_stmt_of_seq)
358 seq = gsi_split_seq_before (&i);
359 bb = create_basic_block (seq, NULL, bb);
360 start_new_block = false;
363 /* Now add STMT to BB and create the subgraphs for special statement
364 codes. */
365 gimple_set_bb (stmt, bb);
367 if (computed_goto_p (stmt))
368 found_computed_goto = true;
370 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
371 next iteration. */
372 if (stmt_ends_bb_p (stmt))
373 start_new_block = true;
375 gsi_next (&i);
376 first_stmt_of_seq = false;
381 /* Create and return a new empty basic block after bb AFTER. */
383 static basic_block
384 create_bb (void *h, void *e, basic_block after)
386 basic_block bb;
388 gcc_assert (!e);
390 /* Create and initialize a new basic block. Since alloc_block uses
391 ggc_alloc_cleared to allocate a basic block, we do not have to
392 clear the newly allocated basic block here. */
393 bb = alloc_block ();
395 bb->index = last_basic_block;
396 bb->flags = BB_NEW;
397 bb->il.gimple = GGC_CNEW (struct gimple_bb_info);
398 set_bb_seq (bb, h ? (gimple_seq) h : gimple_seq_alloc ());
400 /* Add the new block to the linked list of blocks. */
401 link_block (bb, after);
403 /* Grow the basic block array if needed. */
404 if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
406 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
407 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
410 /* Add the newly created block to the array. */
411 SET_BASIC_BLOCK (last_basic_block, bb);
413 n_basic_blocks++;
414 last_basic_block++;
416 return bb;
420 /*---------------------------------------------------------------------------
421 Edge creation
422 ---------------------------------------------------------------------------*/
424 /* Fold COND_EXPR_COND of each COND_EXPR. */
426 void
427 fold_cond_expr_cond (void)
429 basic_block bb;
431 FOR_EACH_BB (bb)
433 gimple stmt = last_stmt (bb);
435 if (stmt && gimple_code (stmt) == GIMPLE_COND)
437 tree cond;
438 bool zerop, onep;
440 fold_defer_overflow_warnings ();
441 cond = fold_binary (gimple_cond_code (stmt), boolean_type_node,
442 gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
443 if (cond)
445 zerop = integer_zerop (cond);
446 onep = integer_onep (cond);
448 else
449 zerop = onep = false;
451 fold_undefer_overflow_warnings (zerop || onep,
452 stmt,
453 WARN_STRICT_OVERFLOW_CONDITIONAL);
454 if (zerop)
455 gimple_cond_make_false (stmt);
456 else if (onep)
457 gimple_cond_make_true (stmt);
462 /* Join all the blocks in the flowgraph. */
464 static void
465 make_edges (void)
467 basic_block bb;
468 struct omp_region *cur_region = NULL;
470 /* Create an edge from entry to the first block with executable
471 statements in it. */
472 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);
474 /* Traverse the basic block array placing edges. */
475 FOR_EACH_BB (bb)
477 gimple last = last_stmt (bb);
478 bool fallthru;
480 if (last)
482 enum gimple_code code = gimple_code (last);
483 switch (code)
485 case GIMPLE_GOTO:
486 make_goto_expr_edges (bb);
487 fallthru = false;
488 break;
489 case GIMPLE_RETURN:
490 make_edge (bb, EXIT_BLOCK_PTR, 0);
491 fallthru = false;
492 break;
493 case GIMPLE_COND:
494 make_cond_expr_edges (bb);
495 fallthru = false;
496 break;
497 case GIMPLE_SWITCH:
498 make_gimple_switch_edges (bb);
499 fallthru = false;
500 break;
501 case GIMPLE_RESX:
502 make_eh_edges (last);
503 fallthru = false;
504 break;
506 case GIMPLE_CALL:
507 /* If this function receives a nonlocal goto, then we need to
508 make edges from this call site to all the nonlocal goto
509 handlers. */
510 if (stmt_can_make_abnormal_goto (last))
511 make_abnormal_goto_edges (bb, true);
513 /* If this statement has reachable exception handlers, then
514 create abnormal edges to them. */
515 make_eh_edges (last);
517 /* Some calls are known not to return. */
518 fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
519 break;
521 case GIMPLE_ASSIGN:
522 /* A GIMPLE_ASSIGN may throw internally and thus be considered
523 control-altering. */
524 if (is_ctrl_altering_stmt (last))
526 make_eh_edges (last);
528 fallthru = true;
529 break;
531 case GIMPLE_OMP_PARALLEL:
532 case GIMPLE_OMP_TASK:
533 case GIMPLE_OMP_FOR:
534 case GIMPLE_OMP_SINGLE:
535 case GIMPLE_OMP_MASTER:
536 case GIMPLE_OMP_ORDERED:
537 case GIMPLE_OMP_CRITICAL:
538 case GIMPLE_OMP_SECTION:
539 cur_region = new_omp_region (bb, code, cur_region);
540 fallthru = true;
541 break;
543 case GIMPLE_OMP_SECTIONS:
544 cur_region = new_omp_region (bb, code, cur_region);
545 fallthru = true;
546 break;
548 case GIMPLE_OMP_SECTIONS_SWITCH:
549 fallthru = false;
550 break;
553 case GIMPLE_OMP_ATOMIC_LOAD:
554 case GIMPLE_OMP_ATOMIC_STORE:
555 fallthru = true;
556 break;
559 case GIMPLE_OMP_RETURN:
560 /* In the case of a GIMPLE_OMP_SECTION, the edge will go
561 somewhere other than the next block. This will be
562 created later. */
563 cur_region->exit = bb;
564 fallthru = cur_region->type != GIMPLE_OMP_SECTION;
565 cur_region = cur_region->outer;
566 break;
568 case GIMPLE_OMP_CONTINUE:
569 cur_region->cont = bb;
570 switch (cur_region->type)
572 case GIMPLE_OMP_FOR:
573 /* Mark all GIMPLE_OMP_FOR and GIMPLE_OMP_CONTINUE
574 succs edges as abnormal to prevent splitting
575 them. */
576 single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
577 /* Make the loopback edge. */
578 make_edge (bb, single_succ (cur_region->entry),
579 EDGE_ABNORMAL);
581 /* Create an edge from GIMPLE_OMP_FOR to exit, which
582 corresponds to the case that the body of the loop
583 is not executed at all. */
584 make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
585 make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
586 fallthru = false;
587 break;
589 case GIMPLE_OMP_SECTIONS:
590 /* Wire up the edges into and out of the nested sections. */
592 basic_block switch_bb = single_succ (cur_region->entry);
594 struct omp_region *i;
595 for (i = cur_region->inner; i ; i = i->next)
597 gcc_assert (i->type == GIMPLE_OMP_SECTION);
598 make_edge (switch_bb, i->entry, 0);
599 make_edge (i->exit, bb, EDGE_FALLTHRU);
602 /* Make the loopback edge to the block with
603 GIMPLE_OMP_SECTIONS_SWITCH. */
604 make_edge (bb, switch_bb, 0);
606 /* Make the edge from the switch to exit. */
607 make_edge (switch_bb, bb->next_bb, 0);
608 fallthru = false;
610 break;
612 default:
613 gcc_unreachable ();
615 break;
617 default:
618 gcc_assert (!stmt_ends_bb_p (last));
619 fallthru = true;
622 else
623 fallthru = true;
625 if (fallthru)
626 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
629 if (root_omp_region)
630 free_omp_regions ();
632 /* Fold COND_EXPR_COND of each COND_EXPR. */
633 fold_cond_expr_cond ();
637 /* Create the edges for a GIMPLE_COND starting at block BB. */
639 static void
640 make_cond_expr_edges (basic_block bb)
642 gimple entry = last_stmt (bb);
643 gimple then_stmt, else_stmt;
644 basic_block then_bb, else_bb;
645 tree then_label, else_label;
646 edge e;
648 gcc_assert (entry);
649 gcc_assert (gimple_code (entry) == GIMPLE_COND);
651 /* Entry basic blocks for each component. */
652 then_label = gimple_cond_true_label (entry);
653 else_label = gimple_cond_false_label (entry);
654 then_bb = label_to_block (then_label);
655 else_bb = label_to_block (else_label);
656 then_stmt = first_stmt (then_bb);
657 else_stmt = first_stmt (else_bb);
659 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
660 e->goto_locus = gimple_location (then_stmt);
661 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
662 if (e)
663 e->goto_locus = gimple_location (else_stmt);
665 /* We do not need the labels anymore. */
666 gimple_cond_set_true_label (entry, NULL_TREE);
667 gimple_cond_set_false_label (entry, NULL_TREE);
671 /* Called for each element in the hash table (P) as we delete the
672 edge to cases hash table.
674 Clear all the TREE_CHAINs to prevent problems with copying of
675 SWITCH_EXPRs and structure sharing rules, then free the hash table
676 element. */
678 static bool
679 edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
680 void *data ATTRIBUTE_UNUSED)
682 tree t, next;
684 for (t = (tree) *value; t; t = next)
686 next = TREE_CHAIN (t);
687 TREE_CHAIN (t) = NULL;
690 *value = NULL;
691 return false;
694 /* Start recording information mapping edges to case labels. */
696 void
697 start_recording_case_labels (void)
699 gcc_assert (edge_to_cases == NULL);
700 edge_to_cases = pointer_map_create ();
703 /* Return nonzero if we are recording information for case labels. */
705 static bool
706 recording_case_labels_p (void)
708 return (edge_to_cases != NULL);
711 /* Stop recording information mapping edges to case labels and
712 remove any information we have recorded. */
713 void
714 end_recording_case_labels (void)
716 pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
717 pointer_map_destroy (edge_to_cases);
718 edge_to_cases = NULL;
721 /* If we are inside a {start,end}_recording_cases block, then return
722 a chain of CASE_LABEL_EXPRs from T which reference E.
724 Otherwise return NULL. */
726 static tree
727 get_cases_for_edge (edge e, gimple t)
729 void **slot;
730 size_t i, n;
732 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
733 chains available. Return NULL so the caller can detect this case. */
734 if (!recording_case_labels_p ())
735 return NULL;
737 slot = pointer_map_contains (edge_to_cases, e);
738 if (slot)
739 return (tree) *slot;
741 /* If we did not find E in the hash table, then this must be the first
742 time we have been queried for information about E & T. Add all the
743 elements from T to the hash table then perform the query again. */
745 n = gimple_switch_num_labels (t);
746 for (i = 0; i < n; i++)
748 tree elt = gimple_switch_label (t, i);
749 tree lab = CASE_LABEL (elt);
750 basic_block label_bb = label_to_block (lab);
751 edge this_edge = find_edge (e->src, label_bb);
753 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
754 a new chain. */
755 slot = pointer_map_insert (edge_to_cases, this_edge);
756 TREE_CHAIN (elt) = (tree) *slot;
757 *slot = elt;
760 return (tree) *pointer_map_contains (edge_to_cases, e);
763 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
765 static void
766 make_gimple_switch_edges (basic_block bb)
768 gimple entry = last_stmt (bb);
769 size_t i, n;
771 n = gimple_switch_num_labels (entry);
773 for (i = 0; i < n; ++i)
775 tree lab = CASE_LABEL (gimple_switch_label (entry, i));
776 basic_block label_bb = label_to_block (lab);
777 make_edge (bb, label_bb, 0);
782 /* Return the basic block holding label DEST. */
784 basic_block
785 label_to_block_fn (struct function *ifun, tree dest)
787 int uid = LABEL_DECL_UID (dest);
789 /* We would die hard when faced by an undefined label. Emit a label to
790 the very first basic block. This will hopefully make even the dataflow
791 and undefined variable warnings quite right. */
792 if ((errorcount || sorrycount) && uid < 0)
794 gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
795 gimple stmt;
797 stmt = gimple_build_label (dest);
798 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
799 uid = LABEL_DECL_UID (dest);
801 if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
802 <= (unsigned int) uid)
803 return NULL;
804 return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
807 /* Create edges for an abnormal goto statement at block BB. If FOR_CALL
808 is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */
810 void
811 make_abnormal_goto_edges (basic_block bb, bool for_call)
813 basic_block target_bb;
814 gimple_stmt_iterator gsi;
816 FOR_EACH_BB (target_bb)
817 for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
819 gimple label_stmt = gsi_stmt (gsi);
820 tree target;
822 if (gimple_code (label_stmt) != GIMPLE_LABEL)
823 break;
825 target = gimple_label_label (label_stmt);
827 /* Make an edge to every label block that has been marked as a
828 potential target for a computed goto or a non-local goto. */
829 if ((FORCED_LABEL (target) && !for_call)
830 || (DECL_NONLOCAL (target) && for_call))
832 make_edge (bb, target_bb, EDGE_ABNORMAL);
833 break;
838 /* Create edges for a goto statement at block BB. */
840 static void
841 make_goto_expr_edges (basic_block bb)
843 gimple_stmt_iterator last = gsi_last_bb (bb);
844 gimple goto_t = gsi_stmt (last);
846 /* A simple GOTO creates normal edges. */
847 if (simple_goto_p (goto_t))
849 tree dest = gimple_goto_dest (goto_t);
850 edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
851 e->goto_locus = gimple_location (goto_t);
852 gsi_remove (&last, true);
853 return;
856 /* A computed GOTO creates abnormal edges. */
857 make_abnormal_goto_edges (bb, false);
861 /*---------------------------------------------------------------------------
862 Flowgraph analysis
863 ---------------------------------------------------------------------------*/
865 /* Cleanup useless labels in basic blocks. This is something we wish
866 to do early because it allows us to group case labels before creating
867 the edges for the CFG, and it speeds up block statement iterators in
868 all passes later on.
869 We rerun this pass after CFG is created, to get rid of the labels that
870 are no longer referenced. After then we do not run it any more, since
871 (almost) no new labels should be created. */
873 /* A map from basic block index to the leading label of that block. */
874 static struct label_record
876 /* The label. */
877 tree label;
879 /* True if the label is referenced from somewhere. */
880 bool used;
881 } *label_for_bb;
883 /* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
884 static void
885 update_eh_label (struct eh_region *region)
887 tree old_label = get_eh_region_tree_label (region);
888 if (old_label)
890 tree new_label;
891 basic_block bb = label_to_block (old_label);
893 /* ??? After optimizing, there may be EH regions with labels
894 that have already been removed from the function body, so
895 there is no basic block for them. */
896 if (! bb)
897 return;
899 new_label = label_for_bb[bb->index].label;
900 label_for_bb[bb->index].used = true;
901 set_eh_region_tree_label (region, new_label);
906 /* Given LABEL return the first label in the same basic block. */
908 static tree
909 main_block_label (tree label)
911 basic_block bb = label_to_block (label);
912 tree main_label = label_for_bb[bb->index].label;
914 /* label_to_block possibly inserted undefined label into the chain. */
915 if (!main_label)
917 label_for_bb[bb->index].label = label;
918 main_label = label;
921 label_for_bb[bb->index].used = true;
922 return main_label;
925 /* Cleanup redundant labels. This is a three-step process:
926 1) Find the leading label for each block.
927 2) Redirect all references to labels to the leading labels.
928 3) Cleanup all useless labels. */
930 void
931 cleanup_dead_labels (void)
933 basic_block bb;
934 label_for_bb = XCNEWVEC (struct label_record, last_basic_block);
936 /* Find a suitable label for each block. We use the first user-defined
937 label if there is one, or otherwise just the first label we see. */
938 FOR_EACH_BB (bb)
940 gimple_stmt_iterator i;
942 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
944 tree label;
945 gimple stmt = gsi_stmt (i);
947 if (gimple_code (stmt) != GIMPLE_LABEL)
948 break;
950 label = gimple_label_label (stmt);
952 /* If we have not yet seen a label for the current block,
953 remember this one and see if there are more labels. */
954 if (!label_for_bb[bb->index].label)
956 label_for_bb[bb->index].label = label;
957 continue;
960 /* If we did see a label for the current block already, but it
961 is an artificially created label, replace it if the current
962 label is a user defined label. */
963 if (!DECL_ARTIFICIAL (label)
964 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
966 label_for_bb[bb->index].label = label;
967 break;
972 /* Now redirect all jumps/branches to the selected label.
973 First do so for each block ending in a control statement. */
974 FOR_EACH_BB (bb)
976 gimple stmt = last_stmt (bb);
977 if (!stmt)
978 continue;
980 switch (gimple_code (stmt))
982 case GIMPLE_COND:
984 tree true_label = gimple_cond_true_label (stmt);
985 tree false_label = gimple_cond_false_label (stmt);
987 if (true_label)
988 gimple_cond_set_true_label (stmt, main_block_label (true_label));
989 if (false_label)
990 gimple_cond_set_false_label (stmt, main_block_label (false_label));
991 break;
994 case GIMPLE_SWITCH:
996 size_t i, n = gimple_switch_num_labels (stmt);
998 /* Replace all destination labels. */
999 for (i = 0; i < n; ++i)
1001 tree case_label = gimple_switch_label (stmt, i);
1002 tree label = main_block_label (CASE_LABEL (case_label));
1003 CASE_LABEL (case_label) = label;
1005 break;
1008 /* We have to handle gotos until they're removed, and we don't
1009 remove them until after we've created the CFG edges. */
1010 case GIMPLE_GOTO:
1011 if (!computed_goto_p (stmt))
1013 tree new_dest = main_block_label (gimple_goto_dest (stmt));
1014 gimple_goto_set_dest (stmt, new_dest);
1015 break;
1018 default:
1019 break;
1023 for_each_eh_region (update_eh_label);
1025 /* Finally, purge dead labels. All user-defined labels and labels that
1026 can be the target of non-local gotos and labels which have their
1027 address taken are preserved. */
1028 FOR_EACH_BB (bb)
1030 gimple_stmt_iterator i;
1031 tree label_for_this_bb = label_for_bb[bb->index].label;
1033 if (!label_for_this_bb)
1034 continue;
1036 /* If the main label of the block is unused, we may still remove it. */
1037 if (!label_for_bb[bb->index].used)
1038 label_for_this_bb = NULL;
1040 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
1042 tree label;
1043 gimple stmt = gsi_stmt (i);
1045 if (gimple_code (stmt) != GIMPLE_LABEL)
1046 break;
1048 label = gimple_label_label (stmt);
1050 if (label == label_for_this_bb
1051 || !DECL_ARTIFICIAL (label)
1052 || DECL_NONLOCAL (label)
1053 || FORCED_LABEL (label))
1054 gsi_next (&i);
1055 else
1056 gsi_remove (&i, true);
1060 free (label_for_bb);
1063 /* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1064 and scan the sorted vector of cases. Combine the ones jumping to the
1065 same label.
1066 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1068 void
1069 group_case_labels (void)
1071 basic_block bb;
1073 FOR_EACH_BB (bb)
1075 gimple stmt = last_stmt (bb);
1076 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1078 int old_size = gimple_switch_num_labels (stmt);
1079 int i, j, new_size = old_size;
1080 tree default_case = NULL_TREE;
1081 tree default_label = NULL_TREE;
1082 bool has_default;
1084 /* The default label is always the first case in a switch
1085 statement after gimplification if it was not optimized
1086 away */
1087 if (!CASE_LOW (gimple_switch_default_label (stmt))
1088 && !CASE_HIGH (gimple_switch_default_label (stmt)))
1090 default_case = gimple_switch_default_label (stmt);
1091 default_label = CASE_LABEL (default_case);
1092 has_default = true;
1094 else
1095 has_default = false;
1097 /* Look for possible opportunities to merge cases. */
1098 if (has_default)
1099 i = 1;
1100 else
1101 i = 0;
1102 while (i < old_size)
1104 tree base_case, base_label, base_high;
1105 base_case = gimple_switch_label (stmt, i);
1107 gcc_assert (base_case);
1108 base_label = CASE_LABEL (base_case);
1110 /* Discard cases that have the same destination as the
1111 default case. */
1112 if (base_label == default_label)
1114 gimple_switch_set_label (stmt, i, NULL_TREE);
1115 i++;
1116 new_size--;
1117 continue;
1120 base_high = CASE_HIGH (base_case)
1121 ? CASE_HIGH (base_case)
1122 : CASE_LOW (base_case);
1123 i++;
1125 /* Try to merge case labels. Break out when we reach the end
1126 of the label vector or when we cannot merge the next case
1127 label with the current one. */
1128 while (i < old_size)
1130 tree merge_case = gimple_switch_label (stmt, i);
1131 tree merge_label = CASE_LABEL (merge_case);
1132 tree t = int_const_binop (PLUS_EXPR, base_high,
1133 integer_one_node, 1);
1135 /* Merge the cases if they jump to the same place,
1136 and their ranges are consecutive. */
1137 if (merge_label == base_label
1138 && tree_int_cst_equal (CASE_LOW (merge_case), t))
1140 base_high = CASE_HIGH (merge_case) ?
1141 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1142 CASE_HIGH (base_case) = base_high;
1143 gimple_switch_set_label (stmt, i, NULL_TREE);
1144 new_size--;
1145 i++;
1147 else
1148 break;
1152 /* Compress the case labels in the label vector, and adjust the
1153 length of the vector. */
1154 for (i = 0, j = 0; i < new_size; i++)
1156 while (! gimple_switch_label (stmt, j))
1157 j++;
1158 gimple_switch_set_label (stmt, i,
1159 gimple_switch_label (stmt, j++));
1162 gcc_assert (new_size <= old_size);
1163 gimple_switch_set_num_labels (stmt, new_size);
/* Checks whether we can merge block B into block A.

   A merge is legal only when A's single, non-abnormal successor edge
   leads to B, B has no other predecessor and is not the exit block,
   and nothing pins the blocks apart: a block-ending statement in A, a
   non-local label, PHI nodes in B that cannot be eliminated, user
   (non-artificial) labels in B, or B being a loop latch.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;
  gimple_stmt_iterator gsi;
  gimple_seq phis;

  /* A must transfer control to B and nowhere else, via an ordinary edge.  */
  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_ABNORMAL)
    return false;

  if (single_succ (a) != b)
    return false;

  /* B must be reachable from A only.  */
  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR)
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt
      && gimple_code (stmt) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (stmt)))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date, we cannot eliminate any phis; however, if only
     some symbols as whole are marked for renaming, this is not a problem,
     as phi nodes for those symbols are irrelevant in updating anyway.  */
  phis = phi_nodes (b);
  if (!gimple_seq_empty_p (phis))
    {
      gimple_stmt_iterator i;

      if (name_mappings_registered_p ())
        return false;

      for (i = gsi_start (phis); !gsi_end_p (i); gsi_next (&i))
        {
          gimple phi = gsi_stmt (i);

          /* Virtual PHIs are always removable; real ones only if their
             single argument may legally replace the result.  */
          if (!is_gimple_reg (gimple_phi_result (phi))
              && !may_propagate_copy (gimple_phi_result (phi),
                                      gimple_phi_arg_def (phi, 0)))
            return false;
        }
    }

  /* Do not remove user labels.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
        break;
      if (!DECL_ARTIFICIAL (gimple_label_label (stmt)))
        return false;
    }

  /* Protect the loop latches.  */
  if (current_loops
      && b->loop_father->latch == b)
    return false;

  return true;
}
/* Replaces all uses of NAME by VAL.

   PHI uses through abnormal edges are propagated specially (the
   abnormal-PHI bit is transferred to VAL); non-PHI statements are
   refolded and their EH information rechecked after the substitution.
   On exit NAME must have no remaining uses.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* PHI operand updates need no operand-cache maintenance.  */
      if (gimple_code (stmt) != GIMPLE_PHI)
        push_stmt_changes (&stmt);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
        {
          replace_exp (use, val);

          if (gimple_code (stmt) == GIMPLE_PHI)
            {
              e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
              if (e->flags & EDGE_ABNORMAL)
                {
                  /* This can only occur for virtual operands, since
                     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
                     would prevent replacement.  */
                  gcc_assert (!is_gimple_reg (name));
                  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
                }
            }
        }

      if (gimple_code (stmt) != GIMPLE_PHI)
        {
          size_t i;

          fold_stmt_inplace (stmt);
          if (cfgcleanup_altered_bbs)
            bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

          /* FIXME.  This should go in pop_stmt_changes.  */
          for (i = 0; i < gimple_num_ops (stmt); i++)
            {
              tree op = gimple_op (stmt, i);
              /* Operands may be empty here.  For example, the labels
                 of a GIMPLE_COND are nulled out following the creation
                 of the corresponding CFG edges.  */
              if (op && TREE_CODE (op) == ADDR_EXPR)
                recompute_tree_invariant_for_addr_expr (op);
            }

          maybe_clean_or_replace_eh_stmt (stmt, stmt);

          pop_stmt_changes (&stmt);
        }
    }

  gcc_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;
      loop_iterator li;

      FOR_EACH_LOOP (li, loop, 0)
        {
          substitute_in_loop_info (loop, name, val);
        }
    }
}
/* Merge block B into block A.  The caller is presumed to have checked
   mergeability (see gimple_can_merge_blocks_p) -- TODO confirm against
   callers.  B's PHIs are eliminated by propagation or by emitting
   copies at the end of A, labels in B are dropped (forced labels are
   moved to the start of A), and B's statement sequence is appended to
   A's.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi, psi;
  gimple_seq phis = phi_nodes (b);

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start (phis); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = !is_gimple_reg (def)
                              || may_propagate_copy (def, use);

      /* In case we maintain loop closed ssa form, do not propagate arguments
         of loop exit phi nodes.  */
      if (current_loops
          && loops_state_satisfies_p (LOOP_CLOSED_SSA)
          && is_gimple_reg (def)
          && TREE_CODE (use) == SSA_NAME
          && a->loop_father != b->loop_father)
        may_replace_uses = false;

      if (!may_replace_uses)
        {
          gcc_assert (is_gimple_reg (def));

          /* Note that just emitting the copies is fine -- there is no problem
             with ordering of phi nodes.  This is because A is the single
             predecessor of B, therefore results of the phi nodes cannot
             appear as arguments of the phi nodes.  */
          copy = gimple_build_assign (def, use);
          gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
          remove_phi_node (&psi, false);
        }
      else
        {
          /* If we deal with a PHI for virtual operands, we can simply
             propagate these without fussing with folding or updating
             the stmt.  */
          if (!is_gimple_reg (def))
            {
              imm_use_iterator iter;
              use_operand_p use_p;
              gimple stmt;

              FOR_EACH_IMM_USE_STMT (stmt, iter, def)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, use);
            }
          else
            replace_uses_by (def, use);

          remove_phi_node (&psi, true);
        }
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      if (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
        {
          gimple label = gsi_stmt (gsi);

          gsi_remove (&gsi, false);

          /* Now that we can thread computed gotos, we might have
             a situation where we have a forced label in block B
             However, the label at the start of block B might still be
             used in other ways (think about the runtime checking for
             Fortran assigned gotos).  So we can not just delete the
             label.  Instead we move the label to the start of block A.  */
          if (FORCED_LABEL (gimple_label_label (label)))
            {
              gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
              gsi_insert_before (&dest_gsi, label, GSI_NEW_STMT);
            }
        }
      else
        {
          gimple_set_bb (gsi_stmt (gsi), a);
          gsi_next (&gsi);
        }
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}
1427 /* Return the one of two successors of BB that is not reachable by a
1428 reached by a complex edge, if there is one. Else, return BB. We use
1429 this in optimizations that use post-dominators for their heuristics,
1430 to catch the cases in C++ where function calls are involved. */
1432 basic_block
1433 single_noncomplex_succ (basic_block bb)
1435 edge e0, e1;
1436 if (EDGE_COUNT (bb->succs) != 2)
1437 return bb;
1439 e0 = EDGE_SUCC (bb, 0);
1440 e1 = EDGE_SUCC (bb, 1);
1441 if (e0->flags & EDGE_COMPLEX)
1442 return e1->dest;
1443 if (e1->flags & EDGE_COMPLEX)
1444 return e0->dest;
1446 return bb;
1450 /* Walk the function tree removing unnecessary statements.
1452 * Empty statement nodes are removed
1454 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1456 * Unnecessary COND_EXPRs are removed
1458 * Some unnecessary BIND_EXPRs are removed
1460 * GOTO_EXPRs immediately preceding destination are removed.
1462 Clearly more work could be done. The trick is doing the analysis
1463 and removal fast enough to be a net improvement in compile times.
   Note that when we remove a control structure such as a COND_EXPR,
   BIND_EXPR, or TRY block, we will need to repeat this optimization pass
   to ensure we eliminate all the useless code.  */
/* Bookkeeping shared by remove_useless_stmts and its helpers below.  */

struct rus_data
{
  bool repeat;           /* A simplification was made that may expose
                            further opportunities; run another pass.  */
  bool may_throw;        /* The region just walked contains a statement
                            that could throw.  */
  bool may_branch;       /* The region just walked contains a branch
                            (conditional, goto or return).  */
  bool has_label;        /* A label was encountered.  */
  bool last_was_goto;    /* The last visited statement was a simple goto;
                            LAST_GOTO_GSI then points at it.  */
  gimple_stmt_iterator last_goto_gsi;  /* Iterator of that last goto, so a
                                          directly-following label can
                                          delete it.  */
};

static void remove_useless_stmts_1 (gimple_stmt_iterator *gsi, struct rus_data *);
/* Given a statement sequence, find the first executable statement with
   location information, and warn that it is unreachable.  When searching,
   descend into containers in execution order.  Return true if a warning
   was issued.  */

static bool
remove_useless_stmts_warn_notreached (gimple_seq stmts)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      /* Any statement carrying a real source line is enough to warn on.  */
      if (gimple_has_location (stmt))
        {
          location_t loc = gimple_location (stmt);
          if (LOCATION_LINE (loc) > 0)
            {
              warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
              return true;
            }
        }

      switch (gimple_code (stmt))
        {
        /* Unfortunately, we need the CFG now to detect unreachable
           branches in a conditional, so conditionals are not handled here.  */

        case GIMPLE_TRY:
          if (remove_useless_stmts_warn_notreached (gimple_try_eval (stmt)))
            return true;
          if (remove_useless_stmts_warn_notreached (gimple_try_cleanup (stmt)))
            return true;
          break;

        case GIMPLE_CATCH:
          return remove_useless_stmts_warn_notreached (gimple_catch_handler (stmt));

        case GIMPLE_EH_FILTER:
          return remove_useless_stmts_warn_notreached (gimple_eh_filter_failure (stmt));

        case GIMPLE_BIND:
          return remove_useless_stmts_warn_notreached (gimple_bind_body (stmt));

        default:
          break;
        }
    }

  return false;
}
/* Helper for remove_useless_stmts_1.  Handle GIMPLE_COND statements.

   Folds the condition and replaces trivial conditionals (always-true,
   always-false, or both arms leading to the same label) with plain
   gotos, setting DATA->repeat so the resulting goto is reconsidered on
   the next pass.  */

static void
remove_useless_stmts_cond (gimple_stmt_iterator *gsi, struct rus_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  /* The folded result must still be a conditional statement.  */
  fold_stmt_inplace (stmt);

  data->may_branch = true;

  /* Replace trivial conditionals with gotos.  */
  if (gimple_cond_true_p (stmt))
    {
      /* Goto THEN label.  */
      tree then_label = gimple_cond_true_label (stmt);

      gsi_replace (gsi, gimple_build_goto (then_label), false);
      data->last_goto_gsi = *gsi;
      data->last_was_goto = true;
      data->repeat = true;
    }
  else if (gimple_cond_false_p (stmt))
    {
      /* Goto ELSE label.  */
      tree else_label = gimple_cond_false_label (stmt);

      gsi_replace (gsi, gimple_build_goto (else_label), false);
      data->last_goto_gsi = *gsi;
      data->last_was_goto = true;
      data->repeat = true;
    }
  else
    {
      tree then_label = gimple_cond_true_label (stmt);
      tree else_label = gimple_cond_false_label (stmt);

      if (then_label == else_label)
        {
          /* Goto common destination.  */
          gsi_replace (gsi, gimple_build_goto (then_label), false);
          data->last_goto_gsi = *gsi;
          data->last_was_goto = true;
          data->repeat = true;
        }
    }

  gsi_next (gsi);

  /* NOTE(review): this unconditionally clears last_was_goto even when a
     goto was just produced above; the new goto is then handled on the
     repeat pass via remove_useless_stmts_goto -- conservative but not
     incorrect.  */
  data->last_was_goto = false;
}
/* Helper for remove_useless_stmts_1.
   Handle the try-finally case for GIMPLE_TRY statements.

   Simplifies both sub-sequences, then removes the GIMPLE_TRY wrapper
   when the body is empty, the cleanup is empty, or the body can
   neither throw nor branch (in which case body and cleanup are simply
   concatenated).  */

static void
remove_useless_stmts_tf (gimple_stmt_iterator *gsi, struct rus_data *data)
{
  bool save_may_branch, save_may_throw;
  bool this_may_branch, this_may_throw;

  gimple_seq eval_seq, cleanup_seq;
  gimple_stmt_iterator eval_gsi, cleanup_gsi;

  gimple stmt = gsi_stmt (*gsi);

  /* Collect may_branch and may_throw information for the body only.  */
  save_may_branch = data->may_branch;
  save_may_throw = data->may_throw;
  data->may_branch = false;
  data->may_throw = false;
  data->last_was_goto = false;

  eval_seq = gimple_try_eval (stmt);
  eval_gsi = gsi_start (eval_seq);
  remove_useless_stmts_1 (&eval_gsi, data);

  this_may_branch = data->may_branch;
  this_may_throw = data->may_throw;
  data->may_branch |= save_may_branch;
  data->may_throw |= save_may_throw;
  data->last_was_goto = false;

  cleanup_seq = gimple_try_cleanup (stmt);
  cleanup_gsi = gsi_start (cleanup_seq);
  remove_useless_stmts_1 (&cleanup_gsi, data);

  /* If the body is empty, then we can emit the FINALLY block without
     the enclosing TRY_FINALLY_EXPR.  */
  if (gimple_seq_empty_p (eval_seq))
    {
      gsi_insert_seq_before (gsi, cleanup_seq, GSI_SAME_STMT);
      gsi_remove (gsi, false);
      data->repeat = true;
    }

  /* If the handler is empty, then we can emit the TRY block without
     the enclosing TRY_FINALLY_EXPR.  */
  else if (gimple_seq_empty_p (cleanup_seq))
    {
      gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
      gsi_remove (gsi, false);
      data->repeat = true;
    }

  /* If the body neither throws, nor branches, then we can safely
     string the TRY and FINALLY blocks together.  */
  else if (!this_may_branch && !this_may_throw)
    {
      gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
      gsi_insert_seq_before (gsi, cleanup_seq, GSI_SAME_STMT);
      gsi_remove (gsi, false);
      data->repeat = true;
    }
  else
    gsi_next (gsi);
}
/* Helper for remove_useless_stmts_1.
   Handle the try-catch case for GIMPLE_TRY statements.

   If the simplified body cannot throw, the whole construct is replaced
   by the body.  Otherwise the cleanup is examined: catch-all handlers
   and must-not-throw/empty-type EH filters stop exception propagation,
   which is recorded back into DATA->may_throw.  */

static void
remove_useless_stmts_tc (gimple_stmt_iterator *gsi, struct rus_data *data)
{
  bool save_may_throw, this_may_throw;

  gimple_seq eval_seq, cleanup_seq, handler_seq, failure_seq;
  gimple_stmt_iterator eval_gsi, cleanup_gsi, handler_gsi, failure_gsi;

  gimple stmt = gsi_stmt (*gsi);

  /* Collect may_throw information for the body only.  */
  save_may_throw = data->may_throw;
  data->may_throw = false;
  data->last_was_goto = false;

  eval_seq = gimple_try_eval (stmt);
  eval_gsi = gsi_start (eval_seq);
  remove_useless_stmts_1 (&eval_gsi, data);

  this_may_throw = data->may_throw;
  data->may_throw = save_may_throw;

  cleanup_seq = gimple_try_cleanup (stmt);

  /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR.  */
  if (!this_may_throw)
    {
      if (warn_notreached)
        {
          remove_useless_stmts_warn_notreached (cleanup_seq);
        }
      gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
      gsi_remove (gsi, false);
      data->repeat = true;
      return;
    }

  /* Process the catch clause specially.  We may be able to tell that
     no exceptions propagate past this point.  */

  this_may_throw = true;
  cleanup_gsi = gsi_start (cleanup_seq);
  stmt = gsi_stmt (cleanup_gsi);
  data->last_was_goto = false;

  switch (gimple_code (stmt))
    {
    case GIMPLE_CATCH:
      /* If the first element is a catch, they all must be.  */
      while (!gsi_end_p (cleanup_gsi))
        {
          stmt = gsi_stmt (cleanup_gsi);
          /* If we catch all exceptions, then the body does not
             propagate exceptions past this point.  */
          if (gimple_catch_types (stmt) == NULL)
            this_may_throw = false;
          data->last_was_goto = false;
          handler_seq = gimple_catch_handler (stmt);
          handler_gsi = gsi_start (handler_seq);
          remove_useless_stmts_1 (&handler_gsi, data);
          gsi_next (&cleanup_gsi);
        }
      gsi_next (gsi);
      break;

    case GIMPLE_EH_FILTER:
      /* If the first element is an eh_filter, it should stand alone.  */
      if (gimple_eh_filter_must_not_throw (stmt))
        this_may_throw = false;
      else if (gimple_eh_filter_types (stmt) == NULL)
        this_may_throw = false;
      failure_seq = gimple_eh_filter_failure (stmt);
      failure_gsi = gsi_start (failure_seq);
      remove_useless_stmts_1 (&failure_gsi, data);
      gsi_next (gsi);
      break;

    default:
      /* Otherwise this is a list of cleanup statements.  */
      remove_useless_stmts_1 (&cleanup_gsi, data);

      /* If the cleanup is empty, then we can emit the TRY block without
         the enclosing TRY_CATCH_EXPR.  */
      if (gimple_seq_empty_p (cleanup_seq))
        {
          gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
          gsi_remove (gsi, false);
          data->repeat = true;
        }
      else
        gsi_next (gsi);
      break;
    }

  data->may_throw |= this_may_throw;
}
1753 /* Helper for remove_useless_stmts_1. Handle GIMPLE_BIND statements. */
1755 static void
1756 remove_useless_stmts_bind (gimple_stmt_iterator *gsi, struct rus_data *data ATTRIBUTE_UNUSED)
1758 tree block;
1759 gimple_seq body_seq, fn_body_seq;
1760 gimple_stmt_iterator body_gsi;
1762 gimple stmt = gsi_stmt (*gsi);
1764 /* First remove anything underneath the BIND_EXPR. */
1766 body_seq = gimple_bind_body (stmt);
1767 body_gsi = gsi_start (body_seq);
1768 remove_useless_stmts_1 (&body_gsi, data);
1770 /* If the GIMPLE_BIND has no variables, then we can pull everything
1771 up one level and remove the GIMPLE_BIND, unless this is the toplevel
1772 GIMPLE_BIND for the current function or an inlined function.
1774 When this situation occurs we will want to apply this
1775 optimization again. */
1776 block = gimple_bind_block (stmt);
1777 fn_body_seq = gimple_body (current_function_decl);
1778 if (gimple_bind_vars (stmt) == NULL_TREE
1779 && (gimple_seq_empty_p (fn_body_seq)
1780 || stmt != gimple_seq_first_stmt (fn_body_seq))
1781 && (! block
1782 || ! BLOCK_ABSTRACT_ORIGIN (block)
1783 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1784 != FUNCTION_DECL)))
1786 gsi_insert_seq_before (gsi, body_seq, GSI_SAME_STMT);
1787 gsi_remove (gsi, false);
1788 data->repeat = true;
1790 else
1791 gsi_next (gsi);
1794 /* Helper for remove_useless_stmts_1. Handle GIMPLE_GOTO statements. */
1796 static void
1797 remove_useless_stmts_goto (gimple_stmt_iterator *gsi, struct rus_data *data)
1799 gimple stmt = gsi_stmt (*gsi);
1801 tree dest = gimple_goto_dest (stmt);
1803 data->may_branch = true;
1804 data->last_was_goto = false;
1806 /* Record iterator for last goto expr, so that we can delete it if unnecessary. */
1807 if (TREE_CODE (dest) == LABEL_DECL)
1809 data->last_goto_gsi = *gsi;
1810 data->last_was_goto = true;
1813 gsi_next(gsi);
/* Helper for remove_useless_stmts_1.  Handle GIMPLE_LABEL statements.

   If the label is the destination of the immediately preceding goto
   (tracked in DATA->last_goto_gsi), that goto is useless and is
   neutralized by turning it into a GIMPLE_NOP.  */

static void
remove_useless_stmts_label (gimple_stmt_iterator *gsi, struct rus_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  tree label = gimple_label_label (stmt);

  data->has_label = true;

  /* We do want to jump across non-local label receiver code.  */
  if (DECL_NONLOCAL (label))
    data->last_was_goto = false;

  else if (data->last_was_goto
           && gimple_goto_dest (gsi_stmt (data->last_goto_gsi)) == label)
    {
      /* Replace the preceding GIMPLE_GOTO statement with
         a GIMPLE_NOP, which will be subsequently removed.
         In this way, we avoid invalidating other iterators
         active on the statement sequence.  */
      gsi_replace (&data->last_goto_gsi, gimple_build_nop (), false);
      data->last_was_goto = false;
      data->repeat = true;
    }

  /* ???  Add something here to delete unused labels.  */

  gsi_next (gsi);
}
1849 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1851 void
1852 notice_special_calls (gimple call)
1854 int flags = gimple_call_flags (call);
1856 if (flags & ECF_MAY_BE_ALLOCA)
1857 cfun->calls_alloca = true;
1858 if (flags & ECF_RETURNS_TWICE)
1859 cfun->calls_setjmp = true;
/* Clear flags set by notice_special_calls.  Used by dead code removal
   to update the flags.  */

void
clear_special_calls (void)
{
  cfun->calls_alloca = false;
  cfun->calls_setjmp = false;
}
/* Remove useless statements from the statement sequence at *GSI, and
   perform some preliminary simplifications.  Folds simple statements,
   deletes GIMPLE_NOPs, and dispatches structured statements to the
   remove_useless_stmts_* helpers above, accumulating control-flow
   facts in DATA.  */

static void
remove_useless_stmts_1 (gimple_stmt_iterator *gsi, struct rus_data *data)
{
  while (!gsi_end_p (*gsi))
    {
      gimple stmt = gsi_stmt (*gsi);

      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          remove_useless_stmts_cond (gsi, data);
          break;

        case GIMPLE_GOTO:
          remove_useless_stmts_goto (gsi, data);
          break;

        case GIMPLE_LABEL:
          remove_useless_stmts_label (gsi, data);
          break;

        case GIMPLE_ASSIGN:
          fold_stmt (gsi);
          /* Re-read: folding may have replaced the statement.  */
          stmt = gsi_stmt (*gsi);
          data->last_was_goto = false;
          if (stmt_could_throw_p (stmt))
            data->may_throw = true;
          gsi_next (gsi);
          break;

        case GIMPLE_ASM:
          fold_stmt (gsi);
          data->last_was_goto = false;
          gsi_next (gsi);
          break;

        case GIMPLE_CALL:
          fold_stmt (gsi);
          stmt = gsi_stmt (*gsi);
          data->last_was_goto = false;
          /* Folding may have turned the call into something else.  */
          if (is_gimple_call (stmt))
            notice_special_calls (stmt);

          /* We used to call update_gimple_call_flags here,
             which copied side-effects and nothrows status
             from the function decl to the call.  In the new
             tuplified GIMPLE, the accessors for this information
             always consult the function decl, so this copying
             is no longer necessary.  */
          if (stmt_could_throw_p (stmt))
            data->may_throw = true;
          gsi_next (gsi);
          break;

        case GIMPLE_RETURN:
          fold_stmt (gsi);
          data->last_was_goto = false;
          data->may_branch = true;
          gsi_next (gsi);
          break;

        case GIMPLE_BIND:
          remove_useless_stmts_bind (gsi, data);
          break;

        case GIMPLE_TRY:
          if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
            remove_useless_stmts_tc (gsi, data);
          else if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
            remove_useless_stmts_tf (gsi, data);
          else
            gcc_unreachable ();
          break;

        case GIMPLE_CATCH:
          /* Handled inside remove_useless_stmts_tc; a bare catch here
             is malformed.  */
          gcc_unreachable ();
          break;

        case GIMPLE_NOP:
          gsi_remove (gsi, false);
          break;

        case GIMPLE_OMP_FOR:
          {
            gimple_seq pre_body_seq = gimple_omp_for_pre_body (stmt);
            gimple_stmt_iterator pre_body_gsi = gsi_start (pre_body_seq);

            remove_useless_stmts_1 (&pre_body_gsi, data);
            data->last_was_goto = false;
          }
          /* FALLTHROUGH */
        case GIMPLE_OMP_CRITICAL:
        case GIMPLE_OMP_CONTINUE:
        case GIMPLE_OMP_MASTER:
        case GIMPLE_OMP_ORDERED:
        case GIMPLE_OMP_SECTION:
        case GIMPLE_OMP_SECTIONS:
        case GIMPLE_OMP_SINGLE:
          {
            gimple_seq body_seq = gimple_omp_body (stmt);
            gimple_stmt_iterator body_gsi = gsi_start (body_seq);

            remove_useless_stmts_1 (&body_gsi, data);
            data->last_was_goto = false;
            gsi_next (gsi);
          }
          break;

        case GIMPLE_OMP_PARALLEL:
        case GIMPLE_OMP_TASK:
          {
            /* Make sure the outermost GIMPLE_BIND isn't removed
               as useless.  */
            gimple_seq body_seq = gimple_omp_body (stmt);
            gimple bind = gimple_seq_first_stmt (body_seq);
            gimple_seq bind_seq = gimple_bind_body (bind);
            gimple_stmt_iterator bind_gsi = gsi_start (bind_seq);

            remove_useless_stmts_1 (&bind_gsi, data);
            data->last_was_goto = false;
            gsi_next (gsi);
          }
          break;

        case GIMPLE_CHANGE_DYNAMIC_TYPE:
          /* If we do not optimize remove GIMPLE_CHANGE_DYNAMIC_TYPE as
             expansion is confused about them and we only remove them
             during alias computation otherwise.  */
          if (!optimize)
            {
              data->last_was_goto = false;
              gsi_remove (gsi, false);
              break;
            }
          /* Fallthru.  */

        default:
          data->last_was_goto = false;
          gsi_next (gsi);
          break;
        }
    }
}
/* Walk the function tree, removing useless statements and performing
   some preliminary simplifications.  Repeats the walk from scratch
   whenever a pass reports via data.repeat that it exposed more work.
   Always returns 0 (no additional TODO flags).  */

static unsigned int
remove_useless_stmts (void)
{
  struct rus_data data;

  clear_special_calls ();

  do
    {
      gimple_stmt_iterator gsi;

      gsi = gsi_start (gimple_body (current_function_decl));
      memset (&data, 0, sizeof (data));
      remove_useless_stmts_1 (&gsi, &data);
    }
  while (data.repeat);
  return 0;
}
/* Pass descriptor for the "useless" statement-removal pass above.
   Requires any GIMPLE form and dumps the function afterwards.  */

struct gimple_opt_pass pass_remove_useless_stmts =
{
 {
  GIMPLE_PASS,
  "useless",				/* name */
  NULL,					/* gate */
  remove_useless_stmts,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_gimple_any,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func			/* todo_flags_finish */
 }
};
2062 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2064 static void
2065 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2067 gimple_stmt_iterator gsi;
2069 /* Since this block is no longer reachable, we can just delete all
2070 of its PHI nodes. */
2071 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); )
2072 remove_phi_node (&gsi, true);
2074 set_phi_nodes (bb, NULL);
2076 /* Remove edges to BB's successors. */
2077 while (EDGE_COUNT (bb->succs) > 0)
2078 remove_edge (EDGE_SUCC (bb, 0));
/* Remove statements of basic block BB.  Forced and non-local labels
   are preserved by moving them into the previous block; everything
   else is deleted (releasing SSA defs when in SSA form).  May issue an
   -Wunreachable-code warning for the first warnable statement.  */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;
  source_location loc = UNKNOWN_LOCATION;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
        {
          dump_bb (bb, dump_file, 0);
          fprintf (dump_file, "\n");
        }
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
         with it.  */
      if (loop->latch == bb
          || loop->header == bb)
        free_numbers_of_iterations_estimates_loop (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      for (i = gsi_start_bb (bb); !gsi_end_p (i);)
        {
          gimple stmt = gsi_stmt (i);
          if (gimple_code (stmt) == GIMPLE_LABEL
              && (FORCED_LABEL (gimple_label_label (stmt))
                  || DECL_NONLOCAL (gimple_label_label (stmt))))
            {
              basic_block new_bb;
              gimple_stmt_iterator new_gsi;

              /* A non-reachable non-local label may still be referenced.
                 But it no longer needs to carry the extra semantics of
                 non-locality.  */
              if (DECL_NONLOCAL (gimple_label_label (stmt)))
                {
                  DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
                  FORCED_LABEL (gimple_label_label (stmt)) = 1;
                }

              new_bb = bb->prev_bb;
              new_gsi = gsi_start_bb (new_bb);
              gsi_remove (&i, false);
              gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
            }
          else
            {
              /* Release SSA definitions if we are in SSA.  Note that we
                 may be called when not in SSA.  For example,
                 final_cleanup calls this function via
                 cleanup_tree_cfg.  */
              if (gimple_in_ssa_p (cfun))
                release_defs (stmt);

              gsi_remove (&i, true);
            }

          /* Don't warn for removed gotos.  Gotos are often removed due to
             jump threading, thus resulting in bogus warnings.  Not great,
             since this way we lose warnings for gotos in the original
             program that are indeed unreachable.  */
          if (gimple_code (stmt) != GIMPLE_GOTO
              && gimple_has_location (stmt)
              && !loc)
            loc = gimple_location (stmt);
        }
    }

  /* If requested, give a warning that the first statement in the
     block is unreachable.  LOC is only assigned once above (guarded by
     !loc), so it holds the location of the first non-goto statement
     that carries one.  */
  if (loc > BUILTINS_LOCATION && LOCATION_LINE (loc) > 0)
    warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple = NULL;
}
/* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
   predicate VAL, return the edge that will be taken out of the block.
   If VAL does not match a unique edge, NULL is returned.  */

edge
find_taken_edge (basic_block bb, tree val)
{
  gimple stmt;

  stmt = last_stmt (bb);

  gcc_assert (stmt);
  gcc_assert (is_ctrl_stmt (stmt));

  if (val == NULL)
    return NULL;

  /* A non-constant predicate cannot select an edge statically.  */
  if (!is_gimple_min_invariant (val))
    return NULL;

  if (gimple_code (stmt) == GIMPLE_COND)
    return find_taken_edge_cond_expr (bb, val);

  if (gimple_code (stmt) == GIMPLE_SWITCH)
    return find_taken_edge_switch_expr (bb, val);

  if (computed_goto_p (stmt))
    {
      /* Only optimize if the argument is a label, if the argument is
         not a label then we can not construct a proper CFG.

         It may be the case that we only need to allow the LABEL_REF to
         appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
         appear inside a LABEL_EXPR just to be safe.  */
      if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
          && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
        return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
      return NULL;
    }

  gcc_unreachable ();
}
2216 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2217 statement, determine which of the outgoing edges will be taken out of the
2218 block. Return NULL if either edge may be taken. */
2220 static edge
2221 find_taken_edge_computed_goto (basic_block bb, tree val)
2223 basic_block dest;
2224 edge e = NULL;
2226 dest = label_to_block (val);
2227 if (dest)
2229 e = find_edge (bb, dest);
2230 gcc_assert (e != NULL);
2233 return e;
2236 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2237 statement, determine which of the two edges will be taken out of the
2238 block. Return NULL if either edge may be taken. */
2240 static edge
2241 find_taken_edge_cond_expr (basic_block bb, tree val)
2243 edge true_edge, false_edge;
2245 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2247 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2248 return (integer_zerop (val) ? false_edge : true_edge);
2251 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2252 statement, determine which edge will be taken out of the block. Return
2253 NULL if any edge may be taken. */
2255 static edge
2256 find_taken_edge_switch_expr (basic_block bb, tree val)
2258 basic_block dest_bb;
2259 edge e;
2260 gimple switch_stmt;
2261 tree taken_case;
2263 switch_stmt = last_stmt (bb);
2264 taken_case = find_case_label_for_value (switch_stmt, val);
2265 dest_bb = label_to_block (CASE_LABEL (taken_case));
2267 e = find_edge (bb, dest_bb);
2268 gcc_assert (e);
2269 return e;
/* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
   We can make optimal use here of the fact that the case labels are
   sorted: We can do a binary search for a case matching VAL.  */

static tree
find_case_label_for_value (gimple switch_stmt, tree val)
{
  size_t low, high, n = gimple_switch_num_labels (switch_stmt);
  tree default_case = gimple_switch_default_label (switch_stmt);

  /* Binary search.  LOW starts at 0 (the default label, which is never
     compared directly); the loop narrows [LOW, HIGH) until a label
     matching VAL is found or the range collapses.  */
  for (low = 0, high = n; high - low > 1; )
    {
      size_t i = (high + low) / 2;
      tree t = gimple_switch_label (switch_stmt, i);
      int cmp;

      /* Cache the result of comparing CASE_LOW and val.  */
      cmp = tree_int_cst_compare (CASE_LOW (t), val);

      if (cmp > 0)
        high = i;
      else
        low = i;

      if (CASE_HIGH (t) == NULL)
        {
          /* A single-valued case label.  */
          if (cmp == 0)
            return t;
        }
      else
        {
          /* A case range.  We can only handle integer ranges.  */
          if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
            return t;
        }
    }

  /* No case matched; the switch falls back to the default label.  */
  return default_case;
}
/* Dump a basic block on stderr (with virtual operands and memory
   symbols included).  Intended for use from a debugger.  */

void
gimple_debug_bb (basic_block bb)
{
  gimple_dump_bb (bb, stderr, 0, TDF_VOPS|TDF_MEMSYMS);
}
2324 /* Dump basic block with index N on stderr. */
2326 basic_block
2327 gimple_debug_bb_n (int n)
2329 gimple_debug_bb (BASIC_BLOCK (n));
2330 return BASIC_BLOCK (n);
/* Dump the CFG on stderr.

   FLAGS are the same used by the tree dumping functions
   (see TDF_* in tree-pass.h).  */

void
gimple_debug_cfg (int flags)
{
  gimple_dump_cfg (stderr, flags);
}
/* Dump the program showing basic block boundaries on the given FILE.

   FLAGS are the same used by the tree dumping functions (see TDF_* in
   tree.h).  TDF_DETAILS adds a brief CFG summary header, TDF_STATS
   appends memory statistics.  */

void
gimple_dump_cfg (FILE *file, int flags)
{
  if (flags & TDF_DETAILS)
    {
      const char *funcname
        = lang_hooks.decl_printable_name (current_function_decl, 2);

      fputc ('\n', file);
      fprintf (file, ";; Function %s\n\n", funcname);
      fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
               n_basic_blocks, n_edges, last_basic_block);

      brief_dump_cfg (file);
      fprintf (file, "\n");
    }

  if (flags & TDF_STATS)
    dump_cfg_stats (file);

  dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
}
2375 /* Dump CFG statistics on FILE. */
2377 void
2378 dump_cfg_stats (FILE *file)
2380 static long max_num_merged_labels = 0;
2381 unsigned long size, total = 0;
2382 long num_edges;
2383 basic_block bb;
2384 const char * const fmt_str = "%-30s%-13s%12s\n";
2385 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2386 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2387 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2388 const char *funcname
2389 = lang_hooks.decl_printable_name (current_function_decl, 2);
2392 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2394 fprintf (file, "---------------------------------------------------------\n");
2395 fprintf (file, fmt_str, "", " Number of ", "Memory");
2396 fprintf (file, fmt_str, "", " instances ", "used ");
2397 fprintf (file, "---------------------------------------------------------\n");
2399 size = n_basic_blocks * sizeof (struct basic_block_def);
2400 total += size;
2401 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2402 SCALE (size), LABEL (size));
2404 num_edges = 0;
2405 FOR_EACH_BB (bb)
2406 num_edges += EDGE_COUNT (bb->succs);
2407 size = num_edges * sizeof (struct edge_def);
2408 total += size;
2409 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2411 fprintf (file, "---------------------------------------------------------\n");
2412 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2413 LABEL (total));
2414 fprintf (file, "---------------------------------------------------------\n");
2415 fprintf (file, "\n");
2417 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2418 max_num_merged_labels = cfg_stats.num_merged_labels;
2420 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2421 cfg_stats.num_merged_labels, max_num_merged_labels);
2423 fprintf (file, "\n");
/* Dump CFG statistics on stderr.  Keep extern so that it's always
   linked in the final executable.  */

void
debug_cfg_stats (void)
{
  dump_cfg_stats (stderr);
}
2437 /* Dump the flowgraph to a .vcg FILE. */
2439 static void
2440 gimple_cfg2vcg (FILE *file)
2442 edge e;
2443 edge_iterator ei;
2444 basic_block bb;
2445 const char *funcname
2446 = lang_hooks.decl_printable_name (current_function_decl, 2);
2448 /* Write the file header. */
2449 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2450 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2451 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2453 /* Write blocks and edges. */
2454 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2456 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2457 e->dest->index);
2459 if (e->flags & EDGE_FAKE)
2460 fprintf (file, " linestyle: dotted priority: 10");
2461 else
2462 fprintf (file, " linestyle: solid priority: 100");
2464 fprintf (file, " }\n");
2466 fputc ('\n', file);
2468 FOR_EACH_BB (bb)
2470 enum gimple_code head_code, end_code;
2471 const char *head_name, *end_name;
2472 int head_line = 0;
2473 int end_line = 0;
2474 gimple first = first_stmt (bb);
2475 gimple last = last_stmt (bb);
2477 if (first)
2479 head_code = gimple_code (first);
2480 head_name = gimple_code_name[head_code];
2481 head_line = get_lineno (first);
2483 else
2484 head_name = "no-statement";
2486 if (last)
2488 end_code = gimple_code (last);
2489 end_name = gimple_code_name[end_code];
2490 end_line = get_lineno (last);
2492 else
2493 end_name = "no-statement";
2495 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2496 bb->index, bb->index, head_name, head_line, end_name,
2497 end_line);
2499 FOR_EACH_EDGE (e, ei, bb->succs)
2501 if (e->dest == EXIT_BLOCK_PTR)
2502 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2503 else
2504 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2506 if (e->flags & EDGE_FAKE)
2507 fprintf (file, " priority: 10 linestyle: dotted");
2508 else
2509 fprintf (file, " priority: 100 linestyle: solid");
2511 fprintf (file, " }\n");
2514 if (bb->next_bb != EXIT_BLOCK_PTR)
2515 fputc ('\n', file);
2518 fputs ("}\n\n", file);
2523 /*---------------------------------------------------------------------------
2524 Miscellaneous helpers
2525 ---------------------------------------------------------------------------*/
2527 /* Return true if T represents a stmt that always transfers control. */
2529 bool
2530 is_ctrl_stmt (gimple t)
2532 return gimple_code (t) == GIMPLE_COND
2533 || gimple_code (t) == GIMPLE_SWITCH
2534 || gimple_code (t) == GIMPLE_GOTO
2535 || gimple_code (t) == GIMPLE_RETURN
2536 || gimple_code (t) == GIMPLE_RESX;
2540 /* Return true if T is a statement that may alter the flow of control
2541 (e.g., a call to a non-returning function). */
2543 bool
2544 is_ctrl_altering_stmt (gimple t)
2546 gcc_assert (t);
2548 if (is_gimple_call (t))
2550 int flags = gimple_call_flags (t);
2552 /* A non-pure/const call alters flow control if the current
2553 function has nonlocal labels. */
2554 if (!(flags & (ECF_CONST | ECF_PURE))
2555 && cfun->has_nonlocal_label)
2556 return true;
2558 /* A call also alters control flow if it does not return. */
2559 if (gimple_call_flags (t) & ECF_NORETURN)
2560 return true;
2563 /* OpenMP directives alter control flow. */
2564 if (is_gimple_omp (t))
2565 return true;
2567 /* If a statement can throw, it alters control flow. */
2568 return stmt_can_throw_internal (t);
2572 /* Return true if T is a simple local goto. */
2574 bool
2575 simple_goto_p (gimple t)
2577 return (gimple_code (t) == GIMPLE_GOTO
2578 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2582 /* Return true if T can make an abnormal transfer of control flow.
2583 Transfers of control flow associated with EH are excluded. */
2585 bool
2586 stmt_can_make_abnormal_goto (gimple t)
2588 if (computed_goto_p (t))
2589 return true;
2590 if (is_gimple_call (t))
2591 return gimple_has_side_effects (t) && cfun->has_nonlocal_label;
2592 return false;
2596 /* Return true if STMT should start a new basic block. PREV_STMT is
2597 the statement preceding STMT. It is used when STMT is a label or a
2598 case label. Labels should only start a new basic block if their
2599 previous statement wasn't a label. Otherwise, sequence of labels
2600 would generate unnecessary basic blocks that only contain a single
2601 label. */
2603 static inline bool
2604 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2606 if (stmt == NULL)
2607 return false;
2609 /* Labels start a new basic block only if the preceding statement
2610 wasn't a label of the same type. This prevents the creation of
2611 consecutive blocks that have nothing but a single label. */
2612 if (gimple_code (stmt) == GIMPLE_LABEL)
2614 /* Nonlocal and computed GOTO targets always start a new block. */
2615 if (DECL_NONLOCAL (gimple_label_label (stmt))
2616 || FORCED_LABEL (gimple_label_label (stmt)))
2617 return true;
2619 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2621 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
2622 return true;
2624 cfg_stats.num_merged_labels++;
2625 return false;
2627 else
2628 return true;
2631 return false;
2635 /* Return true if T should end a basic block. */
2637 bool
2638 stmt_ends_bb_p (gimple t)
2640 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2643 /* Remove block annotations and other data structures. */
2645 void
2646 delete_tree_cfg_annotations (void)
2648 label_to_block_map = NULL;
2652 /* Return the first statement in basic block BB. */
2654 gimple
2655 first_stmt (basic_block bb)
2657 gimple_stmt_iterator i = gsi_start_bb (bb);
2658 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2661 /* Return the last statement in basic block BB. */
2663 gimple
2664 last_stmt (basic_block bb)
2666 gimple_stmt_iterator b = gsi_last_bb (bb);
2667 return !gsi_end_p (b) ? gsi_stmt (b) : NULL;
2670 /* Return the last statement of an otherwise empty block. Return NULL
2671 if the block is totally empty, or if it contains more than one
2672 statement. */
2674 gimple
2675 last_and_only_stmt (basic_block bb)
2677 gimple_stmt_iterator i = gsi_last_bb (bb);
2678 gimple last, prev;
2680 if (gsi_end_p (i))
2681 return NULL;
2683 last = gsi_stmt (i);
2684 gsi_prev (&i);
2685 if (gsi_end_p (i))
2686 return last;
2688 /* Empty statements should no longer appear in the instruction stream.
2689 Everything that might have appeared before should be deleted by
2690 remove_useless_stmts, and the optimizers should just gsi_remove
2691 instead of smashing with build_empty_stmt.
2693 Thus the only thing that should appear here in a block containing
2694 one executable statement is a label. */
2695 prev = gsi_stmt (i);
2696 if (gimple_code (prev) == GIMPLE_LABEL)
2697 return last;
2698 else
2699 return NULL;
2702 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2704 static void
2705 reinstall_phi_args (edge new_edge, edge old_edge)
2707 edge_var_map_vector v;
2708 edge_var_map *vm;
2709 int i;
2710 gimple_stmt_iterator phis;
2712 v = redirect_edge_var_map_vector (old_edge);
2713 if (!v)
2714 return;
2716 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2717 VEC_iterate (edge_var_map, v, i, vm) && !gsi_end_p (phis);
2718 i++, gsi_next (&phis))
2720 gimple phi = gsi_stmt (phis);
2721 tree result = redirect_edge_var_map_result (vm);
2722 tree arg = redirect_edge_var_map_def (vm);
2724 gcc_assert (result == gimple_phi_result (phi));
2726 add_phi_arg (phi, arg, new_edge);
2729 redirect_edge_var_map_clear (old_edge);
2732 /* Returns the basic block after which the new basic block created
2733 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2734 near its "logical" location. This is of most help to humans looking
2735 at debugging dumps. */
2737 static basic_block
2738 split_edge_bb_loc (edge edge_in)
2740 basic_block dest = edge_in->dest;
2742 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
2743 return edge_in->src;
2744 else
2745 return dest->prev_bb;
2748 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2749 Abort on abnormal edges. */
2751 static basic_block
2752 gimple_split_edge (edge edge_in)
2754 basic_block new_bb, after_bb, dest;
2755 edge new_edge, e;
2757 /* Abnormal edges cannot be split. */
2758 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2760 dest = edge_in->dest;
2762 after_bb = split_edge_bb_loc (edge_in);
2764 new_bb = create_empty_bb (after_bb);
2765 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2766 new_bb->count = edge_in->count;
2767 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2768 new_edge->probability = REG_BR_PROB_BASE;
2769 new_edge->count = edge_in->count;
2771 e = redirect_edge_and_branch (edge_in, new_bb);
2772 gcc_assert (e == edge_in);
2773 reinstall_phi_args (new_edge, e);
2775 return new_bb;
2778 /* Callback for walk_tree, check that all elements with address taken are
2779 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2780 inside a PHI node. */
2782 static tree
2783 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2785 tree t = *tp, x;
2787 if (TYPE_P (t))
2788 *walk_subtrees = 0;
2790 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2791 #define CHECK_OP(N, MSG) \
2792 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2793 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2795 switch (TREE_CODE (t))
2797 case SSA_NAME:
2798 if (SSA_NAME_IN_FREE_LIST (t))
2800 error ("SSA name in freelist but still referenced");
2801 return *tp;
2803 break;
2805 case ASSERT_EXPR:
2806 x = fold (ASSERT_EXPR_COND (t));
2807 if (x == boolean_false_node)
2809 error ("ASSERT_EXPR with an always-false condition");
2810 return *tp;
2812 break;
2814 case MODIFY_EXPR:
2815 x = TREE_OPERAND (t, 0);
2816 if (TREE_CODE (x) == BIT_FIELD_REF
2817 && is_gimple_reg (TREE_OPERAND (x, 0)))
2819 error ("GIMPLE register modified with BIT_FIELD_REF");
2820 return t;
2822 break;
2824 case ADDR_EXPR:
2826 bool old_constant;
2827 bool old_side_effects;
2828 bool new_constant;
2829 bool new_side_effects;
2831 gcc_assert (is_gimple_address (t));
2833 old_constant = TREE_CONSTANT (t);
2834 old_side_effects = TREE_SIDE_EFFECTS (t);
2836 recompute_tree_invariant_for_addr_expr (t);
2837 new_side_effects = TREE_SIDE_EFFECTS (t);
2838 new_constant = TREE_CONSTANT (t);
2840 if (old_constant != new_constant)
2842 error ("constant not recomputed when ADDR_EXPR changed");
2843 return t;
2845 if (old_side_effects != new_side_effects)
2847 error ("side effects not recomputed when ADDR_EXPR changed");
2848 return t;
2851 /* Skip any references (they will be checked when we recurse down the
2852 tree) and ensure that any variable used as a prefix is marked
2853 addressable. */
2854 for (x = TREE_OPERAND (t, 0);
2855 handled_component_p (x);
2856 x = TREE_OPERAND (x, 0))
2859 if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
2860 return NULL;
2861 if (!TREE_ADDRESSABLE (x))
2863 error ("address taken, but ADDRESSABLE bit not set");
2864 return x;
2867 break;
2870 case COND_EXPR:
2871 x = COND_EXPR_COND (t);
2872 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2874 error ("non-integral used in condition");
2875 return x;
2877 if (!is_gimple_condexpr (x))
2879 error ("invalid conditional operand");
2880 return x;
2882 break;
2884 case NON_LVALUE_EXPR:
2885 gcc_unreachable ();
2887 CASE_CONVERT:
2888 case FIX_TRUNC_EXPR:
2889 case FLOAT_EXPR:
2890 case NEGATE_EXPR:
2891 case ABS_EXPR:
2892 case BIT_NOT_EXPR:
2893 case TRUTH_NOT_EXPR:
2894 CHECK_OP (0, "invalid operand to unary operator");
2895 break;
2897 case REALPART_EXPR:
2898 case IMAGPART_EXPR:
2899 case COMPONENT_REF:
2900 case ARRAY_REF:
2901 case ARRAY_RANGE_REF:
2902 case BIT_FIELD_REF:
2903 case VIEW_CONVERT_EXPR:
2904 /* We have a nest of references. Verify that each of the operands
2905 that determine where to reference is either a constant or a variable,
2906 verify that the base is valid, and then show we've already checked
2907 the subtrees. */
2908 while (handled_component_p (t))
2910 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2911 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2912 else if (TREE_CODE (t) == ARRAY_REF
2913 || TREE_CODE (t) == ARRAY_RANGE_REF)
2915 CHECK_OP (1, "invalid array index");
2916 if (TREE_OPERAND (t, 2))
2917 CHECK_OP (2, "invalid array lower bound");
2918 if (TREE_OPERAND (t, 3))
2919 CHECK_OP (3, "invalid array stride");
2921 else if (TREE_CODE (t) == BIT_FIELD_REF)
2923 if (!host_integerp (TREE_OPERAND (t, 1), 1)
2924 || !host_integerp (TREE_OPERAND (t, 2), 1))
2926 error ("invalid position or size operand to BIT_FIELD_REF");
2927 return t;
2929 else if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2930 && (TYPE_PRECISION (TREE_TYPE (t))
2931 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2933 error ("integral result type precision does not match "
2934 "field size of BIT_FIELD_REF");
2935 return t;
2937 if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2938 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2939 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2941 error ("mode precision of non-integral result does not "
2942 "match field size of BIT_FIELD_REF");
2943 return t;
2947 t = TREE_OPERAND (t, 0);
2950 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2952 error ("invalid reference prefix");
2953 return t;
2955 *walk_subtrees = 0;
2956 break;
2957 case PLUS_EXPR:
2958 case MINUS_EXPR:
2959 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
2960 POINTER_PLUS_EXPR. */
2961 if (POINTER_TYPE_P (TREE_TYPE (t)))
2963 error ("invalid operand to plus/minus, type is a pointer");
2964 return t;
2966 CHECK_OP (0, "invalid operand to binary operator");
2967 CHECK_OP (1, "invalid operand to binary operator");
2968 break;
2970 case POINTER_PLUS_EXPR:
2971 /* Check to make sure the first operand is a pointer or reference type. */
2972 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
2974 error ("invalid operand to pointer plus, first operand is not a pointer");
2975 return t;
2977 /* Check to make sure the second operand is an integer with type of
2978 sizetype. */
2979 if (!useless_type_conversion_p (sizetype,
2980 TREE_TYPE (TREE_OPERAND (t, 1))))
2982 error ("invalid operand to pointer plus, second operand is not an "
2983 "integer with type of sizetype.");
2984 return t;
2986 /* FALLTHROUGH */
2987 case LT_EXPR:
2988 case LE_EXPR:
2989 case GT_EXPR:
2990 case GE_EXPR:
2991 case EQ_EXPR:
2992 case NE_EXPR:
2993 case UNORDERED_EXPR:
2994 case ORDERED_EXPR:
2995 case UNLT_EXPR:
2996 case UNLE_EXPR:
2997 case UNGT_EXPR:
2998 case UNGE_EXPR:
2999 case UNEQ_EXPR:
3000 case LTGT_EXPR:
3001 case MULT_EXPR:
3002 case TRUNC_DIV_EXPR:
3003 case CEIL_DIV_EXPR:
3004 case FLOOR_DIV_EXPR:
3005 case ROUND_DIV_EXPR:
3006 case TRUNC_MOD_EXPR:
3007 case CEIL_MOD_EXPR:
3008 case FLOOR_MOD_EXPR:
3009 case ROUND_MOD_EXPR:
3010 case RDIV_EXPR:
3011 case EXACT_DIV_EXPR:
3012 case MIN_EXPR:
3013 case MAX_EXPR:
3014 case LSHIFT_EXPR:
3015 case RSHIFT_EXPR:
3016 case LROTATE_EXPR:
3017 case RROTATE_EXPR:
3018 case BIT_IOR_EXPR:
3019 case BIT_XOR_EXPR:
3020 case BIT_AND_EXPR:
3021 CHECK_OP (0, "invalid operand to binary operator");
3022 CHECK_OP (1, "invalid operand to binary operator");
3023 break;
3025 case CONSTRUCTOR:
3026 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3027 *walk_subtrees = 0;
3028 break;
3030 default:
3031 break;
3033 return NULL;
3035 #undef CHECK_OP
3039 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3040 Returns true if there is an error, otherwise false. */
3042 static bool
3043 verify_types_in_gimple_min_lval (tree expr)
3045 tree op;
3047 if (is_gimple_id (expr))
3048 return false;
3050 if (!INDIRECT_REF_P (expr)
3051 && TREE_CODE (expr) != TARGET_MEM_REF)
3053 error ("invalid expression for min lvalue");
3054 return true;
3057 /* TARGET_MEM_REFs are strange beasts. */
3058 if (TREE_CODE (expr) == TARGET_MEM_REF)
3059 return false;
3061 op = TREE_OPERAND (expr, 0);
3062 if (!is_gimple_val (op))
3064 error ("invalid operand in indirect reference");
3065 debug_generic_stmt (op);
3066 return true;
3068 if (!useless_type_conversion_p (TREE_TYPE (expr),
3069 TREE_TYPE (TREE_TYPE (op))))
3071 error ("type mismatch in indirect reference");
3072 debug_generic_stmt (TREE_TYPE (expr));
3073 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3074 return true;
3077 return false;
3080 /* Verify if EXPR is a valid GIMPLE reference expression. Returns true
3081 if there is an error, otherwise false. */
3083 static bool
3084 verify_types_in_gimple_reference (tree expr)
3086 while (handled_component_p (expr))
3088 tree op = TREE_OPERAND (expr, 0);
3090 if (TREE_CODE (expr) == ARRAY_REF
3091 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3093 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3094 || (TREE_OPERAND (expr, 2)
3095 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3096 || (TREE_OPERAND (expr, 3)
3097 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3099 error ("invalid operands to array reference");
3100 debug_generic_stmt (expr);
3101 return true;
3105 /* Verify if the reference array element types are compatible. */
3106 if (TREE_CODE (expr) == ARRAY_REF
3107 && !useless_type_conversion_p (TREE_TYPE (expr),
3108 TREE_TYPE (TREE_TYPE (op))))
3110 error ("type mismatch in array reference");
3111 debug_generic_stmt (TREE_TYPE (expr));
3112 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3113 return true;
3115 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3116 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3117 TREE_TYPE (TREE_TYPE (op))))
3119 error ("type mismatch in array range reference");
3120 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3121 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3122 return true;
3125 if ((TREE_CODE (expr) == REALPART_EXPR
3126 || TREE_CODE (expr) == IMAGPART_EXPR)
3127 && !useless_type_conversion_p (TREE_TYPE (expr),
3128 TREE_TYPE (TREE_TYPE (op))))
3130 error ("type mismatch in real/imagpart reference");
3131 debug_generic_stmt (TREE_TYPE (expr));
3132 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3133 return true;
3136 if (TREE_CODE (expr) == COMPONENT_REF
3137 && !useless_type_conversion_p (TREE_TYPE (expr),
3138 TREE_TYPE (TREE_OPERAND (expr, 1))))
3140 error ("type mismatch in component reference");
3141 debug_generic_stmt (TREE_TYPE (expr));
3142 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3143 return true;
3146 /* For VIEW_CONVERT_EXPRs which are allowed here, too, there
3147 is nothing to verify. Gross mismatches at most invoke
3148 undefined behavior. */
3149 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3150 && !handled_component_p (op))
3151 return false;
3153 expr = op;
3156 return verify_types_in_gimple_min_lval (expr);
3159 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3160 list of pointer-to types that is trivially convertible to DEST. */
3162 static bool
3163 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3165 tree src;
3167 if (!TYPE_POINTER_TO (src_obj))
3168 return true;
3170 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3171 if (useless_type_conversion_p (dest, src))
3172 return true;
3174 return false;
3177 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3178 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3180 static bool
3181 valid_fixed_convert_types_p (tree type1, tree type2)
3183 return (FIXED_POINT_TYPE_P (type1)
3184 && (INTEGRAL_TYPE_P (type2)
3185 || SCALAR_FLOAT_TYPE_P (type2)
3186 || FIXED_POINT_TYPE_P (type2)));
3189 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3190 is a problem, otherwise false. */
3192 static bool
3193 verify_gimple_call (gimple stmt)
3195 tree fn = gimple_call_fn (stmt);
3196 tree fntype;
3198 if (!POINTER_TYPE_P (TREE_TYPE (fn))
3199 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3200 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE))
3202 error ("non-function in gimple call");
3203 return true;
3206 if (gimple_call_lhs (stmt)
3207 && !is_gimple_lvalue (gimple_call_lhs (stmt)))
3209 error ("invalid LHS in gimple call");
3210 return true;
3213 fntype = TREE_TYPE (TREE_TYPE (fn));
3214 if (gimple_call_lhs (stmt)
3215 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3216 TREE_TYPE (fntype))
3217 /* ??? At least C++ misses conversions at assignments from
3218 void * call results.
3219 ??? Java is completely off. Especially with functions
3220 returning java.lang.Object.
3221 For now simply allow arbitrary pointer type conversions. */
3222 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3223 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3225 error ("invalid conversion in gimple call");
3226 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3227 debug_generic_stmt (TREE_TYPE (fntype));
3228 return true;
3231 /* ??? The C frontend passes unpromoted arguments in case it
3232 didn't see a function declaration before the call. So for now
3233 leave the call arguments unverified. Once we gimplify
3234 unit-at-a-time we have a chance to fix this. */
3236 return false;
3239 /* Verifies the gimple comparison with the result type TYPE and
3240 the operands OP0 and OP1. */
3242 static bool
3243 verify_gimple_comparison (tree type, tree op0, tree op1)
3245 tree op0_type = TREE_TYPE (op0);
3246 tree op1_type = TREE_TYPE (op1);
3248 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3250 error ("invalid operands in gimple comparison");
3251 return true;
3254 /* For comparisons we do not have the operations type as the
3255 effective type the comparison is carried out in. Instead
3256 we require that either the first operand is trivially
3257 convertible into the second, or the other way around.
3258 The resulting type of a comparison may be any integral type.
3259 Because we special-case pointers to void we allow
3260 comparisons of pointers with the same mode as well. */
3261 if ((!useless_type_conversion_p (op0_type, op1_type)
3262 && !useless_type_conversion_p (op1_type, op0_type)
3263 && (!POINTER_TYPE_P (op0_type)
3264 || !POINTER_TYPE_P (op1_type)
3265 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3266 || !INTEGRAL_TYPE_P (type))
3268 error ("type mismatch in comparison expression");
3269 debug_generic_expr (type);
3270 debug_generic_expr (op0_type);
3271 debug_generic_expr (op1_type);
3272 return true;
3275 return false;
3278 /* Verify a gimple assignment statement STMT with an unary rhs.
3279 Returns true if anything is wrong. */
3281 static bool
3282 verify_gimple_assign_unary (gimple stmt)
3284 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3285 tree lhs = gimple_assign_lhs (stmt);
3286 tree lhs_type = TREE_TYPE (lhs);
3287 tree rhs1 = gimple_assign_rhs1 (stmt);
3288 tree rhs1_type = TREE_TYPE (rhs1);
3290 if (!is_gimple_reg (lhs)
3291 && !(optimize == 0
3292 && TREE_CODE (lhs_type) == COMPLEX_TYPE))
3294 error ("non-register as LHS of unary operation");
3295 return true;
3298 if (!is_gimple_val (rhs1))
3300 error ("invalid operand in unary operation");
3301 return true;
3304 /* First handle conversions. */
3305 switch (rhs_code)
3307 CASE_CONVERT:
3309 /* Allow conversions between integral types and pointers only if
3310 there is no sign or zero extension involved.
3311 For targets were the precision of sizetype doesn't match that
3312 of pointers we need to allow arbitrary conversions from and
3313 to sizetype. */
3314 if ((POINTER_TYPE_P (lhs_type)
3315 && INTEGRAL_TYPE_P (rhs1_type)
3316 && (TYPE_PRECISION (lhs_type) >= TYPE_PRECISION (rhs1_type)
3317 || rhs1_type == sizetype))
3318 || (POINTER_TYPE_P (rhs1_type)
3319 && INTEGRAL_TYPE_P (lhs_type)
3320 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3321 || lhs_type == sizetype)))
3322 return false;
3324 /* Allow conversion from integer to offset type and vice versa. */
3325 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3326 && TREE_CODE (rhs1_type) == INTEGER_TYPE)
3327 || (TREE_CODE (lhs_type) == INTEGER_TYPE
3328 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3329 return false;
3331 /* Otherwise assert we are converting between types of the
3332 same kind. */
3333 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3335 error ("invalid types in nop conversion");
3336 debug_generic_expr (lhs_type);
3337 debug_generic_expr (rhs1_type);
3338 return true;
3341 return false;
3344 case FIXED_CONVERT_EXPR:
3346 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3347 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3349 error ("invalid types in fixed-point conversion");
3350 debug_generic_expr (lhs_type);
3351 debug_generic_expr (rhs1_type);
3352 return true;
3355 return false;
3358 case FLOAT_EXPR:
3360 if (!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3362 error ("invalid types in conversion to floating point");
3363 debug_generic_expr (lhs_type);
3364 debug_generic_expr (rhs1_type);
3365 return true;
3368 return false;
3371 case FIX_TRUNC_EXPR:
3373 if (!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3375 error ("invalid types in conversion to integer");
3376 debug_generic_expr (lhs_type);
3377 debug_generic_expr (rhs1_type);
3378 return true;
3381 return false;
3384 case TRUTH_NOT_EXPR:
3388 case NEGATE_EXPR:
3389 case ABS_EXPR:
3390 case BIT_NOT_EXPR:
3391 case PAREN_EXPR:
3392 case NON_LVALUE_EXPR:
3393 case CONJ_EXPR:
3394 case REDUC_MAX_EXPR:
3395 case REDUC_MIN_EXPR:
3396 case REDUC_PLUS_EXPR:
3397 case VEC_UNPACK_HI_EXPR:
3398 case VEC_UNPACK_LO_EXPR:
3399 case VEC_UNPACK_FLOAT_HI_EXPR:
3400 case VEC_UNPACK_FLOAT_LO_EXPR:
3401 break;
3403 default:
3404 gcc_unreachable ();
3407 /* For the remaining codes assert there is no conversion involved. */
3408 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3410 error ("non-trivial conversion in unary operation");
3411 debug_generic_expr (lhs_type);
3412 debug_generic_expr (rhs1_type);
3413 return true;
3416 return false;
3419 /* Verify a gimple assignment statement STMT with a binary rhs.
3420 Returns true if anything is wrong. */
3422 static bool
3423 verify_gimple_assign_binary (gimple stmt)
3425 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3426 tree lhs = gimple_assign_lhs (stmt);
3427 tree lhs_type = TREE_TYPE (lhs);
3428 tree rhs1 = gimple_assign_rhs1 (stmt);
3429 tree rhs1_type = TREE_TYPE (rhs1);
3430 tree rhs2 = gimple_assign_rhs2 (stmt);
3431 tree rhs2_type = TREE_TYPE (rhs2);
3433 if (!is_gimple_reg (lhs)
3434 && !(optimize == 0
3435 && TREE_CODE (lhs_type) == COMPLEX_TYPE))
3437 error ("non-register as LHS of binary operation");
3438 return true;
3441 if (!is_gimple_val (rhs1)
3442 || !is_gimple_val (rhs2))
3444 error ("invalid operands in binary operation");
3445 return true;
3448 /* First handle operations that involve different types. */
3449 switch (rhs_code)
3451 case COMPLEX_EXPR:
3453 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3454 || !(INTEGRAL_TYPE_P (rhs1_type)
3455 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3456 || !(INTEGRAL_TYPE_P (rhs2_type)
3457 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3459 error ("type mismatch in complex expression");
3460 debug_generic_expr (lhs_type);
3461 debug_generic_expr (rhs1_type);
3462 debug_generic_expr (rhs2_type);
3463 return true;
3466 return false;
3469 case LSHIFT_EXPR:
3470 case RSHIFT_EXPR:
3471 case LROTATE_EXPR:
3472 case RROTATE_EXPR:
3474 if (!INTEGRAL_TYPE_P (rhs1_type)
3475 || !INTEGRAL_TYPE_P (rhs2_type)
3476 || !useless_type_conversion_p (lhs_type, rhs1_type))
3478 error ("type mismatch in shift expression");
3479 debug_generic_expr (lhs_type);
3480 debug_generic_expr (rhs1_type);
3481 debug_generic_expr (rhs2_type);
3482 return true;
3485 return false;
3488 case VEC_LSHIFT_EXPR:
3489 case VEC_RSHIFT_EXPR:
3491 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3492 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3493 || (!INTEGRAL_TYPE_P (rhs2_type)
3494 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3495 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3496 || !useless_type_conversion_p (lhs_type, rhs1_type))
3498 error ("type mismatch in vector shift expression");
3499 debug_generic_expr (lhs_type);
3500 debug_generic_expr (rhs1_type);
3501 debug_generic_expr (rhs2_type);
3502 return true;
3505 return false;
3508 case POINTER_PLUS_EXPR:
3510 if (!POINTER_TYPE_P (rhs1_type)
3511 || !useless_type_conversion_p (lhs_type, rhs1_type)
3512 || !useless_type_conversion_p (sizetype, rhs2_type))
3514 error ("type mismatch in pointer plus expression");
3515 debug_generic_stmt (lhs_type);
3516 debug_generic_stmt (rhs1_type);
3517 debug_generic_stmt (rhs2_type);
3518 return true;
3521 return false;
3524 case TRUTH_ANDIF_EXPR:
3525 case TRUTH_ORIF_EXPR:
3526 gcc_unreachable ();
3528 case TRUTH_AND_EXPR:
3529 case TRUTH_OR_EXPR:
3530 case TRUTH_XOR_EXPR:
3532 /* We allow any kind of integral typed argument and result. */
3533 if (!INTEGRAL_TYPE_P (rhs1_type)
3534 || !INTEGRAL_TYPE_P (rhs2_type)
3535 || !INTEGRAL_TYPE_P (lhs_type))
3537 error ("type mismatch in binary truth expression");
3538 debug_generic_expr (lhs_type);
3539 debug_generic_expr (rhs1_type);
3540 debug_generic_expr (rhs2_type);
3541 return true;
3544 return false;
3547 case LT_EXPR:
3548 case LE_EXPR:
3549 case GT_EXPR:
3550 case GE_EXPR:
3551 case EQ_EXPR:
3552 case NE_EXPR:
3553 case UNORDERED_EXPR:
3554 case ORDERED_EXPR:
3555 case UNLT_EXPR:
3556 case UNLE_EXPR:
3557 case UNGT_EXPR:
3558 case UNGE_EXPR:
3559 case UNEQ_EXPR:
3560 case LTGT_EXPR:
3561 /* Comparisons are also binary, but the result type is not
3562 connected to the operand types. */
3563 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3565 case PLUS_EXPR:
3566 case MINUS_EXPR:
3568 if (POINTER_TYPE_P (lhs_type)
3569 || POINTER_TYPE_P (rhs1_type)
3570 || POINTER_TYPE_P (rhs2_type))
3572 error ("invalid (pointer) operands to plus/minus");
3573 return true;
3576 /* Continue with generic binary expression handling. */
3577 break;
3580 case MULT_EXPR:
3581 case TRUNC_DIV_EXPR:
3582 case CEIL_DIV_EXPR:
3583 case FLOOR_DIV_EXPR:
3584 case ROUND_DIV_EXPR:
3585 case TRUNC_MOD_EXPR:
3586 case CEIL_MOD_EXPR:
3587 case FLOOR_MOD_EXPR:
3588 case ROUND_MOD_EXPR:
3589 case RDIV_EXPR:
3590 case EXACT_DIV_EXPR:
3591 case MIN_EXPR:
3592 case MAX_EXPR:
3593 case BIT_IOR_EXPR:
3594 case BIT_XOR_EXPR:
3595 case BIT_AND_EXPR:
3596 case WIDEN_SUM_EXPR:
3597 case WIDEN_MULT_EXPR:
3598 case VEC_WIDEN_MULT_HI_EXPR:
3599 case VEC_WIDEN_MULT_LO_EXPR:
3600 case VEC_PACK_TRUNC_EXPR:
3601 case VEC_PACK_SAT_EXPR:
3602 case VEC_PACK_FIX_TRUNC_EXPR:
3603 case VEC_EXTRACT_EVEN_EXPR:
3604 case VEC_EXTRACT_ODD_EXPR:
3605 case VEC_INTERLEAVE_HIGH_EXPR:
3606 case VEC_INTERLEAVE_LOW_EXPR:
3607 /* Continue with generic binary expression handling. */
3608 break;
3610 default:
3611 gcc_unreachable ();
3614 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3615 || !useless_type_conversion_p (lhs_type, rhs2_type))
3617 error ("type mismatch in binary expression");
3618 debug_generic_stmt (lhs_type);
3619 debug_generic_stmt (rhs1_type);
3620 debug_generic_stmt (rhs2_type);
3621 return true;
3624 return false;
3627 /* Verify a gimple assignment statement STMT with a single rhs.
3628 Returns true if anything is wrong. */
3630 static bool
3631 verify_gimple_assign_single (gimple stmt)
3633 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3634 tree lhs = gimple_assign_lhs (stmt);
3635 tree lhs_type = TREE_TYPE (lhs);
3636 tree rhs1 = gimple_assign_rhs1 (stmt);
3637 tree rhs1_type = TREE_TYPE (rhs1);
3638 bool res = false;
/* A single-rhs assignment is a plain copy: the types must agree
   trivially (no implied conversion). */
3640 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3642 error ("non-trivial conversion at assignment");
3643 debug_generic_expr (lhs_type);
3644 debug_generic_expr (rhs1_type);
3645 return true;
/* If the lhs is itself a memory reference (COMPONENT_REF etc.),
   verify its internal type consistency too; accumulate into RES so
   we still check the rhs below. */
3648 if (handled_component_p (lhs))
3649 res |= verify_types_in_gimple_reference (lhs);
3651 /* Special codes we cannot handle via their class. */
3652 switch (rhs_code)
3654 case ADDR_EXPR:
3656 tree op = TREE_OPERAND (rhs1, 0);
3657 if (!is_gimple_addressable (op))
3659 error ("invalid operand in unary expression");
3660 return true;
3663 if (!one_pointer_to_useless_type_conversion_p (lhs_type, TREE_TYPE (op))
3664 /* FIXME: a longstanding wart, &a == &a[0]. */
3665 && (TREE_CODE (TREE_TYPE (op)) != ARRAY_TYPE
3666 || !one_pointer_to_useless_type_conversion_p (lhs_type,
3667 TREE_TYPE (TREE_TYPE (op)))))
3669 error ("type mismatch in address expression");
3670 debug_generic_stmt (lhs_type);
3671 debug_generic_stmt (TYPE_POINTER_TO (TREE_TYPE (op)));
3672 return true;
3675 return verify_types_in_gimple_reference (op);
3678 /* tcc_reference */
3679 case COMPONENT_REF:
3680 case BIT_FIELD_REF:
3681 case INDIRECT_REF:
3682 case ALIGN_INDIRECT_REF:
3683 case MISALIGNED_INDIRECT_REF:
3684 case ARRAY_REF:
3685 case ARRAY_RANGE_REF:
3686 case VIEW_CONVERT_EXPR:
3687 case REALPART_EXPR:
3688 case IMAGPART_EXPR:
3689 case TARGET_MEM_REF:
/* A memory load on the rhs: a register-typed store into memory must
   not itself have a memory reference as its rhs in valid GIMPLE. */
3690 if (!is_gimple_reg (lhs)
3691 && is_gimple_reg_type (TREE_TYPE (lhs)))
3693 error ("invalid rhs for gimple memory store");
3694 debug_generic_stmt (lhs);
3695 debug_generic_stmt (rhs1);
3696 return true;
3698 return res || verify_types_in_gimple_reference (rhs1);
3700 /* tcc_constant */
3701 case SSA_NAME:
3702 case INTEGER_CST:
3703 case REAL_CST:
3704 case FIXED_CST:
3705 case COMPLEX_CST:
3706 case VECTOR_CST:
3707 case STRING_CST:
3708 return res;
3710 /* tcc_declaration */
3711 case CONST_DECL:
3712 return res;
3713 case VAR_DECL:
3714 case PARM_DECL:
/* A memory-to-memory copy of a register type is not valid GIMPLE;
   one side must be a register. */
3715 if (!is_gimple_reg (lhs)
3716 && !is_gimple_reg (rhs1)
3717 && is_gimple_reg_type (TREE_TYPE (lhs)))
3719 error ("invalid rhs for gimple memory store");
3720 debug_generic_stmt (lhs);
3721 debug_generic_stmt (rhs1);
3722 return true;
3724 return res;
3726 case COND_EXPR:
3727 case CONSTRUCTOR:
3728 case OBJ_TYPE_REF:
3729 case ASSERT_EXPR:
3730 case WITH_SIZE_EXPR:
3731 case EXC_PTR_EXPR:
3732 case FILTER_EXPR:
3733 case POLYNOMIAL_CHREC:
3734 case DOT_PROD_EXPR:
3735 case VEC_COND_EXPR:
3736 case REALIGN_LOAD_EXPR:
3737 /* FIXME. */
3738 return res;
3740 default:;
3743 return res;
3746 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
3747 is a problem, otherwise false. */
3749 static bool
3750 verify_gimple_assign (gimple stmt)
3752 switch (gimple_assign_rhs_class (stmt))
3754 case GIMPLE_SINGLE_RHS:
3755 return verify_gimple_assign_single (stmt);
3757 case GIMPLE_UNARY_RHS:
3758 return verify_gimple_assign_unary (stmt);
3760 case GIMPLE_BINARY_RHS:
3761 return verify_gimple_assign_binary (stmt);
3763 default:
3764 gcc_unreachable ();
3768 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
3769 is a problem, otherwise false. */
3771 static bool
3772 verify_gimple_return (gimple stmt)
3774 tree op = gimple_return_retval (stmt);
/* Declared return type of the current function. */
3775 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
3777 /* We cannot test for present return values as we do not fix up missing
3778 return values from the original source. */
3779 if (op == NULL)
3780 return false;
/* The returned operand must be a GIMPLE value or the RESULT_DECL. */
3782 if (!is_gimple_val (op)
3783 && TREE_CODE (op) != RESULT_DECL)
3785 error ("invalid operand in return statement");
3786 debug_generic_stmt (op);
3787 return true;
3790 if (!useless_type_conversion_p (restype, TREE_TYPE (op))
3791 /* ??? With C++ we can have the situation that the result
3792 decl is a reference type while the return type is an aggregate. */
3793 && !(TREE_CODE (op) == RESULT_DECL
3794 && TREE_CODE (TREE_TYPE (op)) == REFERENCE_TYPE
3795 && useless_type_conversion_p (restype, TREE_TYPE (TREE_TYPE (op)))))
3797 error ("invalid conversion in return statement");
3798 debug_generic_stmt (restype);
3799 debug_generic_stmt (TREE_TYPE (op));
3800 return true;
3803 return false;
3807 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
3808 is a problem, otherwise false. */
3810 static bool
3811 verify_gimple_goto (gimple stmt)
3813 tree dest = gimple_goto_dest (stmt);
3815 /* ??? We have two canonical forms of direct goto destinations, a
3816 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
3817 if (TREE_CODE (dest) != LABEL_DECL
3818 && (!is_gimple_val (dest)
3819 || !POINTER_TYPE_P (TREE_TYPE (dest))))
3821 error ("goto destination is neither a label nor a pointer");
3822 return true;
3825 return false;
3828 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
3829 is a problem, otherwise false. */
3831 static bool
3832 verify_gimple_switch (gimple stmt)
3834 if (!is_gimple_val (gimple_switch_index (stmt)))
3836 error ("invalid operand to switch statement");
3837 debug_generic_stmt (gimple_switch_index (stmt));
3838 return true;
3841 return false;
3845 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
3846 and false otherwise. */
3848 static bool
3849 verify_gimple_phi (gimple stmt)
3851 tree type = TREE_TYPE (gimple_phi_result (stmt));
3852 unsigned i;
3854 if (!is_gimple_variable (gimple_phi_result (stmt)))
3856 error ("Invalid PHI result");
3857 return true;
3860 for (i = 0; i < gimple_phi_num_args (stmt); i++)
3862 tree arg = gimple_phi_arg_def (stmt, i);
/* Register PHIs take GIMPLE values as arguments; virtual (memory)
   PHIs take addressable operands. */
3863 if ((is_gimple_reg (gimple_phi_result (stmt))
3864 && !is_gimple_val (arg))
3865 || (!is_gimple_reg (gimple_phi_result (stmt))
3866 && !is_gimple_addressable (arg)))
3868 error ("Invalid PHI argument");
3869 debug_generic_stmt (arg);
3870 return true;
/* Every argument must be trivially convertible to the result type. */
3872 if (!useless_type_conversion_p (type, TREE_TYPE (arg)))
3874 error ("Incompatible types in PHI argument");
3875 debug_generic_stmt (type);
3876 debug_generic_stmt (TREE_TYPE (arg));
3877 return true;
3881 return false;
3885 /* Verify the GIMPLE statement STMT. Returns true if there is an
3886 error, otherwise false. */
3888 static bool
3889 verify_types_in_gimple_stmt (gimple stmt)
3891 if (is_gimple_omp (stmt))
3893 /* OpenMP directives are validated by the FE and never operated
3894 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
3895 non-gimple expressions when the main index variable has had
3896 its address taken. This does not affect the loop itself
3897 because the header of an GIMPLE_OMP_FOR is merely used to determine
3898 how to setup the parallel iteration. */
3899 return false;
/* Dispatch to the code-specific checker. */
3902 switch (gimple_code (stmt))
3904 case GIMPLE_ASSIGN:
3905 return verify_gimple_assign (stmt);
3907 case GIMPLE_LABEL:
3908 return TREE_CODE (gimple_label_label (stmt)) != LABEL_DECL;
3910 case GIMPLE_CALL:
3911 return verify_gimple_call (stmt);
3913 case GIMPLE_COND:
/* A COND's operands form a boolean-valued comparison. */
3914 return verify_gimple_comparison (boolean_type_node,
3915 gimple_cond_lhs (stmt),
3916 gimple_cond_rhs (stmt));
3918 case GIMPLE_GOTO:
3919 return verify_gimple_goto (stmt);
3921 case GIMPLE_SWITCH:
3922 return verify_gimple_switch (stmt);
3924 case GIMPLE_RETURN:
3925 return verify_gimple_return (stmt);
3927 case GIMPLE_ASM:
3928 return false;
3930 case GIMPLE_CHANGE_DYNAMIC_TYPE:
3931 return (!is_gimple_val (gimple_cdt_location (stmt))
3932 || !POINTER_TYPE_P (TREE_TYPE (gimple_cdt_location (stmt))));
3934 case GIMPLE_PHI:
3935 return verify_gimple_phi (stmt);
3937 /* Tuples that do not have tree operands. */
3938 case GIMPLE_NOP:
3939 case GIMPLE_RESX:
3940 case GIMPLE_PREDICT:
3941 return false;
3943 default:
3944 gcc_unreachable ();
3948 /* Verify the GIMPLE statements inside the sequence STMTS. */
3950 static bool
3951 verify_types_in_gimple_seq_2 (gimple_seq stmts)
3953 gimple_stmt_iterator ittr;
3954 bool err = false;
3956 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
3958 gimple stmt = gsi_stmt (ittr);
/* Recurse into the nested sequences of structured statements;
   everything else is checked directly. */
3960 switch (gimple_code (stmt))
3962 case GIMPLE_BIND:
3963 err |= verify_types_in_gimple_seq_2 (gimple_bind_body (stmt));
3964 break;
3966 case GIMPLE_TRY:
3967 err |= verify_types_in_gimple_seq_2 (gimple_try_eval (stmt));
3968 err |= verify_types_in_gimple_seq_2 (gimple_try_cleanup (stmt));
3969 break;
3971 case GIMPLE_EH_FILTER:
3972 err |= verify_types_in_gimple_seq_2 (gimple_eh_filter_failure (stmt));
3973 break;
3975 case GIMPLE_CATCH:
3976 err |= verify_types_in_gimple_seq_2 (gimple_catch_handler (stmt));
3977 break;
3979 default:
3981 bool err2 = verify_types_in_gimple_stmt (stmt);
3982 if (err2)
3983 debug_gimple_stmt (stmt);
3984 err |= err2;
3989 return err;
3993 /* Verify the GIMPLE statements inside the statement list STMTS. */
3995 void
3996 verify_types_in_gimple_seq (gimple_seq stmts)
3998 if (verify_types_in_gimple_seq_2 (stmts))
3999 internal_error ("verify_gimple failed");
4003 /* Verify STMT, return true if STMT is not in GIMPLE form.
4004 TODO: Implement type checking. */
4006 static bool
4007 verify_stmt (gimple_stmt_iterator *gsi)
4009 tree addr;
4010 struct walk_stmt_info wi;
/* Remember whether STMT is the last statement of its block; a
   possibly-throwing statement is only allowed at that position. */
4011 bool last_in_block = gsi_one_before_end_p (*gsi);
4012 gimple stmt = gsi_stmt (*gsi);
4014 if (is_gimple_omp (stmt))
4016 /* OpenMP directives are validated by the FE and never operated
4017 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4018 non-gimple expressions when the main index variable has had
4019 its address taken. This does not affect the loop itself
4020 because the header of an GIMPLE_OMP_FOR is merely used to determine
4021 how to setup the parallel iteration. */
4022 return false;
4025 /* FIXME. The C frontend passes unpromoted arguments in case it
4026 didn't see a function declaration before the call. */
4027 if (is_gimple_call (stmt))
4029 tree decl;
4031 if (!is_gimple_call_addr (gimple_call_fn (stmt)))
4033 error ("invalid function in call statement");
4034 return true;
4037 decl = gimple_call_fndecl (stmt);
/* DECL_LOOPING_CONST_OR_PURE_P only makes sense on functions that
   are also pure or const. */
4038 if (decl
4039 && TREE_CODE (decl) == FUNCTION_DECL
4040 && DECL_LOOPING_CONST_OR_PURE_P (decl)
4041 && (!DECL_PURE_P (decl))
4042 && (!TREE_READONLY (decl))
4044 error ("invalid pure const state for function");
4045 return true;
/* Walk all operands of STMT with verify_expr; a non-NULL return is
   the offending sub-expression. */
4049 memset (&wi, 0, sizeof (wi));
4050 addr = walk_gimple_op (gsi_stmt (*gsi), verify_expr, &wi);
4051 if (addr)
4053 debug_generic_expr (addr);
4054 inform (input_location, "in statement");
4055 debug_gimple_stmt (stmt);
4056 return true;
4059 /* If the statement is marked as part of an EH region, then it is
4060 expected that the statement could throw. Verify that when we
4061 have optimizations that simplify statements such that we prove
4062 that they cannot throw, that we update other data structures
4063 to match. */
4064 if (lookup_stmt_eh_region (stmt) >= 0)
4066 if (!stmt_could_throw_p (stmt))
4068 error ("statement marked for throw, but doesn%'t")
4069 goto fail;
4071 if (!last_in_block && stmt_can_throw_internal (stmt))
4073 error ("statement marked for throw in middle of block");
4074 goto fail;
4078 return false;
4080 fail:
4081 debug_gimple_stmt (stmt);
4082 return true;
4086 /* Return true when the T can be shared. */
4088 static bool
4089 tree_node_can_be_shared (tree t)
4091 if (IS_TYPE_OR_DECL_P (t)
4092 || is_gimple_min_invariant (t)
4093 || TREE_CODE (t) == SSA_NAME
4094 || t == error_mark_node
4095 || TREE_CODE (t) == IDENTIFIER_NODE)
4096 return true;
4098 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4099 return true;
/* Strip reference components with invariant structure (constant array
   indices, field/part selections); a reference rooted at a DECL is
   shareable as a whole. */
4101 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4102 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
4103 || TREE_CODE (t) == COMPONENT_REF
4104 || TREE_CODE (t) == REALPART_EXPR
4105 || TREE_CODE (t) == IMAGPART_EXPR)
4106 t = TREE_OPERAND (t, 0);
4108 if (DECL_P (t))
4109 return true;
4111 return false;
4115 /* Called via walk_gimple_stmt. Verify tree sharing. */
4117 static tree
4118 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4120 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4121 struct pointer_set_t *visited = (struct pointer_set_t *) wi->info;
4123 if (tree_node_can_be_shared (*tp))
4125 *walk_subtrees = false;
4126 return NULL;
4129 if (pointer_set_insert (visited, *tp))
4130 return *tp;
4132 return NULL;
/* Set by verify_eh_throw_stmt_node when a stale EH-table entry is
   found; read by verify_stmts. */
4136 static bool eh_error_found;
/* htab_traverse callback: flag EH-table entries whose statement was
   not visited by the statement walk (i.e. is no longer in the IL).
   DATA is the pointer-set of visited statements. */
4137 static int
4138 verify_eh_throw_stmt_node (void **slot, void *data)
4140 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4141 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4143 if (!pointer_set_contains (visited, node->stmt))
4145 error ("Dead STMT in EH table");
4146 debug_gimple_stmt (node->stmt);
4147 eh_error_found = true;
/* Always return nonzero/zero per htab_traverse protocol; 0 here
   continues the traversal over remaining slots. */
4149 return 0;
4153 /* Verify the GIMPLE statements in every basic block. */
4155 void
4156 verify_stmts (void)
4158 basic_block bb;
4159 gimple_stmt_iterator gsi;
4160 bool err = false;
/* VISITED tracks tree nodes for the sharing check; VISITED_STMTS
   records every statement seen so stale EH-table entries can be
   detected afterwards. */
4161 struct pointer_set_t *visited, *visited_stmts;
4162 tree addr;
4163 struct walk_stmt_info wi;
4165 timevar_push (TV_TREE_STMT_VERIFY);
4166 visited = pointer_set_create ();
4167 visited_stmts = pointer_set_create ();
4169 memset (&wi, 0, sizeof (wi));
4170 wi.info = (void *) visited;
4172 FOR_EACH_BB (bb)
4174 gimple phi;
4175 size_t i;
/* First pass over the PHI nodes of BB. */
4177 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4179 phi = gsi_stmt (gsi);
4180 pointer_set_insert (visited_stmts, phi);
4181 if (gimple_bb (phi) != bb)
4183 error ("gimple_bb (phi) is set to a wrong basic block");
4184 err |= true;
4187 for (i = 0; i < gimple_phi_num_args (phi); i++)
4189 tree t = gimple_phi_arg_def (phi, i);
4190 tree addr;
4192 if (!t)
4194 error ("missing PHI def");
4195 debug_gimple_stmt (phi);
4196 err |= true;
4197 continue;
4199 /* Addressable variables do have SSA_NAMEs but they
4200 are not considered gimple values. */
4201 else if (TREE_CODE (t) != SSA_NAME
4202 && TREE_CODE (t) != FUNCTION_DECL
4203 && !is_gimple_min_invariant (t))
4205 error ("PHI argument is not a GIMPLE value");
4206 debug_gimple_stmt (phi);
4207 debug_generic_expr (t);
4208 err |= true;
4211 addr = walk_tree (&t, verify_node_sharing, visited, NULL);
4212 if (addr)
4214 error ("incorrect sharing of tree nodes");
4215 debug_gimple_stmt (phi);
4216 debug_generic_expr (addr);
4217 err |= true;
/* Second pass over the ordinary statements of BB. */
4222 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
4224 gimple stmt = gsi_stmt (gsi);
4226 if (gimple_code (stmt) == GIMPLE_WITH_CLEANUP_EXPR
4227 || gimple_code (stmt) == GIMPLE_BIND)
4229 error ("invalid GIMPLE statement");
4230 debug_gimple_stmt (stmt);
4231 err |= true;
4234 pointer_set_insert (visited_stmts, stmt);
4236 if (gimple_bb (stmt) != bb)
4238 error ("gimple_bb (stmt) is set to a wrong basic block");
4239 err |= true;
4242 if (gimple_code (stmt) == GIMPLE_LABEL)
4244 tree decl = gimple_label_label (stmt);
4245 int uid = LABEL_DECL_UID (decl);
4247 if (uid == -1
4248 || VEC_index (basic_block, label_to_block_map, uid) != bb)
/* NOTE(review): message ends in ".\n" — GCC diagnostic convention
   forbids trailing period/newline in error(); consider dropping. */
4250 error ("incorrect entry in label_to_block_map.\n");
4251 err |= true;
4255 err |= verify_stmt (&gsi);
4256 addr = walk_gimple_op (gsi_stmt (gsi), verify_node_sharing, &wi);
4257 if (addr)
4259 error ("incorrect sharing of tree nodes");
4260 debug_gimple_stmt (stmt);
4261 debug_generic_expr (addr);
4262 err |= true;
4264 gsi_next (&gsi);
/* Finally cross-check the EH throw-statement table against the set
   of statements actually seen. */
4268 eh_error_found = false;
4269 if (get_eh_throw_stmt_table (cfun))
4270 htab_traverse (get_eh_throw_stmt_table (cfun),
4271 verify_eh_throw_stmt_node,
4272 visited_stmts);
4274 if (err | eh_error_found)
4275 internal_error ("verify_stmts failed");
4277 pointer_set_destroy (visited);
4278 pointer_set_destroy (visited_stmts);
4279 verify_histograms ();
4280 timevar_pop (TV_TREE_STMT_VERIFY);
4284 /* Verifies that the flow information is OK. */
4286 static int
4287 gimple_verify_flow_info (void)
4289 int err = 0;
4290 basic_block bb;
4291 gimple_stmt_iterator gsi;
4292 gimple stmt;
4293 edge e;
4294 edge_iterator ei;
/* The artificial entry/exit blocks must not carry any IL. */
4296 if (ENTRY_BLOCK_PTR->il.gimple)
4298 error ("ENTRY_BLOCK has IL associated with it");
4299 err = 1;
4302 if (EXIT_BLOCK_PTR->il.gimple)
4304 error ("EXIT_BLOCK has IL associated with it");
4305 err = 1;
4308 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4309 if (e->flags & EDGE_FALLTHRU)
4311 error ("fallthru to exit from bb %d", e->src->index);
4312 err = 1;
4315 FOR_EACH_BB (bb)
4317 bool found_ctrl_stmt = false;
4319 stmt = NULL;
4321 /* Skip labels on the start of basic block. */
4322 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4324 tree label;
4325 gimple prev_stmt = stmt;
4327 stmt = gsi_stmt (gsi);
4329 if (gimple_code (stmt) != GIMPLE_LABEL)
4330 break;
/* Labels must satisfy: nonlocal labels come first, each label maps
   back to this block, and belongs to the current function. */
4332 label = gimple_label_label (stmt);
4333 if (prev_stmt && DECL_NONLOCAL (label))
4335 error ("nonlocal label ");
4336 print_generic_expr (stderr, label, 0);
4337 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4338 bb->index);
4339 err = 1;
4342 if (label_to_block (label) != bb)
4344 error ("label ");
4345 print_generic_expr (stderr, label, 0);
4346 fprintf (stderr, " to block does not match in bb %d",
4347 bb->index);
4348 err = 1;
4351 if (decl_function_context (label) != current_function_decl)
4353 error ("label ");
4354 print_generic_expr (stderr, label, 0);
4355 fprintf (stderr, " has incorrect context in bb %d",
4356 bb->index);
4357 err = 1;
4361 /* Verify that body of basic block BB is free of control flow. */
4362 for (; !gsi_end_p (gsi); gsi_next (&gsi))
4364 gimple stmt = gsi_stmt (gsi);
4366 if (found_ctrl_stmt)
4368 error ("control flow in the middle of basic block %d",
4369 bb->index);
4370 err = 1;
4373 if (stmt_ends_bb_p (stmt))
4374 found_ctrl_stmt = true;
4376 if (gimple_code (stmt) == GIMPLE_LABEL)
4378 error ("label ");
4379 print_generic_expr (stderr, gimple_label_label (stmt), 0);
4380 fprintf (stderr, " in the middle of basic block %d", bb->index);
4381 err = 1;
/* Now check the block's last statement against its outgoing edges. */
4385 gsi = gsi_last_bb (bb);
4386 if (gsi_end_p (gsi))
4387 continue;
4389 stmt = gsi_stmt (gsi);
4391 err |= verify_eh_edges (stmt);
4393 if (is_ctrl_stmt (stmt))
4395 FOR_EACH_EDGE (e, ei, bb->succs)
4396 if (e->flags & EDGE_FALLTHRU)
4398 error ("fallthru edge after a control statement in bb %d",
4399 bb->index);
4400 err = 1;
4404 if (gimple_code (stmt) != GIMPLE_COND)
4406 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
4407 after anything else but if statement. */
4408 FOR_EACH_EDGE (e, ei, bb->succs)
4409 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4411 error ("true/false edge after a non-GIMPLE_COND in bb %d",
4412 bb->index);
4413 err = 1;
4417 switch (gimple_code (stmt))
4419 case GIMPLE_COND:
4421 edge true_edge;
4422 edge false_edge;
/* A conditional must end the block with exactly a true edge and a
   false edge carrying the proper flags. */
4424 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4426 if (!true_edge
4427 || !false_edge
4428 || !(true_edge->flags & EDGE_TRUE_VALUE)
4429 || !(false_edge->flags & EDGE_FALSE_VALUE)
4430 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4431 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4432 || EDGE_COUNT (bb->succs) >= 3)
4434 error ("wrong outgoing edge flags at end of bb %d",
4435 bb->index);
4436 err = 1;
4439 break;
4441 case GIMPLE_GOTO:
/* Simple gotos are represented implicitly by edges; only computed
   gotos (with abnormal successor edges) may remain explicit. */
4442 if (simple_goto_p (stmt))
4444 error ("explicit goto at end of bb %d", bb->index);
4445 err = 1;
4447 else
4449 /* FIXME. We should double check that the labels in the
4450 destination blocks have their address taken. */
4451 FOR_EACH_EDGE (e, ei, bb->succs)
4452 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
4453 | EDGE_FALSE_VALUE))
4454 || !(e->flags & EDGE_ABNORMAL))
4456 error ("wrong outgoing edge flags at end of bb %d",
4457 bb->index);
4458 err = 1;
4461 break;
4463 case GIMPLE_RETURN:
4464 if (!single_succ_p (bb)
4465 || (single_succ_edge (bb)->flags
4466 & (EDGE_FALLTHRU | EDGE_ABNORMAL
4467 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4469 error ("wrong outgoing edge flags at end of bb %d", bb->index);
4470 err = 1;
4472 if (single_succ (bb) != EXIT_BLOCK_PTR)
4474 error ("return edge does not point to exit in bb %d",
4475 bb->index);
4476 err = 1;
4478 break;
4480 case GIMPLE_SWITCH:
4482 tree prev;
4483 edge e;
4484 size_t i, n;
4486 n = gimple_switch_num_labels (stmt);
4488 /* Mark all the destination basic blocks. */
/* bb->aux marking protocol: 1 = named by some case label,
   2 = also reached by an outgoing edge; reset to 0 at the end. */
4489 for (i = 0; i < n; ++i)
4491 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4492 basic_block label_bb = label_to_block (lab);
4493 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
4494 label_bb->aux = (void *)1;
4497 /* Verify that the case labels are sorted. */
4498 prev = gimple_switch_label (stmt, 0);
4499 for (i = 1; i < n; ++i)
4501 tree c = gimple_switch_label (stmt, i);
4502 if (!CASE_LOW (c))
4504 error ("found default case not at the start of "
4505 "case vector");
4506 err = 1;
4507 continue;
4509 if (CASE_LOW (prev)
4510 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
4512 error ("case labels not sorted: ");
4513 print_generic_expr (stderr, prev, 0);
4514 fprintf (stderr," is greater than ");
4515 print_generic_expr (stderr, c, 0);
4516 fprintf (stderr," but comes before it.\n");
4517 err = 1;
4519 prev = c;
4521 /* VRP will remove the default case if it can prove it will
4522 never be executed. So do not verify there always exists
4523 a default case here. */
4525 FOR_EACH_EDGE (e, ei, bb->succs)
4527 if (!e->dest->aux)
4529 error ("extra outgoing edge %d->%d",
4530 bb->index, e->dest->index);
4531 err = 1;
4534 e->dest->aux = (void *)2;
4535 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
4536 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4538 error ("wrong outgoing edge flags at end of bb %d",
4539 bb->index);
4540 err = 1;
4544 /* Check that we have all of them. */
4545 for (i = 0; i < n; ++i)
4547 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4548 basic_block label_bb = label_to_block (lab);
4550 if (label_bb->aux != (void *)2)
4552 error ("missing edge %i->%i", bb->index, label_bb->index);
4553 err = 1;
/* Clear the aux markers so later passes see a clean CFG. */
4557 FOR_EACH_EDGE (e, ei, bb->succs)
4558 e->dest->aux = (void *)0;
4561 default: ;
4565 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
4566 verify_dominators (CDI_DOMINATORS);
4568 return err;
4572 /* Updates phi nodes after creating a forwarder block joined
4573 by edge FALLTHRU. */
4575 static void
4576 gimple_make_forwarder_block (edge fallthru)
4578 edge e;
4579 edge_iterator ei;
4580 basic_block dummy, bb;
4581 tree var;
4582 gimple_stmt_iterator gsi;
4584 dummy = fallthru->src;
4585 bb = fallthru->dest;
/* With a single predecessor no PHI adjustment is needed. */
4587 if (single_pred_p (bb))
4588 return;
4590 /* If we redirected a branch we must create new PHI nodes at the
4591 start of BB. */
4592 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
4594 gimple phi, new_phi;
4596 phi = gsi_stmt (gsi);
4597 var = gimple_phi_result (phi);
/* The old PHI result moves to the new PHI in BB; the dummy's PHI
   gets a fresh SSA name which feeds BB's PHI along FALLTHRU. */
4598 new_phi = create_phi_node (var, bb);
4599 SSA_NAME_DEF_STMT (var) = new_phi;
4600 gimple_phi_set_result (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
4601 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru);
4604 /* Add the arguments we have stored on edges. */
4605 FOR_EACH_EDGE (e, ei, bb->preds)
4607 if (e == fallthru)
4608 continue;
4610 flush_pending_stmts (e);
4615 /* Return a non-special label in the head of basic block BLOCK.
4616 Create one if it doesn't exist. */
4618 tree
4619 gimple_block_label (basic_block bb)
4621 gimple_stmt_iterator i, s = gsi_start_bb (bb);
4622 bool first = true;
4623 tree label;
4624 gimple stmt;
/* Scan the leading labels of BB for one that is not DECL_NONLOCAL. */
4626 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
4628 stmt = gsi_stmt (i);
4629 if (gimple_code (stmt) != GIMPLE_LABEL)
4630 break;
4631 label = gimple_label_label (stmt);
4632 if (!DECL_NONLOCAL (label))
/* Move the found label to the very front so callers can rely on
   it leading the block. */
4634 if (!first)
4635 gsi_move_before (&i, &s);
4636 return label;
/* No usable label: synthesize one at the start of the block. */
4640 label = create_artificial_label ();
4641 stmt = gimple_build_label (label);
4642 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
4643 return label;
4647 /* Attempt to perform edge redirection by replacing a possibly complex
4648 jump instruction by a goto or by removing the jump completely.
4649 This can apply only if all edges now point to the same block. The
4650 parameters and return values are equivalent to
4651 redirect_edge_and_branch. */
4653 static edge
4654 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
4656 basic_block src = e->src;
4657 gimple_stmt_iterator i;
4658 gimple stmt;
4660 /* We can replace or remove a complex jump only when we have exactly
4661 two edges. */
4662 if (EDGE_COUNT (src->succs) != 2
4663 /* Verify that all targets will be TARGET. Specifically, the
4664 edge that is not E must also go to TARGET. */
/* EDGE_SUCC (src, 0) == e yields 1 when E is edge 0, selecting the
   sibling edge; otherwise it yields 0 — i.e. "the other edge". */
4665 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4666 return NULL;
4668 i = gsi_last_bb (src);
4669 if (gsi_end_p (i))
4670 return NULL;
4672 stmt = gsi_stmt (i);
/* Only conditional jumps and switches can be removed this way; the
   block then falls through to TARGET. */
4674 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
4676 gsi_remove (&i, true);
4677 e = ssa_redirect_edge (e, target);
4678 e->flags = EDGE_FALLTHRU;
4679 return e;
4682 return NULL;
4686 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4687 edge representing the redirected branch. */
4689 static edge
4690 gimple_redirect_edge_and_branch (edge e, basic_block dest)
4692 basic_block bb = e->src;
4693 gimple_stmt_iterator gsi;
4694 edge ret;
4695 gimple stmt;
/* Abnormal edges (e.g. computed goto, EH) cannot be redirected. */
4697 if (e->flags & EDGE_ABNORMAL)
4698 return NULL;
/* Prefer replacing/removing the jump entirely when possible. */
4700 if (e->src != ENTRY_BLOCK_PTR
4701 && (ret = gimple_try_redirect_by_replacing_jump (e, dest)))
4702 return ret;
4704 if (e->dest == dest)
4705 return NULL;
4707 gsi = gsi_last_bb (bb);
4708 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
4710 switch (stmt ? gimple_code (stmt) : ERROR_MARK)
4712 case GIMPLE_COND:
4713 /* For COND_EXPR, we only need to redirect the edge. */
4714 break;
4716 case GIMPLE_GOTO:
4717 /* No non-abnormal edges should lead from a non-simple goto, and
4718 simple ones should be represented implicitly. */
4719 gcc_unreachable ();
4721 case GIMPLE_SWITCH:
4723 tree label = gimple_block_label (dest);
4724 tree cases = get_cases_for_edge (e, stmt);
4726 /* If we have a list of cases associated with E, then use it
4727 as it's a lot faster than walking the entire case vector. */
4728 if (cases)
4730 edge e2 = find_edge (e->src, dest);
4731 tree last, first;
/* Retarget every case on E's list at the label of DEST. */
4733 first = cases;
4734 while (cases)
4736 last = cases;
4737 CASE_LABEL (cases) = label;
4738 cases = TREE_CHAIN (cases);
4741 /* If there was already an edge in the CFG, then we need
4742 to move all the cases associated with E to E2. */
4743 if (e2)
/* Splice E's case chain onto the front of E2's chain. */
4745 tree cases2 = get_cases_for_edge (e2, stmt);
4747 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4748 TREE_CHAIN (cases2) = first;
4751 else
/* No edge-to-cases map: walk the whole label vector instead. */
4753 size_t i, n = gimple_switch_num_labels (stmt);
4755 for (i = 0; i < n; i++)
4757 tree elt = gimple_switch_label (stmt, i);
4758 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4759 CASE_LABEL (elt) = label;
4763 break;
4766 case GIMPLE_RETURN:
/* Redirecting away from EXIT: drop the return and fall through. */
4767 gsi_remove (&gsi, true);
4768 e->flags |= EDGE_FALLTHRU;
4769 break;
4771 case GIMPLE_OMP_RETURN:
4772 case GIMPLE_OMP_CONTINUE:
4773 case GIMPLE_OMP_SECTIONS_SWITCH:
4774 case GIMPLE_OMP_FOR:
4775 /* The edges from OMP constructs can be simply redirected. */
4776 break;
4778 default:
4779 /* Otherwise it must be a fallthru edge, and we don't need to
4780 do anything besides redirecting it. */
4781 gcc_assert (e->flags & EDGE_FALLTHRU);
4782 break;
4785 /* Update/insert PHI nodes as necessary. */
4787 /* Now update the edges in the CFG. */
4788 e = ssa_redirect_edge (e, dest);
4790 return e;
4793 /* Returns true if it is possible to remove edge E by redirecting
4794 it to the destination of the other edge from E->src. */
4796 static bool
4797 gimple_can_remove_branch_p (const_edge e)
4799 if (e->flags & EDGE_ABNORMAL)
4800 return false;
4802 return true;
4805 /* Simple wrapper, as we can always redirect fallthru edges. */
4807 static basic_block
4808 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
4810 e = gimple_redirect_edge_and_branch (e, dest);
4811 gcc_assert (e);
4813 return NULL;
4817 /* Splits basic block BB after statement STMT (but at least after the
4818 labels). If STMT is NULL, BB is split just after the labels. */
4820 static basic_block
4821 gimple_split_block (basic_block bb, void *stmt)
4823 gimple_stmt_iterator gsi;
4824 gimple_stmt_iterator gsi_tgt;
4825 gimple act;
4826 gimple_seq list;
4827 basic_block new_bb;
4828 edge e;
4829 edge_iterator ei;
4831 new_bb = create_empty_bb (bb);
4833 /* Redirect the outgoing edges. */
/* The successor edges move wholesale to NEW_BB; BB will fall
   through to NEW_BB (edge created by the caller). */
4834 new_bb->succs = bb->succs;
4835 bb->succs = NULL;
4836 FOR_EACH_EDGE (e, ei, new_bb->succs)
4837 e->src = new_bb;
/* Splitting "after a label" means just after all leading labels. */
4839 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
4840 stmt = NULL;
4842 /* Move everything from GSI to the new basic block. */
4843 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4845 act = gsi_stmt (gsi);
4846 if (gimple_code (act) == GIMPLE_LABEL)
4847 continue;
4849 if (!stmt)
4850 break;
4852 if (stmt == act)
4854 gsi_next (&gsi);
4855 break;
/* Nothing after the split point: NEW_BB stays empty. */
4859 if (gsi_end_p (gsi))
4860 return new_bb;
4862 /* Split the statement list - avoid re-creating new containers as this
4863 brings ugly quadratic memory consumption in the inliner.
4864 (We are still quadratic since we need to update stmt BB pointers,
4865 sadly.) */
4866 list = gsi_split_seq_before (&gsi);
4867 set_bb_seq (new_bb, list);
4868 for (gsi_tgt = gsi_start (list);
4869 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
4870 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
4872 return new_bb;
4876 /* Moves basic block BB after block AFTER. */
4878 static bool
4879 gimple_move_block_after (basic_block bb, basic_block after)
4881 if (bb->prev_bb == after)
4882 return true;
4884 unlink_block (bb);
4885 link_block (bb, after);
4887 return true;
4891 /* Return true if basic_block can be duplicated. */
4893 static bool
4894 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
4896 return true;
4899 /* Create a duplicate of the basic block BB. NOTE: This does not
4900 preserve SSA form. */
4902 static basic_block
4903 gimple_duplicate_bb (basic_block bb)
4905 basic_block new_bb;
4906 gimple_stmt_iterator gsi, gsi_tgt;
4907 gimple_seq phis = phi_nodes (bb);
4908 gimple phi, stmt, copy;
/* Place the copy at the end of the function, just before EXIT. */
4910 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
4912 /* Copy the PHI nodes. We ignore PHI node arguments here because
4913 the incoming edges have not been setup yet. */
4914 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
4916 phi = gsi_stmt (gsi);
4917 copy = create_phi_node (gimple_phi_result (phi), new_bb);
4918 create_new_def_for (gimple_phi_result (copy), copy,
4919 gimple_phi_result_ptr (copy));
4922 gsi_tgt = gsi_start_bb (new_bb);
4923 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4925 def_operand_p def_p;
4926 ssa_op_iter op_iter;
4927 int region;
/* Labels are per-block and must not be duplicated. */
4929 stmt = gsi_stmt (gsi);
4930 if (gimple_code (stmt) == GIMPLE_LABEL)
4931 continue;
4933 /* Create a new copy of STMT and duplicate STMT's virtual
4934 operands. */
4935 copy = gimple_copy (stmt);
4936 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
4937 copy_virtual_operands (copy, stmt);
/* Keep the copy in the same EH region and carry over any value
   profile histograms. */
4938 region = lookup_stmt_eh_region (stmt);
4939 if (region >= 0)
4940 add_stmt_to_eh_region (copy, region);
4941 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
4943 /* Create new names for all the definitions created by COPY and
4944 add replacement mappings for each new name. */
4945 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
4946 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
4949 return new_bb;
/* Adds phi node arguments for edge E_COPY after basic block duplication.

   E_COPY leaves a duplicated block; find the corresponding edge E in
   the original region (duplicated blocks carry BB_DUPLICATED and map
   back through get_bb_original) and copy each PHI argument that flows
   along E onto the matching PHI of E_COPY's destination.  */

static void
add_phi_args_after_copy_edge (edge e_copy)
{
  basic_block bb, bb_copy = e_copy->src, dest;
  edge e;
  edge_iterator ei;
  gimple phi, phi_copy;
  tree def;
  gimple_stmt_iterator psi, psi_copy;

  /* No PHIs at the destination means there is nothing to fill in.  */
  if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
    return;

  /* Map the copied source block back to its original, if it is a copy.  */
  bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;

  if (e_copy->dest->flags & BB_DUPLICATED)
    dest = get_bb_original (e_copy->dest);
  else
    dest = e_copy->dest;

  e = find_edge (bb, dest);
  if (!e)
    {
      /* During loop unrolling the target of the latch edge is copied.
	 In this case we are not looking for edge to dest, but to
	 duplicated block whose original was dest.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if ((e->dest->flags & BB_DUPLICATED)
	      && get_bb_original (e->dest) == dest)
	    break;
	}

      gcc_assert (e != NULL);
    }

  /* PHIs in the original and the copy are in the same order, so walk
     both iterators in lockstep.  */
  for (psi = gsi_start_phis (e->dest),
       psi_copy = gsi_start_phis (e_copy->dest);
       !gsi_end_p (psi);
       gsi_next (&psi), gsi_next (&psi_copy))
    {
      phi = gsi_stmt (psi);
      phi_copy = gsi_stmt (psi_copy);
      def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      add_phi_arg (phi_copy, def, e_copy);
    }
}
5003 /* Basic block BB_COPY was created by code duplication. Add phi node
5004 arguments for edges going out of BB_COPY. The blocks that were
5005 duplicated have BB_DUPLICATED set. */
5007 void
5008 add_phi_args_after_copy_bb (basic_block bb_copy)
5010 edge e_copy;
5011 edge_iterator ei;
5013 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5015 add_phi_args_after_copy_edge (e_copy);
5019 /* Blocks in REGION_COPY array of length N_REGION were created by
5020 duplication of basic blocks. Add phi node arguments for edges
5021 going from these blocks. If E_COPY is not NULL, also add
5022 phi node arguments for its destination.*/
5024 void
5025 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5026 edge e_copy)
5028 unsigned i;
5030 for (i = 0; i < n_region; i++)
5031 region_copy[i]->flags |= BB_DUPLICATED;
5033 for (i = 0; i < n_region; i++)
5034 add_phi_args_after_copy_bb (region_copy[i]);
5035 if (e_copy)
5036 add_phi_args_after_copy_edge (e_copy);
5038 for (i = 0; i < n_region; i++)
5039 region_copy[i]->flags &= ~BB_DUPLICATED;
/* Duplicates a REGION (set of N_REGION basic blocks) with just a single
   important exit edge EXIT.  By important we mean that no SSA name defined
   inside region is live over the other exit edges of the region.  All entry
   edges to the region must go to ENTRY->dest.  The edge ENTRY is redirected
   to the duplicate of the region.  SSA form, dominance and loop information
   is updated.  The new basic blocks are stored to REGION_COPY in the same
   order as they had in REGION, provided that REGION_COPY is not NULL.
   The function returns false if it is unable to copy the region,
   true otherwise.  */

bool
gimple_duplicate_sese_region (edge entry, edge exit,
			      basic_block *region, unsigned n_region,
			      basic_block *region_copy)
{
  unsigned i;
  bool free_region_copy = false, copying_header = false;
  struct loop *loop = entry->dest->loop_father;
  edge exit_copy;
  VEC (basic_block, heap) *doms;
  edge redirected;
  int total_freq = 0, entry_freq = 0;
  gcov_type total_count = 0, entry_count = 0;

  if (!can_copy_bbs_p (region, n_region))
    return false;

  /* Some sanity checking.  Note that we do not check for all possible
     misuses of the functions.  I.e. if you ask to copy something weird,
     it will work, but the state of structures probably will not be
     correct.  */
  for (i = 0; i < n_region; i++)
    {
      /* We do not handle subloops, i.e. all the blocks must belong to the
	 same loop.  */
      if (region[i]->loop_father != loop)
	return false;

      /* The loop header may only appear as the region entry.  */
      if (region[i] != entry->dest
	  && region[i] == loop->header)
	return false;
    }

  set_loop_copy (loop, loop);

  /* In case the function is used for loop header copying (which is the primary
     use), ensure that EXIT and its copy will be new latch and entry edges.  */
  if (loop->header == entry->dest)
    {
      copying_header = true;
      set_loop_copy (loop, loop_outer (loop));

      if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
	return false;

      for (i = 0; i < n_region; i++)
	if (region[i] != exit->src
	    && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
	  return false;
    }

  /* Allocate scratch storage for the copies if the caller did not
     provide any; remember to free it before returning.  */
  if (!region_copy)
    {
      region_copy = XNEWVEC (basic_block, n_region);
      free_region_copy = true;
    }

  gcc_assert (!need_ssa_update_p ());

  /* Record blocks outside the region that are dominated by something
     inside.  */
  doms = NULL;
  initialize_original_copy_tables ();

  doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);

  /* Use profile counts when available, edge frequencies otherwise.  */
  if (entry->dest->count)
    {
      total_count = entry->dest->count;
      entry_count = entry->count;
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (entry_count > total_count)
	entry_count = total_count;
    }
  else
    {
      total_freq = entry->dest->frequency;
      entry_freq = EDGE_FREQUENCY (entry);
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (total_freq == 0)
	total_freq = 1;
      else if (entry_freq > total_freq)
	entry_freq = total_freq;
    }

  copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
	    split_edge_bb_loc (entry));
  /* Split the incoming probability mass between original and copy.  */
  if (total_count)
    {
      scale_bbs_frequencies_gcov_type (region, n_region,
				       total_count - entry_count,
				       total_count);
      scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
				       total_count);
    }
  else
    {
      scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
				 total_freq);
      scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
    }

  if (copying_header)
    {
      /* The exit's destination becomes the new loop header, its source
	 the new latch.  */
      loop->header = exit->dest;
      loop->latch = exit->src;
    }

  /* Redirect the entry and add the phi node arguments.  */
  redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
  gcc_assert (redirected != NULL);
  flush_pending_stmts (entry);

  /* Concerning updating of dominators:  We must recount dominators
     for entry block and its copy.  Anything that is outside of the
     region, but was dominated by something inside needs recounting as
     well.  */
  set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
  VEC_safe_push (basic_block, heap, doms, get_bb_original (entry->dest));
  iterate_fix_dominators (CDI_DOMINATORS, doms, false);
  VEC_free (basic_block, heap, doms);

  /* Add the other PHI node arguments.  */
  add_phi_args_after_copy (region_copy, n_region, NULL);

  /* Update the SSA web.  */
  update_ssa (TODO_update_ssa);

  if (free_region_copy)
    free (region_copy);

  free_original_copy_tables ();
  return true;
}
/* Duplicates REGION consisting of N_REGION blocks.  The new blocks
   are stored to REGION_COPY in the same order in that they appear
   in REGION, if REGION_COPY is not NULL.  ENTRY is the entry to
   the region, EXIT an exit from it.  The condition guarding EXIT
   is moved to ENTRY.  Returns true if duplication succeeds, false
   otherwise.

   For example,

     some_code;
     if (cond)
       A;
     else
       B;

   is transformed to

     if (cond)
       {
	 some_code;
	 A;
       }
     else
       {
	 some_code;
	 B;
       }
*/

bool
gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
			  basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
			  basic_block *region_copy ATTRIBUTE_UNUSED)
{
  /* NOTE(review): the ATTRIBUTE_UNUSED markers above look stale -- every
     parameter is in fact used below.  Harmless, but worth confirming and
     removing upstream.  */
  unsigned i;
  bool free_region_copy = false;
  struct loop *loop = exit->dest->loop_father;
  struct loop *orig_loop = entry->dest->loop_father;
  basic_block switch_bb, entry_bb, nentry_bb;
  VEC (basic_block, heap) *doms;
  int total_freq = 0, exit_freq = 0;
  gcov_type total_count = 0, exit_count = 0;
  edge exits[2], nexits[2], e;
  gimple_stmt_iterator gsi;
  gimple cond_stmt;
  edge sorig, snew;

  /* The exit block must end in a two-way conditional; EXITS holds the
     guarded exit first, the other successor second.  */
  gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
  exits[0] = exit;
  exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);

  if (!can_copy_bbs_p (region, n_region))
    return false;

  /* Some sanity checking.  Note that we do not check for all possible
     misuses of the functions.  I.e. if you ask to copy something weird
     (e.g., in the example, if there is a jump from inside to the middle
     of some_code, or some_code defines some of the values used in cond)
     it will work, but the resulting code will not be correct.  */
  for (i = 0; i < n_region; i++)
    {
      /* We do not handle subloops, i.e. all the blocks must belong to the
	 same loop.  */
      if (region[i]->loop_father != orig_loop)
	return false;

      if (region[i] == orig_loop->latch)
	return false;
    }

  initialize_original_copy_tables ();
  set_loop_copy (orig_loop, loop);

  if (!region_copy)
    {
      region_copy = XNEWVEC (basic_block, n_region);
      free_region_copy = true;
    }

  gcc_assert (!need_ssa_update_p ());

  /* Record blocks outside the region that are dominated by something
     inside.  */
  doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);

  /* Use profile counts when available, edge frequencies otherwise.  */
  if (exit->src->count)
    {
      total_count = exit->src->count;
      exit_count = exit->count;
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (exit_count > total_count)
	exit_count = total_count;
    }
  else
    {
      total_freq = exit->src->frequency;
      exit_freq = EDGE_FREQUENCY (exit);
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (total_freq == 0)
	total_freq = 1;
      if (exit_freq > total_freq)
	exit_freq = total_freq;
    }

  copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
	    split_edge_bb_loc (exit));
  /* Split the probability mass between original and copy.  */
  if (total_count)
    {
      scale_bbs_frequencies_gcov_type (region, n_region,
				       total_count - exit_count,
				       total_count);
      scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
				       total_count);
    }
  else
    {
      scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
				 total_freq);
      scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
    }

  /* Create the switch block, and put the exit condition to it.  */
  entry_bb = entry->dest;
  nentry_bb = get_bb_copy (entry_bb);
  if (!last_stmt (entry->src)
      || !stmt_ends_bb_p (last_stmt (entry->src)))
    switch_bb = entry->src;
  else
    switch_bb = split_edge (entry);
  set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);

  /* Copy the exit condition into the switch block; operands are
     unshared so the two statements do not share trees.  */
  gsi = gsi_last_bb (switch_bb);
  cond_stmt = last_stmt (exit->src);
  gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
  cond_stmt = gimple_copy (cond_stmt);
  gimple_cond_set_lhs (cond_stmt, unshare_expr (gimple_cond_lhs (cond_stmt)));
  gimple_cond_set_rhs (cond_stmt, unshare_expr (gimple_cond_rhs (cond_stmt)));
  gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);

  sorig = single_succ_edge (switch_bb);
  sorig->flags = exits[1]->flags;
  snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);

  /* Register the new edge from SWITCH_BB in loop exit lists.  */
  rescan_loop_exit (snew, true, false);

  /* Add the PHI node arguments.  */
  add_phi_args_after_copy (region_copy, n_region, snew);

  /* Get rid of now superfluous conditions and associated edges (and phi node
     arguments).  */
  e = redirect_edge_and_branch (exits[0], exits[1]->dest);
  PENDING_STMT (e) = NULL;
  e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
  PENDING_STMT (e) = NULL;

  /* Anything that is outside of the region, but was dominated by something
     inside needs to update dominance info.  */
  iterate_fix_dominators (CDI_DOMINATORS, doms, false);
  VEC_free (basic_block, heap, doms);

  /* Update the SSA web.  */
  update_ssa (TODO_update_ssa);

  if (free_region_copy)
    free (region_copy);

  free_original_copy_tables ();
  return true;
}
5362 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
5363 adding blocks when the dominator traversal reaches EXIT. This
5364 function silently assumes that ENTRY strictly dominates EXIT. */
5366 void
5367 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
5368 VEC(basic_block,heap) **bbs_p)
5370 basic_block son;
5372 for (son = first_dom_son (CDI_DOMINATORS, entry);
5373 son;
5374 son = next_dom_son (CDI_DOMINATORS, son))
5376 VEC_safe_push (basic_block, heap, *bbs_p, son);
5377 if (son != exit)
5378 gather_blocks_in_sese_region (son, exit, bbs_p);
/* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
   The duplicates are recorded in VARS_MAP so each decl is copied at
   most once; subsequent calls for the same decl reuse the mapping.
   Decls already owned by TO_CONTEXT are left untouched.  */

static void
replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
			   tree to_context)
{
  tree t = *tp, new_t;
  struct function *f = DECL_STRUCT_FUNCTION (to_context);
  void **loc;

  /* Nothing to do if the decl already belongs to the target function.  */
  if (DECL_CONTEXT (t) == to_context)
    return;

  loc = pointer_map_contains (vars_map, t);

  if (!loc)
    {
      loc = pointer_map_insert (vars_map, t);

      if (SSA_VAR_P (t))
	{
	  /* Variables get a fresh decl and are added to the target
	     function's local decl list.  */
	  new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
	  f->local_decls = tree_cons (NULL_TREE, new_t, f->local_decls);
	}
      else
	{
	  /* Only CONST_DECLs are expected here besides variables.  */
	  gcc_assert (TREE_CODE (t) == CONST_DECL);
	  new_t = copy_node (t);
	}
      DECL_CONTEXT (new_t) = to_context;

      *loc = new_t;
    }
  else
    new_t = (tree) *loc;

  *tp = new_t;
}
/* Creates an ssa name in TO_CONTEXT equivalent to NAME.
   VARS_MAP maps old ssa names and var_decls to the new ones; a NAME
   already present in the map is returned directly.  Only real (GIMPLE
   register) names are handled -- virtual operands must not reach here.  */

static tree
replace_ssa_name (tree name, struct pointer_map_t *vars_map,
		  tree to_context)
{
  void **loc;
  tree new_name, decl = SSA_NAME_VAR (name);

  gcc_assert (is_gimple_reg (name));

  loc = pointer_map_contains (vars_map, name);

  if (!loc)
    {
      /* Duplicate the underlying decl first, then build the new name
	 with CFUN switched to the target function so the name lands in
	 the right SSA tables.  */
      replace_by_duplicate_decl (&decl, vars_map, to_context);

      push_cfun (DECL_STRUCT_FUNCTION (to_context));
      if (gimple_in_ssa_p (cfun))
	add_referenced_var (decl);

      new_name = make_ssa_name (decl, SSA_NAME_DEF_STMT (name));
      if (SSA_NAME_IS_DEFAULT_DEF (name))
	set_default_def (decl, new_name);
      pop_cfun ();

      loc = pointer_map_insert (vars_map, name);
      *loc = new_name;
    }
  else
    new_name = (tree) *loc;

  return new_name;
}
/* State shared by the move_stmt_r/move_stmt_op walkers while moving
   statements from one function to another (see move_block_to_fn).  */

struct move_stmt_d
{
  tree orig_block;		/* BLOCK the statements are moving out of.  */
  tree new_block;		/* BLOCK they are rehomed into.  */
  tree from_context;		/* FUNCTION_DECL of the source function.  */
  tree to_context;		/* FUNCTION_DECL of the destination.  */
  struct pointer_map_t *vars_map; /* Old decl/SSA name -> duplicate.  */
  htab_t new_label_map;		/* Old label -> new label (tree_map hash).  */
  bool remap_decls_p;		/* False inside OMP directive clauses,
				   where decls stay with the parent.  */
};
/* Helper for move_block_to_fn.  Set TREE_BLOCK in every expression
   contained in *TP if it has been ORIG_BLOCK previously and change the
   DECL_CONTEXT of every local variable referenced in *TP.

   Callback for walk_gimple_stmt's operand walk; DATA is a
   walk_stmt_info whose info field points at a struct move_stmt_d.
   Always returns NULL_TREE (the walk is never cut short).  */

static tree
move_stmt_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  tree t = *tp;

  if (EXPR_P (t))
    /* We should never have TREE_BLOCK set on non-statements.  */
    gcc_assert (!TREE_BLOCK (t));

  else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
    {
      if (TREE_CODE (t) == SSA_NAME)
	*tp = replace_ssa_name (t, p->vars_map, p->to_context);
      else if (TREE_CODE (t) == LABEL_DECL)
	{
	  /* Labels are not duplicated; they are either remapped via the
	     label map built by the EH duplication, or simply adopted.  */
	  if (p->new_label_map)
	    {
	      struct tree_map in, *out;
	      in.base.from = t;
	      out = (struct tree_map *)
		htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
	      if (out)
		*tp = t = out->to;
	    }

	  DECL_CONTEXT (t) = p->to_context;
	}
      else if (p->remap_decls_p)
	{
	  /* Replace T with its duplicate.  T should no longer appear in the
	     parent function, so this looks wasteful; however, it may appear
	     in referenced_vars, and more importantly, as virtual operands of
	     statements, and in alias lists of other variables.  It would be
	     quite difficult to expunge it from all those places.  ??? It might
	     suffice to do this for addressable variables.  */
	  if ((TREE_CODE (t) == VAR_DECL
	       && !is_global_var (t))
	      || TREE_CODE (t) == CONST_DECL)
	    replace_by_duplicate_decl (tp, p->vars_map, p->to_context);

	  if (SSA_VAR_P (t)
	      && gimple_in_ssa_p (cfun))
	    {
	      /* Register the (possibly replaced) variable with the
		 destination function's SSA machinery.  */
	      push_cfun (DECL_STRUCT_FUNCTION (p->to_context));
	      add_referenced_var (*tp);
	      pop_cfun ();
	    }
	}
      *walk_subtrees = 0;
    }
  else if (TYPE_P (t))
    /* Types need no remapping; do not walk into them.  */
    *walk_subtrees = 0;

  return NULL_TREE;
}
/* Like move_stmt_op, but for gimple statements.

   Helper for move_block_to_fn.  Set GIMPLE_BLOCK in every expression
   contained in the current statement in *GSI_P and change the
   DECL_CONTEXT of every local variable referenced in the current
   statement.  Always returns NULL_TREE.  */

static tree
move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	     struct walk_stmt_info *wi)
{
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  gimple stmt = gsi_stmt (*gsi_p);
  tree block = gimple_block (stmt);

  if (p->orig_block == NULL_TREE
      || block == p->orig_block
      || block == NULL_TREE)
    gimple_set_block (stmt, p->new_block);
#ifdef ENABLE_CHECKING
  else if (block != p->new_block)
    {
      /* A statement keeping its own block must still live somewhere
	 underneath ORIG_BLOCK -- verify by climbing the block tree.  */
      while (block && block != p->orig_block)
	block = BLOCK_SUPERCONTEXT (block);
      gcc_assert (block);
    }
#endif

  if (is_gimple_omp (stmt)
      && gimple_code (stmt) != GIMPLE_OMP_RETURN
      && gimple_code (stmt) != GIMPLE_OMP_CONTINUE)
    {
      /* Do not remap variables inside OMP directives.  Variables
	 referenced in clauses and directive header belong to the
	 parent function and should not be moved into the child
	 function.  */
      bool save_remap_decls_p = p->remap_decls_p;
      p->remap_decls_p = false;
      *handled_ops_p = true;

      walk_gimple_seq (gimple_omp_body (stmt), move_stmt_r, move_stmt_op, wi);

      p->remap_decls_p = save_remap_decls_p;
    }

  return NULL_TREE;
}
5580 /* Marks virtual operands of all statements in basic blocks BBS for
5581 renaming. */
5583 void
5584 mark_virtual_ops_in_bb (basic_block bb)
5586 gimple_stmt_iterator gsi;
5588 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5589 mark_virtual_ops_for_renaming (gsi_stmt (gsi));
5591 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5592 mark_virtual_ops_for_renaming (gsi_stmt (gsi));
5595 /* Marks virtual operands of all statements in basic blocks BBS for
5596 renaming. */
5598 static void
5599 mark_virtual_ops_in_region (VEC (basic_block,heap) *bbs)
5601 basic_block bb;
5602 unsigned i;
5604 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
5605 mark_virtual_ops_in_bb (bb);
/* Move basic block BB from function CFUN to function DEST_FN.  The
   block is moved out of the original linked list and placed after
   block AFTER in the new list.  Also, the block is removed from the
   original array of blocks and placed in DEST_FN's array of blocks.
   If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
   updated to reflect the moved edges.

   The local variables are remapped to new instances, VARS_MAP is used
   to record the mapping.  D carries the remaining walker state (block,
   context, label map); EH_OFFSET is added to every statement's EH
   region number when re-registering it in DEST_CFUN's EH table.  */

static void
move_block_to_fn (struct function *dest_cfun, basic_block bb,
		  basic_block after, bool update_edge_count_p,
		  struct move_stmt_d *d, int eh_offset)
{
  struct control_flow_graph *cfg;
  edge_iterator ei;
  edge e;
  gimple_stmt_iterator si;
  unsigned old_len, new_len;

  /* Remove BB from dominance structures.  */
  delete_from_dominance_info (CDI_DOMINATORS, bb);
  if (current_loops)
    remove_bb_from_loops (bb);

  /* Link BB to the new linked list.  */
  move_block_after (bb, after);

  /* Update the edge count in the corresponding flowgraphs.  */
  if (update_edge_count_p)
    FOR_EACH_EDGE (e, ei, bb->succs)
      {
	cfun->cfg->x_n_edges--;
	dest_cfun->cfg->x_n_edges++;
      }

  /* Remove BB from the original basic block array.  */
  VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
  cfun->cfg->x_n_basic_blocks--;

  /* Grow DEST_CFUN's basic block array if needed.  BB keeps its old
     index, so the destination array must be large enough for it.  */
  cfg = dest_cfun->cfg;
  cfg->x_n_basic_blocks++;
  if (bb->index >= cfg->x_last_basic_block)
    cfg->x_last_basic_block = bb->index + 1;

  old_len = VEC_length (basic_block, cfg->x_basic_block_info);
  if ((unsigned) cfg->x_last_basic_block >= old_len)
    {
      /* Grow by about 25% beyond the needed size.  */
      new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
      VEC_safe_grow_cleared (basic_block, gc, cfg->x_basic_block_info,
			     new_len);
    }

  VEC_replace (basic_block, cfg->x_basic_block_info,
	       bb->index, bb);

  /* Remap the variables in phi nodes.  Note: no gsi_next in the loop
     header -- remove_phi_node already advances the iterator.  */
  for (si = gsi_start_phis (bb); !gsi_end_p (si); )
    {
      gimple phi = gsi_stmt (si);
      use_operand_p use;
      tree op = PHI_RESULT (phi);
      ssa_op_iter oi;

      if (!is_gimple_reg (op))
	{
	  /* Remove the phi nodes for virtual operands (alias analysis will be
	     run for the new function, anyway).  */
	  remove_phi_node (&si, true);
	  continue;
	}

      SET_PHI_RESULT (phi,
		      replace_ssa_name (op, d->vars_map, dest_cfun->decl));
      FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
	{
	  op = USE_FROM_PTR (use);
	  if (TREE_CODE (op) == SSA_NAME)
	    SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
	}

      gsi_next (&si);
    }

  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    {
      gimple stmt = gsi_stmt (si);
      int region;
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      wi.info = d;
      walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
	{
	  tree label = gimple_label_label (stmt);
	  int uid = LABEL_DECL_UID (label);

	  gcc_assert (uid > -1);

	  /* Move the label into DEST_CFUN's label-to-block map and clear
	     the slot in the source function's map.  */
	  old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
	  if (old_len <= (unsigned) uid)
	    {
	      new_len = 3 * uid / 2;
	      VEC_safe_grow_cleared (basic_block, gc,
				     cfg->x_label_to_block_map, new_len);
	    }

	  VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
	  VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);

	  gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);

	  if (uid >= dest_cfun->cfg->last_label_uid)
	    dest_cfun->cfg->last_label_uid = uid + 1;
	}
      else if (gimple_code (stmt) == GIMPLE_RESX && eh_offset != 0)
	/* Resume statements carry the region number inline; shift it.  */
	gimple_resx_set_region (stmt, gimple_resx_region (stmt) + eh_offset);

      region = lookup_stmt_eh_region (stmt);
      if (region >= 0)
	{
	  /* Re-register the statement's EH region in the destination
	     function and migrate its value-profile histograms.  */
	  add_stmt_to_eh_region_fn (dest_cfun, stmt, region + eh_offset);
	  remove_stmt_from_eh_region (stmt);
	  gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
	  gimple_remove_stmt_histograms (cfun, stmt);
	}

      /* We cannot leave any operands allocated from the operand caches of
	 the current function.  */
      free_stmt_operands (stmt);
      push_cfun (dest_cfun);
      update_stmt (stmt);
      pop_cfun ();
    }
}
5748 /* Examine the statements in BB (which is in SRC_CFUN); find and return
5749 the outermost EH region. Use REGION as the incoming base EH region. */
5751 static int
5752 find_outermost_region_in_block (struct function *src_cfun,
5753 basic_block bb, int region)
5755 gimple_stmt_iterator si;
5757 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5759 gimple stmt = gsi_stmt (si);
5760 int stmt_region;
5762 if (gimple_code (stmt) == GIMPLE_RESX)
5763 stmt_region = gimple_resx_region (stmt);
5764 else
5765 stmt_region = lookup_stmt_eh_region_fn (src_cfun, stmt);
5766 if (stmt_region > 0)
5768 if (region < 0)
5769 region = stmt_region;
5770 else if (stmt_region != region)
5772 region = eh_region_outermost (src_cfun, stmt_region, region);
5773 gcc_assert (region != -1);
5778 return region;
/* Callback for duplicate_eh_regions: create a fresh artificial label
   standing in for label DECL, record the DECL -> new-label mapping in
   the hash table passed as DATA, and return the new label.  The new
   label inherits DECL's LABEL_DECL_UID so the label-to-block map stays
   consistent.  */

static tree
new_label_mapper (tree decl, void *data)
{
  htab_t hash = (htab_t) data;
  struct tree_map *m;
  void **slot;

  gcc_assert (TREE_CODE (decl) == LABEL_DECL);

  m = XNEW (struct tree_map);
  m->hash = DECL_UID (decl);
  m->base.from = decl;
  m->to = create_artificial_label ();
  LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
  /* Keep CFUN's label uid watermark in sync with the new label.  */
  if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
    cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;

  slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
  /* Each label must be mapped at most once.  */
  gcc_assert (*slot == NULL);

  *slot = m;

  return m->to;
}
/* Change DECL_CONTEXT of all BLOCK_VARS in block, including
   subblocks: each variable in BLOCK's var chain is replaced by its
   duplicate in TO_CONTEXT (recorded in/looked up from VARS_MAP), and
   the walk recurses over BLOCK_SUBBLOCKS.  */

static void
replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
				  tree to_context)
{
  tree *tp, t;

  for (tp = &BLOCK_VARS (block); *tp; tp = &TREE_CHAIN (*tp))
    {
      t = *tp;
      replace_by_duplicate_decl (&t, vars_map, to_context);
      /* replace_by_duplicate_decl leaves T unchanged when the decl
	 already belongs to TO_CONTEXT; only splice in real copies.  */
      if (t != *tp)
	{
	  if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
	    {
	      /* Carry the DECL_VALUE_EXPR over to the duplicate.  */
	      SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
	      DECL_HAS_VALUE_EXPR_P (t) = 1;
	    }
	  TREE_CHAIN (t) = TREE_CHAIN (*tp);
	  *tp = t;
	}
    }

  for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
    replace_block_vars_by_duplicates (block, vars_map, to_context);
}
/* Move a single-entry, single-exit region delimited by ENTRY_BB and
   EXIT_BB to function DEST_CFUN.  The whole region is replaced by a
   single basic block in the original CFG and the new basic block is
   returned.  DEST_CFUN must not have a CFG yet.

   Note that the region need not be a pure SESE region.  Blocks inside
   the region may contain calls to abort/exit.  The only restriction
   is that ENTRY_BB should be the only entry point and it must
   dominate EXIT_BB.

   Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
   functions outermost BLOCK, move all subblocks of ORIG_BLOCK
   to the new function.

   All local variables referenced in the region are assumed to be in
   the corresponding BLOCK_VARS and unexpanded variable lists
   associated with DEST_CFUN.  */

basic_block
move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
			basic_block exit_bb, tree orig_block)
{
  VEC(basic_block,heap) *bbs, *dom_bbs;
  basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
  basic_block after, bb, *entry_pred, *exit_succ, abb;
  struct function *saved_cfun = cfun;
  int *entry_flag, *exit_flag, eh_offset;
  unsigned *entry_prob, *exit_prob;
  unsigned i, num_entry_edges, num_exit_edges;
  edge e;
  edge_iterator ei;
  htab_t new_label_map;
  struct pointer_map_t *vars_map;
  struct loop *loop = entry_bb->loop_father;
  struct move_stmt_d d;

  /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
     region.  */
  gcc_assert (entry_bb != exit_bb
	      && (!exit_bb
		  || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));

  /* Collect all the blocks in the region.  Manually add ENTRY_BB
     because it won't be added by dfs_enumerate_from.  */
  bbs = NULL;
  VEC_safe_push (basic_block, heap, bbs, entry_bb);
  gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);

  /* The blocks that used to be dominated by something in BBS will now be
     dominated by the new block.  */
  dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
				     VEC_address (basic_block, bbs),
				     VEC_length (basic_block, bbs));

  /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG.  We need to remember
     the predecessor edges to ENTRY_BB and the successor edges to
     EXIT_BB so that we can re-attach them to the new basic block that
     will replace the region.  */
  num_entry_edges = EDGE_COUNT (entry_bb->preds);
  entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
  entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
  entry_prob = XNEWVEC (unsigned, num_entry_edges);
  i = 0;
  /* remove_edge invalidates the iterator, so re-fetch edge 0 each time.  */
  for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
    {
      entry_prob[i] = e->probability;
      entry_flag[i] = e->flags;
      entry_pred[i++] = e->src;
      remove_edge (e);
    }

  if (exit_bb)
    {
      num_exit_edges = EDGE_COUNT (exit_bb->succs);
      exit_succ = (basic_block *) xcalloc (num_exit_edges,
					   sizeof (basic_block));
      exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
      exit_prob = XNEWVEC (unsigned, num_exit_edges);
      i = 0;
      for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
	{
	  exit_prob[i] = e->probability;
	  exit_flag[i] = e->flags;
	  exit_succ[i++] = e->dest;
	  remove_edge (e);
	}
    }
  else
    {
      num_exit_edges = 0;
      exit_succ = NULL;
      exit_flag = NULL;
      exit_prob = NULL;
    }

  /* Switch context to the child function to initialize DEST_FN's CFG.  */
  gcc_assert (dest_cfun->cfg == NULL);
  push_cfun (dest_cfun);

  init_empty_tree_cfg ();

  /* Initialize EH information for the new function.  */
  eh_offset = 0;
  new_label_map = NULL;
  if (saved_cfun->eh)
    {
      int region = -1;

      /* Find the outermost EH region spanned by the moved blocks, then
	 duplicate that region subtree into the child function.  */
      for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
	region = find_outermost_region_in_block (saved_cfun, bb, region);

      init_eh_for_function ();
      if (region != -1)
	{
	  new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
	  eh_offset = duplicate_eh_regions (saved_cfun, new_label_mapper,
					    new_label_map, region, 0);
	}
    }

  pop_cfun ();

  /* The ssa form for virtual operands in the source function will have to
     be repaired.  We do not care for the real operands -- the sese region
     must be closed with respect to those.  */
  mark_virtual_ops_in_region (bbs);

  /* Move blocks from BBS into DEST_CFUN.  */
  gcc_assert (VEC_length (basic_block, bbs) >= 2);
  after = dest_cfun->cfg->x_entry_block_ptr;
  vars_map = pointer_map_create ();

  memset (&d, 0, sizeof (d));
  d.vars_map = vars_map;
  d.from_context = cfun->decl;
  d.to_context = dest_cfun->decl;
  d.new_label_map = new_label_map;
  d.remap_decls_p = true;
  d.orig_block = orig_block;
  d.new_block = DECL_INITIAL (dest_cfun->decl);

  for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
    {
      /* No need to update edge counts on the last block.  It has
	 already been updated earlier when we detached the region from
	 the original CFG.  */
      move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d, eh_offset);
      after = bb;
    }

  /* Rewire BLOCK_SUBBLOCKS of orig_block.  */
  if (orig_block)
    {
      tree block;
      gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
		  == NULL_TREE);
      BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
	= BLOCK_SUBBLOCKS (orig_block);
      for (block = BLOCK_SUBBLOCKS (orig_block);
	   block; block = BLOCK_CHAIN (block))
	BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
      BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
    }

  replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
				    vars_map, dest_cfun->decl);

  if (new_label_map)
    htab_delete (new_label_map);
  pointer_map_destroy (vars_map);

  /* Rewire the entry and exit blocks.  The successor to the entry
     block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
     the child function.  Similarly, the predecessor of DEST_FN's
     EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR.  We
     need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
     various CFG manipulation function get to the right CFG.

     FIXME, this is silly.  The CFG ought to become a parameter to
     these helpers.  */
  push_cfun (dest_cfun);
  make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
  if (exit_bb)
    make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
  pop_cfun ();

  /* Back in the original function, the SESE region has disappeared,
     create a new basic block in its place.  */
  bb = create_empty_bb (entry_pred[0]);
  if (current_loops)
    add_bb_to_loop (bb, loop);
  for (i = 0; i < num_entry_edges; i++)
    {
      e = make_edge (entry_pred[i], bb, entry_flag[i]);
      e->probability = entry_prob[i];
    }

  for (i = 0; i < num_exit_edges; i++)
    {
      e = make_edge (bb, exit_succ[i], exit_flag[i]);
      e->probability = exit_prob[i];
    }

  /* The replacement block takes over the dominance role of the region.  */
  set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
  for (i = 0; VEC_iterate (basic_block, dom_bbs, i, abb); i++)
    set_immediate_dominator (CDI_DOMINATORS, abb, bb);
  VEC_free (basic_block, heap, dom_bbs);

  if (exit_bb)
    {
      free (exit_prob);
      free (exit_flag);
      free (exit_succ);
    }
  free (entry_prob);
  free (entry_flag);
  free (entry_pred);
  VEC_free (basic_block, heap, bbs);

  return bb;
}
/* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree-pass.h).
   Picks the most informative representation available: a CFG-based dump
   if the CFG exists, a flat GIMPLE sequence if the function is lowered
   but has no CFG yet, or a GENERIC tree dump otherwise.  */

void
dump_function_to_file (tree fn, FILE *file, int flags)
{
  tree arg, vars, var;
  struct function *dsf;
  bool ignore_topmost_bind = false, any_var = false;
  basic_block bb;
  tree chain;

  /* Print the signature: name followed by the parameter list.  */
  fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));

  arg = DECL_ARGUMENTS (fn);
  while (arg)
    {
      print_generic_expr (file, TREE_TYPE (arg), dump_flags);
      fprintf (file, " ");
      print_generic_expr (file, arg, dump_flags);
      if (flags & TDF_VERBOSE)
	print_node (file, "", arg, 4);
      if (TREE_CHAIN (arg))
	fprintf (file, ", ");
      arg = TREE_CHAIN (arg);
    }
  fprintf (file, ")\n");

  if (flags & TDF_VERBOSE)
    print_node (file, "", fn, 2);

  dsf = DECL_STRUCT_FUNCTION (fn);
  if (dsf && (flags & TDF_DETAILS))
    dump_eh_tree (file, dsf);

  /* A raw dump of a function without a GIMPLE body is a plain tree
     node dump; nothing further to do.  */
  if (flags & TDF_RAW && !gimple_has_body_p (fn))
    {
      dump_node (fn, TDF_SLIM | flags, file);
      return;
    }

  /* Switch CFUN to point to FN.  */
  push_cfun (DECL_STRUCT_FUNCTION (fn));

  /* When GIMPLE is lowered, the variables are no longer available in
     BIND_EXPRs, so display them separately.  */
  if (cfun && cfun->decl == fn && cfun->local_decls)
    {
      ignore_topmost_bind = true;

      fprintf (file, "{\n");
      for (vars = cfun->local_decls; vars; vars = TREE_CHAIN (vars))
	{
	  var = TREE_VALUE (vars);

	  print_generic_decl (file, var, flags);
	  if (flags & TDF_VERBOSE)
	    print_node (file, "", var, 4);
	  fprintf (file, "\n");

	  any_var = true;
	}
    }

  if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
    {
      /* If the CFG has been built, emit a CFG-based dump.  */
      check_bb_profile (ENTRY_BLOCK_PTR, file);
      if (!ignore_topmost_bind)
	fprintf (file, "{\n");

      if (any_var && n_basic_blocks)
	fprintf (file, "\n");

      FOR_EACH_BB (bb)
	gimple_dump_bb (bb, file, 2, flags);

      fprintf (file, "}\n");
      check_bb_profile (EXIT_BLOCK_PTR, file);
    }
  else if (DECL_SAVED_TREE (fn) == NULL)
    {
      /* The function is now in GIMPLE form but the CFG has not been
	 built yet.  Emit the single sequence of GIMPLE statements
	 that make up its body.  */
      gimple_seq body = gimple_body (fn);

      /* A body that is exactly one GIMPLE_BIND already prints its own
	 braces; otherwise emit them here.  */
      if (gimple_seq_first_stmt (body)
	  && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
	  && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
	print_gimple_seq (file, body, 0, flags);
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");

	  if (any_var)
	    fprintf (file, "\n");

	  print_gimple_seq (file, body, 2, flags);
	  fprintf (file, "}\n");
	}
    }
  else
    {
      int indent;

      /* Make a tree based dump.  */
      chain = DECL_SAVED_TREE (fn);

      if (chain && TREE_CODE (chain) == BIND_EXPR)
	{
	  if (ignore_topmost_bind)
	    {
	      /* Locals were already printed above; skip the outermost
		 BIND_EXPR and indent its body instead.  */
	      chain = BIND_EXPR_BODY (chain);
	      indent = 2;
	    }
	  else
	    indent = 0;
	}
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");
	  indent = 2;
	}

      if (any_var)
	fprintf (file, "\n");

      print_generic_stmt_indented (file, chain, flags, indent);
      if (ignore_topmost_bind)
	fprintf (file, "}\n");
    }

  fprintf (file, "\n\n");

  /* Restore CFUN.  */
  pop_cfun ();
}
6200 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
6202 void
6203 debug_function (tree fn, int flags)
6205 dump_function_to_file (fn, stderr, flags);
6209 /* Print on FILE the indexes for the predecessors of basic_block BB. */
6211 static void
6212 print_pred_bbs (FILE *file, basic_block bb)
6214 edge e;
6215 edge_iterator ei;
6217 FOR_EACH_EDGE (e, ei, bb->preds)
6218 fprintf (file, "bb_%d ", e->src->index);
6222 /* Print on FILE the indexes for the successors of basic_block BB. */
6224 static void
6225 print_succ_bbs (FILE *file, basic_block bb)
6227 edge e;
6228 edge_iterator ei;
6230 FOR_EACH_EDGE (e, ei, bb->succs)
6231 fprintf (file, "bb_%d ", e->dest->index);
6234 /* Print to FILE the basic block BB following the VERBOSITY level. */
6236 void
6237 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
6239 char *s_indent = (char *) alloca ((size_t) indent + 1);
6240 memset ((void *) s_indent, ' ', (size_t) indent);
6241 s_indent[indent] = '\0';
6243 /* Print basic_block's header. */
6244 if (verbosity >= 2)
6246 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
6247 print_pred_bbs (file, bb);
6248 fprintf (file, "}, succs = {");
6249 print_succ_bbs (file, bb);
6250 fprintf (file, "})\n");
6253 /* Print basic_block's body. */
6254 if (verbosity >= 3)
6256 fprintf (file, "%s {\n", s_indent);
6257 gimple_dump_bb (bb, file, indent + 4, TDF_VOPS|TDF_MEMSYMS);
6258 fprintf (file, "%s }\n", s_indent);
6262 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
/* Pretty print LOOP on FILE, indented INDENT spaces.  Following
   VERBOSITY level this outputs the contents of the loop, or just its
   structure.  */

static void
print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
{
  char *s_indent;
  basic_block bb;

  if (loop == NULL)
    return;

  s_indent = (char *) alloca ((size_t) indent + 1);
  memset ((void *) s_indent, ' ', (size_t) indent);
  s_indent[indent] = '\0';

  /* Print loop's header.  NOTE(review): assumes loop->header and
     loop->latch are non-NULL for every loop printed here -- confirm
     for partially-built or disambiguated loop trees.  */
  fprintf (file, "%sloop_%d (header = %d, latch = %d", s_indent,
	   loop->num, loop->header->index, loop->latch->index);
  fprintf (file, ", niter = ");
  print_generic_expr (file, loop->nb_iterations, 0);

  if (loop->any_upper_bound)
    {
      fprintf (file, ", upper_bound = ");
      dump_double_int (file, loop->nb_iterations_upper_bound, true);
    }

  if (loop->any_estimate)
    {
      fprintf (file, ", estimate = ");
      dump_double_int (file, loop->nb_iterations_estimate, true);
    }
  fprintf (file, ")\n");

  /* Print loop's body.  */
  if (verbosity >= 1)
    {
      fprintf (file, "%s{\n", s_indent);
      /* Dump only the blocks belonging directly to this loop, then
	 recurse into the inner loops with increased indentation.  */
      FOR_EACH_BB (bb)
	if (bb->loop_father == loop)
	  print_loops_bb (file, bb, indent, verbosity);

      print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
      fprintf (file, "%s}\n", s_indent);
    }
}
6313 /* Print the LOOP and its sibling loops on FILE, indented INDENT
6314 spaces. Following VERBOSITY level this outputs the contents of the
6315 loop, or just its structure. */
6317 static void
6318 print_loop_and_siblings (FILE *file, struct loop *loop, int indent, int verbosity)
6320 if (loop == NULL)
6321 return;
6323 print_loop (file, loop, indent, verbosity);
6324 print_loop_and_siblings (file, loop->next, indent, verbosity);
6327 /* Follow a CFG edge from the entry point of the program, and on entry
6328 of a loop, pretty print the loop structure on FILE. */
6330 void
6331 print_loops (FILE *file, int verbosity)
6333 basic_block bb;
6335 bb = ENTRY_BLOCK_PTR;
6336 if (bb && bb->loop_father)
6337 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
/* Debugging loops structure at tree level, at some VERBOSITY level.
   Debugger convenience wrapper printing to stderr.  */

void
debug_loops (int verbosity)
{
  print_loops (stderr, verbosity);
}
/* Print on stderr the code of LOOP, at some VERBOSITY level.
   Debugger convenience wrapper around print_loop.  */

void
debug_loop (struct loop *loop, int verbosity)
{
  print_loop (stderr, loop, 0, verbosity);
}
/* Print on stderr the code of loop number NUM, at some VERBOSITY
   level.  Looks the loop up by number, then delegates to debug_loop.  */

void
debug_loop_num (unsigned num, int verbosity)
{
  debug_loop (get_loop (num), verbosity);
}
6366 /* Return true if BB ends with a call, possibly followed by some
6367 instructions that must stay with the call. Return false,
6368 otherwise. */
6370 static bool
6371 gimple_block_ends_with_call_p (basic_block bb)
6373 gimple_stmt_iterator gsi = gsi_last_bb (bb);
6374 return is_gimple_call (gsi_stmt (gsi));
6378 /* Return true if BB ends with a conditional branch. Return false,
6379 otherwise. */
6381 static bool
6382 gimple_block_ends_with_condjump_p (const_basic_block bb)
6384 gimple stmt = last_stmt (CONST_CAST_BB (bb));
6385 return (stmt && gimple_code (stmt) == GIMPLE_COND);
6389 /* Return true if we need to add fake edge to exit at statement T.
6390 Helper function for gimple_flow_call_edges_add. */
6392 static bool
6393 need_fake_edge_p (gimple t)
6395 tree fndecl = NULL_TREE;
6396 int call_flags = 0;
6398 /* NORETURN and LONGJMP calls already have an edge to exit.
6399 CONST and PURE calls do not need one.
6400 We don't currently check for CONST and PURE here, although
6401 it would be a good idea, because those attributes are
6402 figured out from the RTL in mark_constant_function, and
6403 the counter incrementation code from -fprofile-arcs
6404 leads to different results from -fbranch-probabilities. */
6405 if (is_gimple_call (t))
6407 fndecl = gimple_call_fndecl (t);
6408 call_flags = gimple_call_flags (t);
6411 if (is_gimple_call (t)
6412 && fndecl
6413 && DECL_BUILT_IN (fndecl)
6414 && (call_flags & ECF_NOTHROW)
6415 && !(call_flags & ECF_NORETURN)
6416 && !(call_flags & ECF_RETURNS_TWICE))
6417 return false;
6419 if (is_gimple_call (t)
6420 && !(call_flags & ECF_NORETURN))
6421 return true;
6423 if (gimple_code (t) == GIMPLE_ASM
6424 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
6425 return true;
6427 return false;
/* Add fake edges to the function exit for any non constant and non
   noreturn calls, volatile inline assembly in the bitmap of blocks
   specified by BLOCKS or to the whole CFG if BLOCKS is zero.  Return
   the number of blocks that were split.

   The goal is to expose cases in which entering a basic block does
   not imply that all subsequent instructions must be executed.  */

static int
gimple_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block;
  bool check_last_block = false;

  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR->prev_bb;
      gimple_stmt_iterator gsi = gsi_last_bb (bb);
      gimple t = NULL;

      if (!gsi_end_p (gsi))
	t = gsi_stmt (gsi);

      if (t && need_fake_edge_p (t))
	{
	  edge e;

	  e = find_edge (bb, EXIT_BLOCK_PTR);
	  if (e)
	    {
	      /* Insert a nop on the fallthru edge; committing the insert
		 splits the edge, so the call no longer ends the last
		 block.  */
	      gsi_insert_on_edge (e, gimple_build_nop ());
	      gsi_commit_edge_inserts ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */
  for (i = 0; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      gimple_stmt_iterator gsi;
      gimple stmt, last_stmt;

      if (!bb)
	continue;

      if (blocks && !TEST_BIT (blocks, i))
	continue;

      /* Walk the statements of BB backwards; splitting below keeps the
	 part still to be scanned inside BB.  */
      gsi = gsi_last_bb (bb);
      if (!gsi_end_p (gsi))
	{
	  last_stmt = gsi_stmt (gsi);
	  do
	    {
	      stmt = gsi_stmt (gsi);
	      if (need_fake_edge_p (stmt))
		{
		  edge e;

		  /* The handling above of the final block before the
		     epilogue should be enough to verify that there is
		     no edge to the exit block in CFG already.
		     Calling make_edge in such case would cause us to
		     mark that edge as fake and remove it later.  */
#ifdef ENABLE_CHECKING
		  if (stmt == last_stmt)
		    {
		      e = find_edge (bb, EXIT_BLOCK_PTR);
		      gcc_assert (e == NULL);
		    }
#endif

		  /* Note that the following may create a new basic block
		     and renumber the existing basic blocks.  */
		  if (stmt != last_stmt)
		    {
		      e = split_block (bb, stmt);
		      if (e)
			blocks_split++;
		    }
		  make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
		}
	      gsi_prev (&gsi);
	    }
	  while (!gsi_end_p (gsi));
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
/* Purge dead abnormal call edges from basic block BB.  Returns true if
   any edge was removed (including via gimple_purge_dead_eh_edges).  */

bool
gimple_purge_dead_abnormal_call_edges (basic_block bb)
{
  bool changed = gimple_purge_dead_eh_edges (bb);

  if (cfun->has_nonlocal_label)
    {
      gimple stmt = last_stmt (bb);
      edge_iterator ei;
      edge e;

      /* If the block no longer ends in a statement that can perform an
	 abnormal goto, every outgoing abnormal edge is dead.  The
	 iterator is only advanced when the edge is kept, because
	 remove_edge shifts the successor vector.  */
      if (!(stmt && stmt_can_make_abnormal_goto (stmt)))
	for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	  {
	    if (e->flags & EDGE_ABNORMAL)
	      {
		remove_edge (e);
		changed = true;
	      }
	    else
	      ei_next (&ei);
	  }

      /* See gimple_purge_dead_eh_edges below.  */
      if (changed)
	free_dominance_info (CDI_DOMINATORS);
    }

  return changed;
}
6583 /* Stores all basic blocks dominated by BB to DOM_BBS. */
6585 static void
6586 get_all_dominated_blocks (basic_block bb, VEC (basic_block, heap) **dom_bbs)
6588 basic_block son;
6590 VEC_safe_push (basic_block, heap, *dom_bbs, bb);
6591 for (son = first_dom_son (CDI_DOMINATORS, bb);
6592 son;
6593 son = next_dom_son (CDI_DOMINATORS, son))
6594 get_all_dominated_blocks (son, dom_bbs);
/* Removes edge E and all the blocks dominated by it, and updates dominance
   information.  The IL in E->src needs to be updated separately.
   If dominance info is not available, only the edge E is removed.  */

void
remove_edge_and_dominated_blocks (edge e)
{
  VEC (basic_block, heap) *bbs_to_remove = NULL;
  VEC (basic_block, heap) *bbs_to_fix_dom = NULL;
  bitmap df, df_idom;
  edge f;
  edge_iterator ei;
  bool none_removed = false;
  unsigned i;
  basic_block bb, dbb;
  bitmap_iterator bi;

  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      remove_edge (e);
      return;
    }

  /* No updating is needed for edges to exit.  */
  if (e->dest == EXIT_BLOCK_PTR)
    {
      if (cfgcleanup_altered_bbs)
	bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      remove_edge (e);
      return;
    }

  /* First, we find the basic blocks to remove.  If E->dest has a predecessor
     that is not dominated by E->dest, then this set is empty.  Otherwise,
     all the basic blocks dominated by E->dest are removed.

     Also, to DF_IDOM we store the immediate dominators of the blocks in
     the dominance frontier of E (i.e., of the successors of the
     removed blocks, if there are any, and of E->dest otherwise).  */
  FOR_EACH_EDGE (f, ei, e->dest->preds)
    {
      if (f == e)
	continue;

      if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
	{
	  none_removed = true;
	  break;
	}
    }

  df = BITMAP_ALLOC (NULL);
  df_idom = BITMAP_ALLOC (NULL);

  if (none_removed)
    bitmap_set_bit (df_idom,
		    get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
  else
    {
      /* Collect E->dest's dominance subtree, then compute its dominance
	 frontier in DF: successors of removed blocks that are not
	 themselves removed.  */
      get_all_dominated_blocks (e->dest, &bbs_to_remove);
      for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
	{
	  FOR_EACH_EDGE (f, ei, bb->succs)
	    {
	      if (f->dest != EXIT_BLOCK_PTR)
		bitmap_set_bit (df, f->dest->index);
	    }
	}
      for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
	bitmap_clear_bit (df, bb->index);

      EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
	{
	  bb = BASIC_BLOCK (i);
	  bitmap_set_bit (df_idom,
			  get_immediate_dominator (CDI_DOMINATORS, bb)->index);
	}
    }

  if (cfgcleanup_altered_bbs)
    {
      /* Record the set of the altered basic blocks.  */
      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      bitmap_ior_into (cfgcleanup_altered_bbs, df);
    }

  /* Remove E and the cancelled blocks.  */
  if (none_removed)
    remove_edge (e);
  else
    {
      for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
	delete_basic_block (bb);
    }

  /* Update the dominance information.  The immediate dominator may change only
     for blocks whose immediate dominator belongs to DF_IDOM:

     Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
     removal.  Let Z the arbitrary block such that idom(Z) = Y and
     Z dominates X after the removal.  Before removal, there exists a path P
     from Y to X that avoids Z.  Let F be the last edge on P that is
     removed, and let W = F->dest.  Before removal, idom(W) = Y (since Y
     dominates W, and because of P, Z does not dominate W), and W belongs to
     the dominance frontier of E.  Therefore, Y belongs to DF_IDOM.  */
  EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
    {
      bb = BASIC_BLOCK (i);
      for (dbb = first_dom_son (CDI_DOMINATORS, bb);
	   dbb;
	   dbb = next_dom_son (CDI_DOMINATORS, dbb))
	VEC_safe_push (basic_block, heap, bbs_to_fix_dom, dbb);
    }

  iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);

  BITMAP_FREE (df);
  BITMAP_FREE (df_idom);
  VEC_free (basic_block, heap, bbs_to_remove);
  VEC_free (basic_block, heap, bbs_to_fix_dom);
}
/* Purge dead EH edges from basic block BB.  Returns true if any edge
   was removed.  */

bool
gimple_purge_dead_eh_edges (basic_block bb)
{
  bool changed = false;
  edge e;
  edge_iterator ei;
  gimple stmt = last_stmt (bb);

  /* If the last statement can still throw internally, every EH edge
     remains live; nothing to purge.  */
  if (stmt && stmt_can_throw_internal (stmt))
    return false;

  /* Only advance the iterator when the edge is kept, because removal
     shifts the successor vector.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (e->flags & EDGE_EH)
	{
	  remove_edge_and_dominated_blocks (e);
	  changed = true;
	}
      else
	ei_next (&ei);
    }

  return changed;
}
6746 bool
6747 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
6749 bool changed = false;
6750 unsigned i;
6751 bitmap_iterator bi;
6753 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
6755 basic_block bb = BASIC_BLOCK (i);
6757 /* Earlier gimple_purge_dead_eh_edges could have removed
6758 this basic block already. */
6759 gcc_assert (bb || changed);
6760 if (bb != NULL)
6761 changed |= gimple_purge_dead_eh_edges (bb);
6764 return changed;
6767 /* This function is called whenever a new edge is created or
6768 redirected. */
6770 static void
6771 gimple_execute_on_growing_pred (edge e)
6773 basic_block bb = e->dest;
6775 if (phi_nodes (bb))
6776 reserve_phi_args_for_new_edge (bb);
6779 /* This function is called immediately before edge E is removed from
6780 the edge vector E->dest->preds. */
6782 static void
6783 gimple_execute_on_shrinking_pred (edge e)
6785 if (phi_nodes (e->dest))
6786 remove_phi_args (e);
/*---------------------------------------------------------------------------
  Helper functions for Loop versioning
  ---------------------------------------------------------------------------*/

/* Adjust phi nodes for 'first' basic block.  'second' basic block is a copy
   of 'first'.  Both of them are dominated by 'new_head' basic block.  When
   'new_head' was created by 'second's incoming edge it received phi arguments
   on the edge by split_edge().  Later, additional edge 'e' was created to
   connect 'new_head' and 'first'.  Now this routine adds phi args on this
   additional edge 'e' that new_head to second edge received as part of edge
   splitting.  */

static void
gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
				  basic_block new_head, edge e)
{
  gimple phi1, phi2;
  gimple_stmt_iterator psi1, psi2;
  tree def;
  edge e2 = find_edge (new_head, second);

  /* Because NEW_HEAD has been created by splitting SECOND's incoming
     edge, we should always have an edge from NEW_HEAD to SECOND.  */
  gcc_assert (e2 != NULL);

  /* Browse all 'second' basic block phi nodes and add phi args to
     edge 'e' for 'first' head. PHI args are always in correct order.  */

  /* Walk the PHI lists of FIRST and SECOND in lockstep; since SECOND is
     a copy of FIRST, the nth PHI in one corresponds to the nth PHI in
     the other.  */
  for (psi2 = gsi_start_phis (second),
       psi1 = gsi_start_phis (first);
       !gsi_end_p (psi2) && !gsi_end_p (psi1);
       gsi_next (&psi2), gsi_next (&psi1))
    {
      phi1 = gsi_stmt (psi1);
      phi2 = gsi_stmt (psi2);
      /* The value SECOND receives from NEW_HEAD is the value FIRST must
	 receive on the new edge E.  */
      def = PHI_ARG_DEF (phi2, e2->dest_idx);
      add_phi_arg (phi1, def, e);
    }
}
6830 /* Adds a if else statement to COND_BB with condition COND_EXPR.
6831 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
6832 the destination of the ELSE part. */
6834 static void
6835 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
6836 basic_block second_head ATTRIBUTE_UNUSED,
6837 basic_block cond_bb, void *cond_e)
6839 gimple_stmt_iterator gsi;
6840 gimple new_cond_expr;
6841 tree cond_expr = (tree) cond_e;
6842 edge e0;
6844 /* Build new conditional expr */
6845 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
6846 NULL_TREE, NULL_TREE);
6848 /* Add new cond in cond_bb. */
6849 gsi = gsi_last_bb (cond_bb);
6850 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
6852 /* Adjust edges appropriately to connect new head with first head
6853 as well as second head. */
6854 e0 = single_succ_edge (cond_bb);
6855 e0->flags &= ~EDGE_FALLTHRU;
6856 e0->flags |= EDGE_FALSE_VALUE;
/* Hook table implementing the generic CFG interface for GIMPLE; each
   slot is documented by the trailing comment naming the cfg_hooks
   field it fills.  */

struct cfg_hooks gimple_cfg_hooks = {
  "gimple",
  gimple_verify_flow_info,
  gimple_dump_bb,		/* dump_bb  */
  create_bb,			/* create_basic_block  */
  gimple_redirect_edge_and_branch, /* redirect_edge_and_branch  */
  gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force  */
  gimple_can_remove_branch_p,	/* can_remove_branch_p  */
  remove_bb,			/* delete_basic_block  */
  gimple_split_block,		/* split_block  */
  gimple_move_block_after,	/* move_block_after  */
  gimple_can_merge_blocks_p,	/* can_merge_blocks_p  */
  gimple_merge_blocks,		/* merge_blocks  */
  gimple_predict_edge,		/* predict_edge  */
  gimple_predicted_by_p,	/* predicted_by_p  */
  gimple_can_duplicate_bb_p,	/* can_duplicate_block_p  */
  gimple_duplicate_bb,		/* duplicate_block  */
  gimple_split_edge,		/* split_edge  */
  gimple_make_forwarder_block,	/* make_forward_block  */
  NULL,				/* tidy_fallthru_edge  */
  gimple_block_ends_with_call_p,/* block_ends_with_call_p */
  gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
  gimple_flow_call_edges_add,   /* flow_call_edges_add */
  gimple_execute_on_growing_pred,	/* execute_on_growing_pred */
  gimple_execute_on_shrinking_pred,	/* execute_on_shrinking_pred */
  gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
  gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
  extract_true_false_edges_from_block, /* extract_cond_bb_edges */
  flush_pending_stmts		/* flush_pending_stmts */
};
/* Split all critical edges.  Returns 0 (no TODO flags).  */

static unsigned int
split_critical_edges (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
     expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
     mappings around the calls to split_edge.  */
  start_recording_case_labels ();
  FOR_ALL_BB (bb)
    {
      /* Abnormal edges cannot be split; skip them.  split_edge
	 redirects E to the new block, so the successor vector being
	 iterated keeps its shape.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
	  {
	    split_edge (e);
	  }
    }
  end_recording_case_labels ();
  return 0;
}
/* Pass descriptor for critical-edge splitting; requires a CFG and
   provides PROP_no_crit_edges.  */

struct gimple_opt_pass pass_split_crit_edges =
{
 {
  GIMPLE_PASS,
  "crited",				/* name */
  NULL,					/* gate */
  split_critical_edges,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_SPLIT_EDGES,			/* tv_id */
  PROP_cfg,				/* properties required */
  PROP_no_crit_edges,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func			/* todo_flags_finish */
 }
};
6937 /* Build a ternary operation and gimplify it. Emit code before GSI.
6938 Return the gimple_val holding the result. */
6940 tree
6941 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
6942 tree type, tree a, tree b, tree c)
6944 tree ret;
6946 ret = fold_build3 (code, type, a, b, c);
6947 STRIP_NOPS (ret);
6949 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
6950 GSI_SAME_STMT);
6953 /* Build a binary operation and gimplify it. Emit code before GSI.
6954 Return the gimple_val holding the result. */
6956 tree
6957 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
6958 tree type, tree a, tree b)
6960 tree ret;
6962 ret = fold_build2 (code, type, a, b);
6963 STRIP_NOPS (ret);
6965 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
6966 GSI_SAME_STMT);
6969 /* Build a unary operation and gimplify it. Emit code before GSI.
6970 Return the gimple_val holding the result. */
6972 tree
6973 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
6974 tree a)
6976 tree ret;
6978 ret = fold_build1 (code, type, a);
6979 STRIP_NOPS (ret);
6981 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
6982 GSI_SAME_STMT);
6987 /* Emit return warnings. */
6989 static unsigned int
6990 execute_warn_function_return (void)
6992 source_location location;
6993 gimple last;
6994 edge e;
6995 edge_iterator ei;
6997 /* If we have a path to EXIT, then we do return. */
6998 if (TREE_THIS_VOLATILE (cfun->decl)
6999 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
7001 location = UNKNOWN_LOCATION;
7002 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7004 last = last_stmt (e->src);
7005 if (gimple_code (last) == GIMPLE_RETURN
7006 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
7007 break;
7009 if (location == UNKNOWN_LOCATION)
7010 location = cfun->function_end_locus;
7011 warning (0, "%H%<noreturn%> function does return", &location);
7014 /* If we see "return;" in some basic block, then we do reach the end
7015 without returning a value. */
7016 else if (warn_return_type
7017 && !TREE_NO_WARNING (cfun->decl)
7018 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
7019 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
7021 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7023 gimple last = last_stmt (e->src);
7024 if (gimple_code (last) == GIMPLE_RETURN
7025 && gimple_return_retval (last) == NULL
7026 && !gimple_no_warning_p (last))
7028 location = gimple_location (last);
7029 if (location == UNKNOWN_LOCATION)
7030 location = cfun->function_end_locus;
7031 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
7032 TREE_NO_WARNING (cfun->decl) = 1;
7033 break;
7037 return 0;
7041 /* Given a basic block B which ends with a conditional and has
7042 precisely two successors, determine which of the edges is taken if
7043 the conditional is true and which is taken if the conditional is
7044 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
7046 void
7047 extract_true_false_edges_from_block (basic_block b,
7048 edge *true_edge,
7049 edge *false_edge)
7051 edge e = EDGE_SUCC (b, 0);
7053 if (e->flags & EDGE_TRUE_VALUE)
7055 *true_edge = e;
7056 *false_edge = EDGE_SUCC (b, 1);
7058 else
7060 *false_edge = e;
7061 *true_edge = EDGE_SUCC (b, 1);
/* Pass descriptor for the return warnings above; anonymous (no dump
   file) and requires only a CFG.  */

struct gimple_opt_pass pass_warn_function_return =
{
 {
  GIMPLE_PASS,
  NULL,					/* name */
  NULL,					/* gate */
  execute_warn_function_return,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 }
};
/* Emit noreturn warnings.  Suggests the "noreturn" attribute when the
   function is not already marked noreturn (TREE_THIS_VOLATILE), has no
   path to the exit block, and the language hook does not excuse it.
   Returns 0 (no TODO flags).  */

static unsigned int
execute_warn_function_noreturn (void)
{
  if (warn_missing_noreturn
      && !TREE_THIS_VOLATILE (cfun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
      && !lang_hooks.missing_noreturn_ok_p (cfun->decl))
    warning (OPT_Wmissing_noreturn, "%Jfunction might be possible candidate "
	     "for attribute %<noreturn%>",
	     cfun->decl);
  return 0;
}
7099 struct gimple_opt_pass pass_warn_function_noreturn =
7102 GIMPLE_PASS,
7103 NULL, /* name */
7104 NULL, /* gate */
7105 execute_warn_function_noreturn, /* execute */
7106 NULL, /* sub */
7107 NULL, /* next */
7108 0, /* static_pass_number */
7109 0, /* tv_id */
7110 PROP_cfg, /* properties_required */
7111 0, /* properties_provided */
7112 0, /* properties_destroyed */
7113 0, /* todo_flags_start */
7114 0 /* todo_flags_finish */