cp/:
[official-gcc.git] / gcc / tree-cfg.c
blob4c7c0db12b6947e54c6c8a41e2d6a88ef7772965
1 /* Control flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Contributed by Diego Novillo <dnovillo@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "output.h"
32 #include "flags.h"
33 #include "function.h"
34 #include "expr.h"
35 #include "ggc.h"
36 #include "langhooks.h"
37 #include "diagnostic.h"
38 #include "tree-flow.h"
39 #include "timevar.h"
40 #include "tree-dump.h"
41 #include "tree-pass.h"
42 #include "toplev.h"
43 #include "except.h"
44 #include "cfgloop.h"
45 #include "cfglayout.h"
46 #include "tree-ssa-propagate.h"
47 #include "value-prof.h"
48 #include "pointer-set.h"
49 #include "tree-inline.h"
51 /* This file contains functions for building the Control Flow Graph (CFG)
52 for a function tree. */
54 /* Local declarations. */
56 /* Initial capacity for the basic block array. */
57 static const int initial_cfg_capacity = 20;
59 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
60 which use a particular edge. The CASE_LABEL_EXPRs are chained together
61 via their TREE_CHAIN field, which we clear after we're done with the
62 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
64 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
65 update the case vector in response to edge redirections.
67 Right now this table is set up and torn down at key points in the
68 compilation process. It would be nice if we could make the table
69 more persistent. The key is getting notification of changes to
70 the CFG (particularly edge removal, creation and redirection). */
72 static struct pointer_map_t *edge_to_cases;
/* CFG statistics.  */
struct cfg_stats_d
{
  /* Count of labels merged into their block's leading label; bumped
     elsewhere in this file during label cleanup/merging.  */
  long num_merged_labels;
};

/* File-local accumulator for the statistics above.  */
static struct cfg_stats_d cfg_stats;
82 /* Nonzero if we found a computed goto while building basic blocks. */
83 static bool found_computed_goto;
/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  /* The source location this entry tracks.  */
  location_t locus;
  /* Last discriminator value handed out for LOCUS.  */
  int discriminator;
};

/* Map from locus to its last-assigned discriminator; created/destroyed
   around edge construction in build_gimple_cfg.  */
static htab_t discriminator_per_locus;
93 /* Basic blocks and flowgraphs. */
94 static void make_blocks (gimple_seq);
95 static void factor_computed_gotos (void);
97 /* Edges. */
98 static void make_edges (void);
99 static void make_cond_expr_edges (basic_block);
100 static void make_gimple_switch_edges (basic_block);
101 static void make_goto_expr_edges (basic_block);
102 static unsigned int locus_map_hash (const void *);
103 static int locus_map_eq (const void *, const void *);
104 static void assign_discriminator (location_t, basic_block);
105 static edge gimple_redirect_edge_and_branch (edge, basic_block);
106 static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
107 static unsigned int split_critical_edges (void);
109 /* Various helpers. */
110 static inline bool stmt_starts_bb_p (gimple, gimple);
111 static int gimple_verify_flow_info (void);
112 static void gimple_make_forwarder_block (edge);
113 static void gimple_cfg2vcg (FILE *);
114 static gimple first_non_label_stmt (basic_block);
116 /* Flowgraph optimization and cleanup. */
117 static void gimple_merge_blocks (basic_block, basic_block);
118 static bool gimple_can_merge_blocks_p (basic_block, basic_block);
119 static void remove_bb (basic_block);
120 static edge find_taken_edge_computed_goto (basic_block, tree);
121 static edge find_taken_edge_cond_expr (basic_block, tree);
122 static edge find_taken_edge_switch_expr (basic_block, tree);
123 static tree find_case_label_for_value (gimple, tree);
/* Reset FN's CFG to the initial empty state: only the fixed ENTRY and
   EXIT blocks exist (linked directly to each other), and the basic
   block and label-to-block vectors are allocated with their initial
   capacity.  */

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_function (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
  basic_block_info_for_function (fn)
    = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow_cleared (basic_block, gc,
			 basic_block_info_for_function (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  label_to_block_map_for_function (fn)
    = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow_cleared (basic_block, gc,
			 label_to_block_map_for_function (fn),
			 initial_cfg_capacity);

  /* Install the two fixed blocks into the block array.  */
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
				ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
				EXIT_BLOCK_PTR_FOR_FUNCTION (fn));

  /* Chain ENTRY directly to EXIT: there are no real blocks yet.  */
  ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
  EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
}
/* Convenience wrapper: reset the CFG of the current function (cfun)
   to the empty initial state.  */

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}
163 /*---------------------------------------------------------------------------
164 Create basic blocks
165 ---------------------------------------------------------------------------*/
/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  found_computed_goto = 0;
  make_blocks (seq);

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.  */
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR);

  /* Adjust the size of the array.  */
  if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
    VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  The discriminator table only
     lives for the duration of edge construction.  */
  discriminator_per_locus = htab_create (13, locus_map_hash, locus_map_eq,
					 free);
  make_edges ();
  cleanup_dead_labels ();
  htab_delete (discriminator_per_locus);

  /* Debugging dumps.  */

  /* Write the flowgraph to a VCG file.  */
  {
    int local_dump_flags;
    FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
    if (vcg_file)
      {
	gimple_cfg2vcg (vcg_file);
	dump_end (TDI_vcg, vcg_file);
      }
  }

#ifdef ENABLE_CHECKING
  verify_stmts ();
#endif
}
/* Execute function for pass_build_cfg: build the CFG for the current
   function's body, then detach the body from the decl (the statements
   now live inside the basic blocks).  Always returns 0.  */

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  return 0;
}
/* Pass descriptor for CFG construction.  Requires lowered GIMPLE+EH
   (PROP_gimple_leh) and provides PROP_cfg.  */
struct gimple_opt_pass pass_build_cfg =
{
 {
  GIMPLE_PASS,
  "cfg",				/* name */
  NULL,					/* gate */
  execute_build_cfg,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CFG,				/* tv_id */
  PROP_gimple_leh,			/* properties_required */
  PROP_cfg,				/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_stmts | TODO_cleanup_cfg
  | TODO_dump_func			/* todo_flags_finish */
 }
};
268 /* Return true if T is a computed goto. */
270 static bool
271 computed_goto_p (gimple t)
273 return (gimple_code (t) == GIMPLE_GOTO
274 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
/* Search the CFG for any computed gotos.  If found, factor them to a
   common computed goto site.  Also record the location of that site so
   that we can un-factor the gotos after we have converted back to
   normal form.  */

static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  tree var = NULL;
  gimple factored_computed_goto_label = NULL;
  gimple factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (bb);
      gimple last;

      if (gsi_end_p (gsi))
	continue;

      last = gsi_stmt (gsi);

      /* Ignore the computed goto we create when we factor the original
	 computed gotos.  */
      if (last == factored_computed_goto)
	continue;

      /* If the last statement is a computed goto, factor it.  */
      if (computed_goto_p (last))
	{
	  gimple assignment;

	  /* The first time we find a computed goto we need to create
	     the factored goto block and the variable each original
	     computed goto will use for their goto destination.  */
	  if (!factored_computed_goto)
	    {
	      basic_block new_bb = create_empty_bb (bb);
	      gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);

	      /* Create the destination of the factored goto.  Each original
		 computed goto will put its desired destination into this
		 variable and jump to the label we create immediately
		 below.  */
	      var = create_tmp_var (ptr_type_node, "gotovar");

	      /* Build a label for the new block which will contain the
		 factored computed goto.  */
	      factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
	      factored_computed_goto_label
		= gimple_build_label (factored_label_decl);
	      gsi_insert_after (&new_gsi, factored_computed_goto_label,
				GSI_NEW_STMT);

	      /* Build our new computed goto.  */
	      factored_computed_goto = gimple_build_goto (var);
	      gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
	    }

	  /* Copy the original computed goto's destination into VAR.  */
	  assignment = gimple_build_assign (var, gimple_goto_dest (last));
	  gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	  /* And re-vector the computed goto to the new destination.  */
	  gimple_goto_set_dest (last, factored_label_decl);
	}
    }
}
/* Build a flowgraph for the sequence of stmts SEQ: walk the sequence,
   splitting it into basic blocks at block-starting statements and after
   block-ending statements, and assign each statement to its block.
   Sets found_computed_goto as a side effect.  */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR;

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    seq = gsi_split_seq_before (&i);
	  bb = create_basic_block (seq, NULL, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      if (computed_goto_p (stmt))
	found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
	      gimple s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      /* Complex/vector temporaries must be marked as GIMPLE
		 registers to stay in SSA form.  */
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}
/* Create and return a new empty basic block after bb AFTER.  H, if
   non-null, is an existing gimple_seq to install as the block's body;
   E must be null (asserted).  This is the CFG-hook callback form.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     ggc_alloc_cleared to allocate a basic block, we do not have to
     clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  bb->il.gimple = GGC_CNEW (struct gimple_bb_info);
  set_bb_seq (bb, h ? (gimple_seq) h : gimple_seq_alloc ());

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed (by ~25% plus a little).  */
  if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
    {
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK (last_basic_block, bb);

  n_basic_blocks++;
  last_basic_block++;

  return bb;
}
462 /*---------------------------------------------------------------------------
463 Edge creation
464 ---------------------------------------------------------------------------*/
/* Fold COND_EXPR_COND of each COND_EXPR: for every block ending in a
   GIMPLE_COND, try to fold the condition; if it folds to constant
   false/true, rewrite the statement accordingly.  Overflow warnings
   are deferred so they are only emitted when the fold is used.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
	{
	  tree cond;
	  bool zerop, onep;

	  fold_defer_overflow_warnings ();
	  cond = fold_binary (gimple_cond_code (stmt), boolean_type_node,
			      gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
	  if (cond)
	    {
	      zerop = integer_zerop (cond);
	      onep = integer_onep (cond);
	    }
	  else
	    zerop = onep = false;

	  fold_undefer_overflow_warnings (zerop || onep,
					  stmt,
					  WARN_STRICT_OVERFLOW_CONDITIONAL);
	  if (zerop)
	    gimple_cond_make_false (stmt);
	  else if (onep)
	    gimple_cond_make_true (stmt);
	}
    }
}
/* Join all the blocks in the flowgraph: walk every block, look at its
   last statement, and create the outgoing edges it implies (including
   EH, abnormal-goto and OpenMP region edges).  Falls through to the
   next block when the last statement permits it.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB (bb)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (last)
	{
	  enum gimple_code code = gimple_code (last);
	  switch (code)
	    {
	    case GIMPLE_GOTO:
	      make_goto_expr_edges (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_RETURN:
	      make_edge (bb, EXIT_BLOCK_PTR, 0);
	      fallthru = false;
	      break;
	    case GIMPLE_COND:
	      make_cond_expr_edges (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_SWITCH:
	      make_gimple_switch_edges (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_RESX:
	      make_eh_edges (last);
	      fallthru = false;
	      break;

	    case GIMPLE_CALL:
	      /* If this function receives a nonlocal goto, then we need to
		 make edges from this call site to all the nonlocal goto
		 handlers.  */
	      if (stmt_can_make_abnormal_goto (last))
		make_abnormal_goto_edges (bb, true);

	      /* If this statement has reachable exception handlers, then
		 create abnormal edges to them.  */
	      make_eh_edges (last);

	      /* Some calls are known not to return.  */
	      fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
	      break;

	    case GIMPLE_ASSIGN:
	      /* A GIMPLE_ASSIGN may throw internally and thus be considered
		 control-altering.  */
	      if (is_ctrl_altering_stmt (last))
		make_eh_edges (last);
	      fallthru = true;
	      break;

	    case GIMPLE_OMP_PARALLEL:
	    case GIMPLE_OMP_TASK:
	    case GIMPLE_OMP_FOR:
	    case GIMPLE_OMP_SINGLE:
	    case GIMPLE_OMP_MASTER:
	    case GIMPLE_OMP_ORDERED:
	    case GIMPLE_OMP_CRITICAL:
	    case GIMPLE_OMP_SECTION:
	      /* These open a new OMP region; edges are completed when the
		 matching GIMPLE_OMP_RETURN is seen.  */
	      cur_region = new_omp_region (bb, code, cur_region);
	      fallthru = true;
	      break;

	    case GIMPLE_OMP_SECTIONS:
	      cur_region = new_omp_region (bb, code, cur_region);
	      fallthru = true;
	      break;

	    case GIMPLE_OMP_SECTIONS_SWITCH:
	      fallthru = false;
	      break;

	    case GIMPLE_OMP_ATOMIC_LOAD:
	    case GIMPLE_OMP_ATOMIC_STORE:
	      fallthru = true;
	      break;

	    case GIMPLE_OMP_RETURN:
	      /* In the case of a GIMPLE_OMP_SECTION, the edge will go
		 somewhere other than the next block.  This will be
		 created later.  */
	      cur_region->exit = bb;
	      fallthru = cur_region->type != GIMPLE_OMP_SECTION;
	      cur_region = cur_region->outer;
	      break;

	    case GIMPLE_OMP_CONTINUE:
	      cur_region->cont = bb;
	      switch (cur_region->type)
		{
		case GIMPLE_OMP_FOR:
		  /* Mark all GIMPLE_OMP_FOR and GIMPLE_OMP_CONTINUE
		     succs edges as abnormal to prevent splitting
		     them.  */
		  single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
		  /* Make the loopback edge.  */
		  make_edge (bb, single_succ (cur_region->entry),
			     EDGE_ABNORMAL);

		  /* Create an edge from GIMPLE_OMP_FOR to exit, which
		     corresponds to the case that the body of the loop
		     is not executed at all.  */
		  make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
		  make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
		  fallthru = false;
		  break;

		case GIMPLE_OMP_SECTIONS:
		  /* Wire up the edges into and out of the nested sections.  */
		  {
		    basic_block switch_bb = single_succ (cur_region->entry);

		    struct omp_region *i;
		    for (i = cur_region->inner; i ; i = i->next)
		      {
			gcc_assert (i->type == GIMPLE_OMP_SECTION);
			make_edge (switch_bb, i->entry, 0);
			make_edge (i->exit, bb, EDGE_FALLTHRU);
		      }

		    /* Make the loopback edge to the block with
		       GIMPLE_OMP_SECTIONS_SWITCH.  */
		    make_edge (bb, switch_bb, 0);

		    /* Make the edge from the switch to exit.  */
		    make_edge (switch_bb, bb->next_bb, 0);
		    fallthru = false;
		  }
		  break;

		default:
		  gcc_unreachable ();
		}
	      break;

	    default:
	      gcc_assert (!stmt_ends_bb_p (last));
	      fallthru = true;
	    }
	}
      else
	fallthru = true;

      if (fallthru)
	{
	  make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
	  if (last)
	    assign_discriminator (gimple_location (last), bb->next_bb);
	}
    }

  if (root_omp_region)
    free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}
682 /* Trivial hash function for a location_t. ITEM is a pointer to
683 a hash table entry that maps a location_t to a discriminator. */
685 static unsigned int
686 locus_map_hash (const void *item)
688 return ((const struct locus_discrim_map *) item)->locus;
691 /* Equality function for the locus-to-discriminator map. VA and VB
692 point to the two hash table entries to compare. */
694 static int
695 locus_map_eq (const void *va, const void *vb)
697 const struct locus_discrim_map *a = (const struct locus_discrim_map *) va;
698 const struct locus_discrim_map *b = (const struct locus_discrim_map *) vb;
699 return a->locus == b->locus;
/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  Allocates a fresh map entry on first use of LOCUS;
   entries are freed by the table's free function on htab_delete.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = (struct locus_discrim_map **)
    htab_find_slot_with_hash (discriminator_per_locus, (void *) &item,
			      (hashval_t) locus, INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
730 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
732 static bool
733 same_line_p (location_t locus1, location_t locus2)
735 expanded_location from, to;
737 if (locus1 == locus2)
738 return true;
740 from = expand_location (locus1);
741 to = expand_location (locus2);
743 if (from.line != to.line)
744 return false;
745 if (from.file == to.file)
746 return true;
747 return (from.file != NULL
748 && to.file != NULL
749 && strcmp (from.file, to.file) == 0);
/* Assign a unique discriminator value to block BB if it begins at the same
   LOCUS as its predecessor block.  Does nothing for an unknown locus or
   if BB already has a discriminator.  */

static void
assign_discriminator (location_t locus, basic_block bb)
{
  gimple to_stmt;

  if (locus == 0 || bb->discriminator != 0)
    return;

  to_stmt = first_non_label_stmt (bb);
  if (to_stmt && same_line_p (locus, gimple_location (to_stmt)))
    bb->discriminator = next_discriminator_for_locus (locus);
}
/* Create the edges for a GIMPLE_COND starting at block BB: a TRUE edge
   to the then-label's block and a FALSE edge to the else-label's block,
   recording goto locations for diagnostics.  Clears the labels from
   the statement afterwards, as the edges now carry that information.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;
  location_t entry_locus;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  entry_locus = gimple_location (entry);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  assign_discriminator (entry_locus, then_bb);
  e->goto_locus = gimple_location (then_stmt);
  if (e->goto_locus)
    e->goto_block = gimple_block (then_stmt);
  /* The false edge may already exist (make_edge can return NULL);
     only decorate it when it was actually created.  */
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    {
      assign_discriminator (entry_locus, else_bb);
      e->goto_locus = gimple_location (else_stmt);
      if (e->goto_locus)
	e->goto_block = gimple_block (else_stmt);
    }

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the TREE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  Returns false to continue traversal.  */

static bool
edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
		       void *data ATTRIBUTE_UNUSED)
{
  tree t, next;

  for (t = (tree) *value; t; t = next)
    {
      next = TREE_CHAIN (t);
      TREE_CHAIN (t) = NULL;
    }

  *value = NULL;
  return false;
}
/* Start recording information mapping edges to case labels.
   Must not already be recording (asserted).  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = pointer_map_create ();
}
845 /* Return nonzero if we are recording information for case labels. */
847 static bool
848 recording_case_labels_p (void)
850 return (edge_to_cases != NULL);
/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  /* Clear the TREE_CHAINs first so SWITCH_EXPR copying is not
     confused by stale chains (see edge_to_cases_cleanup).  */
  pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
  pointer_map_destroy (edge_to_cases);
  edge_to_cases = NULL;
}
/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gimple t)
{
  void **slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = pointer_map_contains (edge_to_cases, e);
  if (slot)
    return (tree) *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      slot = pointer_map_insert (edge_to_cases, this_edge);
      TREE_CHAIN (elt) = (tree) *slot;
      *slot = elt;
    }

  return (tree) *pointer_map_contains (edge_to_cases, e);
}
/* Create the edges for a GIMPLE_SWITCH starting at block BB: one plain
   edge from BB to each case label's block, assigning discriminators
   along the way.  */

static void
make_gimple_switch_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  location_t entry_locus;
  size_t i, n;

  entry_locus = gimple_location (entry);

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
      assign_discriminator (entry_locus, label_bb);
    }
}
/* Return the basic block holding label DEST in function IFUN, or NULL
   if the label is not mapped.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if ((errorcount || sorrycount) && uid < 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
      <= (unsigned int) uid)
    return NULL;
  return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
}
/* Create edges for an abnormal goto statement at block BB.  If FOR_CALL
   is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR.
   Scans the leading labels of every block; FORCED_LABEL targets get an
   edge for computed gotos, DECL_NONLOCAL targets for calls.  */

void
make_abnormal_goto_edges (basic_block bb, bool for_call)
{
  basic_block target_bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (target_bb)
    for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
	gimple label_stmt = gsi_stmt (gsi);
	tree target;

	/* Labels can only appear at the start of a block; stop at the
	   first non-label.  */
	if (gimple_code (label_stmt) != GIMPLE_LABEL)
	  break;

	target = gimple_label_label (label_stmt);

	/* Make an edge to every label block that has been marked as a
	   potential target for a computed goto or a non-local goto.  */
	if ((FORCED_LABEL (target) && !for_call)
	    || (DECL_NONLOCAL (target) && for_call))
	  {
	    make_edge (bb, target_bb, EDGE_ABNORMAL);
	    break;
	  }
      }
}
/* Create edges for a goto statement at block BB.  A simple goto becomes
   a fallthru edge and the goto statement itself is removed; a computed
   goto gets abnormal edges to all potential targets.  */

static void
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      assign_discriminator (e->goto_locus, label_bb);
      if (e->goto_locus)
	e->goto_block = gimple_block (goto_t);
      /* The edge now encodes the jump; the statement is redundant.  */
      gsi_remove (&last, true);
      return;
    }

  /* A computed GOTO creates abnormal edges.  */
  make_abnormal_goto_edges (bb, false);
}
1011 /*---------------------------------------------------------------------------
1012 Flowgraph analysis
1013 ---------------------------------------------------------------------------*/
1015 /* Cleanup useless labels in basic blocks. This is something we wish
1016 to do early because it allows us to group case labels before creating
1017 the edges for the CFG, and it speeds up block statement iterators in
1018 all passes later on.
1019 We rerun this pass after CFG is created, to get rid of the labels that
1020 are no longer referenced. After then we do not run it any more, since
1021 (almost) no new labels should be created. */
/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;
/* Callback for for_each_eh_region.  Helper for cleanup_dead_labels:
   redirect REGION's label to the leading label of its block and mark
   that label as used.  */
static void
update_eh_label (struct eh_region_d *region)
{
  tree old_label = get_eh_region_tree_label (region);
  if (old_label)
    {
      tree new_label;
      basic_block bb = label_to_block (old_label);

      /* ??? After optimizing, there may be EH regions with labels
	 that have already been removed from the function body, so
	 there is no basic block for them.  */
      if (! bb)
	return;

      new_label = label_for_bb[bb->index].label;
      label_for_bb[bb->index].used = true;
      set_eh_region_tree_label (region, new_label);
    }
}
/* Given LABEL return the first label in the same basic block, marking
   it used.  Relies on label_for_bb having been filled in by
   cleanup_dead_labels.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}
1075 /* Cleanup redundant labels. This is a three-step process:
1076 1) Find the leading label for each block.
1077 2) Redirect all references to labels to the leading labels.
1078 3) Cleanup all useless labels. */
1080 void
1081 cleanup_dead_labels (void)
1083 basic_block bb;
/* One record per basic block, zero-initialized; freed at the end of
   this function.  */
1084 label_for_bb = XCNEWVEC (struct label_record, last_basic_block);
1086 /* Find a suitable label for each block. We use the first user-defined
1087 label if there is one, or otherwise just the first label we see. */
1088 FOR_EACH_BB (bb)
1090 gimple_stmt_iterator i;
1092 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1094 tree label;
1095 gimple stmt = gsi_stmt (i);
/* Labels only appear at the start of a block; stop scanning at the
   first non-label statement.  */
1097 if (gimple_code (stmt) != GIMPLE_LABEL)
1098 break;
1100 label = gimple_label_label (stmt);
1102 /* If we have not yet seen a label for the current block,
1103 remember this one and see if there are more labels. */
1104 if (!label_for_bb[bb->index].label)
1106 label_for_bb[bb->index].label = label;
1107 continue;
1110 /* If we did see a label for the current block already, but it
1111 is an artificially created label, replace it if the current
1112 label is a user defined label. */
1113 if (!DECL_ARTIFICIAL (label)
1114 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
1116 label_for_bb[bb->index].label = label;
/* A user-defined label is the best choice; no need to look further.  */
1117 break;
1122 /* Now redirect all jumps/branches to the selected label.
1123 First do so for each block ending in a control statement. */
1124 FOR_EACH_BB (bb)
1126 gimple stmt = last_stmt (bb);
1127 if (!stmt)
1128 continue;
1130 switch (gimple_code (stmt))
1132 case GIMPLE_COND:
1134 tree true_label = gimple_cond_true_label (stmt);
1135 tree false_label = gimple_cond_false_label (stmt);
1137 if (true_label)
1138 gimple_cond_set_true_label (stmt, main_block_label (true_label));
1139 if (false_label)
1140 gimple_cond_set_false_label (stmt, main_block_label (false_label));
1141 break;
1144 case GIMPLE_SWITCH:
1146 size_t i, n = gimple_switch_num_labels (stmt);
1148 /* Replace all destination labels. */
1149 for (i = 0; i < n; ++i)
1151 tree case_label = gimple_switch_label (stmt, i);
1152 tree label = main_block_label (CASE_LABEL (case_label));
1153 CASE_LABEL (case_label) = label;
1155 break;
1158 /* We have to handle gotos until they're removed, and we don't
1159 remove them until after we've created the CFG edges. */
1160 case GIMPLE_GOTO:
/* Computed gotos carry label addresses, not labels; leave them alone.  */
1161 if (!computed_goto_p (stmt))
1163 tree new_dest = main_block_label (gimple_goto_dest (stmt));
1164 gimple_goto_set_dest (stmt, new_dest);
1165 break;
1168 default:
1169 break;
/* Also redirect labels referenced from EH region trees.  */
1173 for_each_eh_region (update_eh_label);
1175 /* Finally, purge dead labels. All user-defined labels and labels that
1176 can be the target of non-local gotos and labels which have their
1177 address taken are preserved. */
1178 FOR_EACH_BB (bb)
1180 gimple_stmt_iterator i;
1181 tree label_for_this_bb = label_for_bb[bb->index].label;
1183 if (!label_for_this_bb)
1184 continue;
1186 /* If the main label of the block is unused, we may still remove it. */
1187 if (!label_for_bb[bb->index].used)
1188 label_for_this_bb = NULL;
1190 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
1192 tree label;
1193 gimple stmt = gsi_stmt (i);
1195 if (gimple_code (stmt) != GIMPLE_LABEL)
1196 break;
1198 label = gimple_label_label (stmt);
/* Keep the chosen main label, user labels, non-local-goto targets
   and address-taken labels; anything else is removable.  */
1200 if (label == label_for_this_bb
1201 || !DECL_ARTIFICIAL (label)
1202 || DECL_NONLOCAL (label)
1203 || FORCED_LABEL (label))
1204 gsi_next (&i);
1205 else
1206 gsi_remove (&i, true);
1210 free (label_for_bb);
1213 /* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1214 and scan the sorted vector of cases. Combine the ones jumping to the
1215 same label.
1216 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1218 void
1219 group_case_labels (void)
1221 basic_block bb;
1223 FOR_EACH_BB (bb)
1225 gimple stmt = last_stmt (bb);
1226 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1228 int old_size = gimple_switch_num_labels (stmt);
1229 int i, j, new_size = old_size;
1230 tree default_case = NULL_TREE;
1231 tree default_label = NULL_TREE;
1232 bool has_default;
1234 /* The default label is always the first case in a switch
1235 statement after gimplification if it was not optimized
1236 away */
1237 if (!CASE_LOW (gimple_switch_default_label (stmt))
1238 && !CASE_HIGH (gimple_switch_default_label (stmt)))
1240 default_case = gimple_switch_default_label (stmt);
1241 default_label = CASE_LABEL (default_case);
1242 has_default = true;
1244 else
1245 has_default = false;
1247 /* Look for possible opportunities to merge cases. */
/* Skip the default case (slot 0) when it is present.  */
1248 if (has_default)
1249 i = 1;
1250 else
1251 i = 0;
1252 while (i < old_size)
1254 tree base_case, base_label, base_high;
1255 base_case = gimple_switch_label (stmt, i);
1257 gcc_assert (base_case);
1258 base_label = CASE_LABEL (base_case);
1260 /* Discard cases that have the same destination as the
1261 default case. */
1262 if (base_label == default_label)
/* NULL the slot; it is squeezed out by the compression loop below.  */
1264 gimple_switch_set_label (stmt, i, NULL_TREE);
1265 i++;
1266 new_size--;
1267 continue;
1270 base_high = CASE_HIGH (base_case)
1271 ? CASE_HIGH (base_case)
1272 : CASE_LOW (base_case);
1273 i++;
1275 /* Try to merge case labels. Break out when we reach the end
1276 of the label vector or when we cannot merge the next case
1277 label with the current one. */
1278 while (i < old_size)
1280 tree merge_case = gimple_switch_label (stmt, i);
1281 tree merge_label = CASE_LABEL (merge_case);
/* T is base_high + 1, i.e. the value the next case must start at
   for the ranges to be consecutive.  */
1282 tree t = int_const_binop (PLUS_EXPR, base_high,
1283 integer_one_node, 1);
1285 /* Merge the cases if they jump to the same place,
1286 and their ranges are consecutive. */
1287 if (merge_label == base_label
1288 && tree_int_cst_equal (CASE_LOW (merge_case), t))
1290 base_high = CASE_HIGH (merge_case) ?
1291 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1292 CASE_HIGH (base_case) = base_high;
1293 gimple_switch_set_label (stmt, i, NULL_TREE);
1294 new_size--;
1295 i++;
1297 else
1298 break;
1302 /* Compress the case labels in the label vector, and adjust the
1303 length of the vector. */
1304 for (i = 0, j = 0; i < new_size; i++)
1306 while (! gimple_switch_label (stmt, j))
1307 j++;
1308 gimple_switch_set_label (stmt, i,
1309 gimple_switch_label (stmt, j++));
1312 gcc_assert (new_size <= old_size);
1313 gimple_switch_set_num_labels (stmt, new_size);
1318 /* Checks whether we can merge block B into block A. */
1320 static bool
1321 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1323 gimple stmt;
1324 gimple_stmt_iterator gsi;
1325 gimple_seq phis;
/* A must fall through on a single, ordinary (non-abnormal, non-EH)
   edge to B, and B must have A as its only predecessor.  */
1327 if (!single_succ_p (a))
1328 return false;
1330 if (single_succ_edge (a)->flags & (EDGE_ABNORMAL | EDGE_EH))
1331 return false;
1333 if (single_succ (a) != b)
1334 return false;
1336 if (!single_pred_p (b))
1337 return false;
1339 if (b == EXIT_BLOCK_PTR)
1340 return false;
1342 /* If A ends by a statement causing exceptions or something similar, we
1343 cannot merge the blocks. */
1344 stmt = last_stmt (a);
1345 if (stmt && stmt_ends_bb_p (stmt))
1346 return false;
1348 /* Do not allow a block with only a non-local label to be merged. */
1349 if (stmt
1350 && gimple_code (stmt) == GIMPLE_LABEL
1351 && DECL_NONLOCAL (gimple_label_label (stmt)))
1352 return false;
1354 /* It must be possible to eliminate all phi nodes in B. If ssa form
1355 is not up-to-date, we cannot eliminate any phis; however, if only
1356 some symbols as whole are marked for renaming, this is not a problem,
1357 as phi nodes for those symbols are irrelevant in updating anyway. */
1358 phis = phi_nodes (b);
1359 if (!gimple_seq_empty_p (phis))
1361 gimple_stmt_iterator i;
1363 if (name_mappings_registered_p ())
1364 return false;
1366 for (i = gsi_start (phis); !gsi_end_p (i); gsi_next (&i))
1368 gimple phi = gsi_stmt (i);
/* Virtual PHI results must be copy-propagatable into their single
   argument for the merge to eliminate the PHI.  */
1370 if (!is_gimple_reg (gimple_phi_result (phi))
1371 && !may_propagate_copy (gimple_phi_result (phi),
1372 gimple_phi_arg_def (phi, 0)))
1373 return false;
1377 /* Do not remove user labels. */
1378 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
1380 stmt = gsi_stmt (gsi);
1381 if (gimple_code (stmt) != GIMPLE_LABEL)
1382 break;
1383 if (!DECL_ARTIFICIAL (gimple_label_label (stmt)))
1384 return false;
1387 /* Protect the loop latches. */
1388 if (current_loops
1389 && b->loop_father->latch == b)
1390 return false;
1392 return true;
1395 /* Replaces all uses of NAME by VAL. */
1397 void
1398 replace_uses_by (tree name, tree val)
1400 imm_use_iterator imm_iter;
1401 use_operand_p use;
1402 gimple stmt;
1403 edge e;
1405 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1407 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1409 replace_exp (use, val);
1411 if (gimple_code (stmt) == GIMPLE_PHI)
1413 e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
1414 if (e->flags & EDGE_ABNORMAL)
1416 /* This can only occur for virtual operands, since
1417 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1418 would prevent replacement. */
1419 gcc_assert (!is_gimple_reg (name));
1420 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
/* PHI nodes are left as-is; only ordinary statements are refolded
   and have their EH information refreshed below.  */
1425 if (gimple_code (stmt) != GIMPLE_PHI)
1427 size_t i;
1429 fold_stmt_inplace (stmt);
1430 if (cfgcleanup_altered_bbs)
1431 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1433 /* FIXME. This should go in update_stmt. */
1434 for (i = 0; i < gimple_num_ops (stmt); i++)
1436 tree op = gimple_op (stmt, i);
1437 /* Operands may be empty here. For example, the labels
1438 of a GIMPLE_COND are nulled out following the creation
1439 of the corresponding CFG edges. */
1440 if (op && TREE_CODE (op) == ADDR_EXPR)
1441 recompute_tree_invariant_for_addr_expr (op);
1444 maybe_clean_or_replace_eh_stmt (stmt, stmt);
1445 update_stmt (stmt);
/* Every use must have been rewritten by now.  */
1449 gcc_assert (has_zero_uses (name));
1451 /* Also update the trees stored in loop structures. */
1452 if (current_loops)
1454 struct loop *loop;
1455 loop_iterator li;
1457 FOR_EACH_LOOP (li, loop, 0)
1459 substitute_in_loop_info (loop, name, val);
1464 /* Merge block B into block A. */
1466 static void
1467 gimple_merge_blocks (basic_block a, basic_block b)
1469 gimple_stmt_iterator last, gsi, psi;
1470 gimple_seq phis = phi_nodes (b);
1472 if (dump_file)
1473 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1475 /* Remove all single-valued PHI nodes from block B of the form
1476 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1477 gsi = gsi_last_bb (a);
1478 for (psi = gsi_start (phis); !gsi_end_p (psi); )
1480 gimple phi = gsi_stmt (psi);
1481 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1482 gimple copy;
1483 bool may_replace_uses = !is_gimple_reg (def)
1484 || may_propagate_copy (def, use);
1486 /* In case we maintain loop closed ssa form, do not propagate arguments
1487 of loop exit phi nodes. */
1488 if (current_loops
1489 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1490 && is_gimple_reg (def)
1491 && TREE_CODE (use) == SSA_NAME
1492 && a->loop_father != b->loop_father)
1493 may_replace_uses = false;
1495 if (!may_replace_uses)
1497 gcc_assert (is_gimple_reg (def));
1499 /* Note that just emitting the copies is fine -- there is no problem
1500 with ordering of phi nodes. This is because A is the single
1501 predecessor of B, therefore results of the phi nodes cannot
1502 appear as arguments of the phi nodes. */
1503 copy = gimple_build_assign (def, use);
1504 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
/* Keep the PHI's result alive (release_lhs == false): the copy
   just emitted now defines it.  */
1505 remove_phi_node (&psi, false);
1507 else
1509 /* If we deal with a PHI for virtual operands, we can simply
1510 propagate these without fussing with folding or updating
1511 the stmt. */
1512 if (!is_gimple_reg (def))
1514 imm_use_iterator iter;
1515 use_operand_p use_p;
1516 gimple stmt;
1518 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1519 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1520 SET_USE (use_p, use);
1522 else
1523 replace_uses_by (def, use);
/* The result has no remaining uses; release it with the PHI.  */
1525 remove_phi_node (&psi, true);
1529 /* Ensure that B follows A. */
1530 move_block_after (b, a);
1532 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1533 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1535 /* Remove labels from B and set gimple_bb to A for other statements. */
1536 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
1538 if (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
1540 gimple label = gsi_stmt (gsi);
1542 gsi_remove (&gsi, false);
1544 /* Now that we can thread computed gotos, we might have
1545 a situation where we have a forced label in block B
1546 However, the label at the start of block B might still be
1547 used in other ways (think about the runtime checking for
1548 Fortran assigned gotos). So we can not just delete the
1549 label. Instead we move the label to the start of block A. */
1550 if (FORCED_LABEL (gimple_label_label (label)))
1552 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
1553 gsi_insert_before (&dest_gsi, label, GSI_NEW_STMT);
1556 else
1558 gimple_set_bb (gsi_stmt (gsi), a);
1559 gsi_next (&gsi);
1563 /* Merge the sequences. */
1564 last = gsi_last_bb (a);
1565 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
1566 set_bb_seq (b, NULL);
1568 if (cfgcleanup_altered_bbs)
1569 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
1573 /* Return the one of two successors of BB that is not reachable by a
1574 complex edge, if there is one. Else, return BB. We use
1575 this in optimizations that use post-dominators for their heuristics,
1576 to catch the cases in C++ where function calls are involved. */
1578 basic_block
1579 single_noncomplex_succ (basic_block bb)
1581 edge e0, e1;
1582 if (EDGE_COUNT (bb->succs) != 2)
1583 return bb;
1585 e0 = EDGE_SUCC (bb, 0);
1586 e1 = EDGE_SUCC (bb, 1);
1587 if (e0->flags & EDGE_COMPLEX)
1588 return e1->dest;
1589 if (e1->flags & EDGE_COMPLEX)
1590 return e0->dest;
1592 return bb;
1596 /* Walk the function tree removing unnecessary statements.
1598 * Empty statement nodes are removed
1600 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1602 * Unnecessary COND_EXPRs are removed
1604 * Some unnecessary BIND_EXPRs are removed
1606 * GOTO_EXPRs immediately preceding destination are removed.
1608 Clearly more work could be done. The trick is doing the analysis
1609 and removal fast enough to be a net improvement in compile times.
1611 Note that when we remove a control structure such as a COND_EXPR
1612 BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1613 to ensure we eliminate all the useless code. */
/* State threaded through the remove_useless_stmts_* walkers.  */
1615 struct rus_data
/* True when a simplification was made that may expose further ones;
   the driver reruns the whole walk until this stays false.  */
1617 bool repeat;
/* True when the sequence walked so far may throw an exception.  */
1618 bool may_throw;
/* True when the sequence walked so far may branch (goto/return/cond).  */
1619 bool may_branch;
/* True when at least one label has been seen.  */
1620 bool has_label;
/* True when the immediately preceding statement was a simple goto;
   last_goto_gsi then points at it so that a following label naming the
   same destination can elide it.  */
1621 bool last_was_goto;
1622 gimple_stmt_iterator last_goto_gsi;
1626 static void remove_useless_stmts_1 (gimple_stmt_iterator *gsi, struct rus_data *);
1628 /* Given a statement sequence, find the first executable statement with
1629 location information, and warn that it is unreachable. When searching,
1630 descend into containers in execution order. */
1632 static bool
1633 remove_useless_stmts_warn_notreached (gimple_seq stmts)
1635 gimple_stmt_iterator gsi;
1637 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
1639 gimple stmt = gsi_stmt (gsi);
1641 if (gimple_has_location (stmt))
1643 location_t loc = gimple_location (stmt);
1644 if (LOCATION_LINE (loc) > 0)
1646 warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
/* Warn at most once per unreachable region.  */
1647 return true;
1651 switch (gimple_code (stmt))
1653 /* Unfortunately, we need the CFG now to detect unreachable
1654 branches in a conditional, so conditionals are not handled here. */
1656 case GIMPLE_TRY:
1657 if (remove_useless_stmts_warn_notreached (gimple_try_eval (stmt)))
1658 return true;
1659 if (remove_useless_stmts_warn_notreached (gimple_try_cleanup (stmt)))
1660 return true;
1661 break;
1663 case GIMPLE_CATCH:
1664 return remove_useless_stmts_warn_notreached (gimple_catch_handler (stmt));
1666 case GIMPLE_EH_FILTER:
1667 return remove_useless_stmts_warn_notreached (gimple_eh_filter_failure (stmt));
1669 case GIMPLE_BIND:
1670 return remove_useless_stmts_warn_notreached (gimple_bind_body (stmt));
1672 default:
1673 break;
/* No statement with usable location information was found.  */
1677 return false;
1680 /* Helper for remove_useless_stmts_1. Handle GIMPLE_COND statements. */
1682 static void
1683 remove_useless_stmts_cond (gimple_stmt_iterator *gsi, struct rus_data *data)
1685 gimple stmt = gsi_stmt (*gsi);
1687 /* The folded result must still be a conditional statement. */
1688 fold_stmt (gsi);
1689 gcc_assert (gsi_stmt (*gsi) == stmt);
1691 data->may_branch = true;
1693 /* Replace trivial conditionals with gotos. */
1694 if (gimple_cond_true_p (stmt))
1696 /* Goto THEN label. */
1697 tree then_label = gimple_cond_true_label (stmt);
1699 gsi_replace (gsi, gimple_build_goto (then_label), false);
1700 data->last_goto_gsi = *gsi;
1701 data->last_was_goto = true;
1702 data->repeat = true;
1704 else if (gimple_cond_false_p (stmt))
1706 /* Goto ELSE label. */
1707 tree else_label = gimple_cond_false_label (stmt);
1709 gsi_replace (gsi, gimple_build_goto (else_label), false);
1710 data->last_goto_gsi = *gsi;
1711 data->last_was_goto = true;
1712 data->repeat = true;
1714 else
1716 tree then_label = gimple_cond_true_label (stmt);
1717 tree else_label = gimple_cond_false_label (stmt);
/* Both arms jump to the same place: the condition is irrelevant.  */
1719 if (then_label == else_label)
1721 /* Goto common destination. */
1722 gsi_replace (gsi, gimple_build_goto (then_label), false);
1723 data->last_goto_gsi = *gsi;
1724 data->last_was_goto = true;
1725 data->repeat = true;
1729 gsi_next (gsi);
/* NOTE(review): brace structure is not visible here; this assignment
   appears to belong to the non-simplified path only — confirm against
   the pristine source before relying on it.  */
1731 data->last_was_goto = false;
1734 /* Helper for remove_useless_stmts_1.
1735 Handle the try-finally case for GIMPLE_TRY statements. */
1737 static void
1738 remove_useless_stmts_tf (gimple_stmt_iterator *gsi, struct rus_data *data)
1740 bool save_may_branch, save_may_throw;
1741 bool this_may_branch, this_may_throw;
1743 gimple_seq eval_seq, cleanup_seq;
1744 gimple_stmt_iterator eval_gsi, cleanup_gsi;
1746 gimple stmt = gsi_stmt (*gsi);
1748 /* Collect may_branch and may_throw information for the body only. */
1749 save_may_branch = data->may_branch;
1750 save_may_throw = data->may_throw;
1751 data->may_branch = false;
1752 data->may_throw = false;
1753 data->last_was_goto = false;
1755 eval_seq = gimple_try_eval (stmt);
1756 eval_gsi = gsi_start (eval_seq);
1757 remove_useless_stmts_1 (&eval_gsi, data);
/* Snapshot the body-only flags, then restore the caller's view.  */
1759 this_may_branch = data->may_branch;
1760 this_may_throw = data->may_throw;
1761 data->may_branch |= save_may_branch;
1762 data->may_throw |= save_may_throw;
1763 data->last_was_goto = false;
1765 cleanup_seq = gimple_try_cleanup (stmt);
1766 cleanup_gsi = gsi_start (cleanup_seq);
1767 remove_useless_stmts_1 (&cleanup_gsi, data);
1769 /* If the body is empty, then we can emit the FINALLY block without
1770 the enclosing TRY_FINALLY_EXPR. */
1771 if (gimple_seq_empty_p (eval_seq))
1773 gsi_insert_seq_before (gsi, cleanup_seq, GSI_SAME_STMT);
1774 gsi_remove (gsi, false);
1775 data->repeat = true;
1778 /* If the handler is empty, then we can emit the TRY block without
1779 the enclosing TRY_FINALLY_EXPR. */
1780 else if (gimple_seq_empty_p (cleanup_seq))
1782 gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
1783 gsi_remove (gsi, false);
1784 data->repeat = true;
1787 /* If the body neither throws, nor branches, then we can safely
1788 string the TRY and FINALLY blocks together. */
1789 else if (!this_may_branch && !this_may_throw)
1791 gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
1792 gsi_insert_seq_before (gsi, cleanup_seq, GSI_SAME_STMT);
1793 gsi_remove (gsi, false);
1794 data->repeat = true;
1796 else
1797 gsi_next (gsi);
1800 /* Helper for remove_useless_stmts_1.
1801 Handle the try-catch case for GIMPLE_TRY statements. */
1803 static void
1804 remove_useless_stmts_tc (gimple_stmt_iterator *gsi, struct rus_data *data)
1806 bool save_may_throw, this_may_throw;
1808 gimple_seq eval_seq, cleanup_seq, handler_seq, failure_seq;
1809 gimple_stmt_iterator eval_gsi, cleanup_gsi, handler_gsi, failure_gsi;
1811 gimple stmt = gsi_stmt (*gsi);
1813 /* Collect may_throw information for the body only. */
1814 save_may_throw = data->may_throw;
1815 data->may_throw = false;
1816 data->last_was_goto = false;
1818 eval_seq = gimple_try_eval (stmt);
1819 eval_gsi = gsi_start (eval_seq);
1820 remove_useless_stmts_1 (&eval_gsi, data);
1822 this_may_throw = data->may_throw;
1823 data->may_throw = save_may_throw;
1825 cleanup_seq = gimple_try_cleanup (stmt);
1827 /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
1828 if (!this_may_throw)
1830 if (warn_notreached)
/* The handler can never run; warn about the dead code if asked.  */
1832 remove_useless_stmts_warn_notreached (cleanup_seq);
1834 gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
1835 gsi_remove (gsi, false);
1836 data->repeat = true;
1837 return;
1840 /* Process the catch clause specially. We may be able to tell that
1841 no exceptions propagate past this point. */
1843 this_may_throw = true;
1844 cleanup_gsi = gsi_start (cleanup_seq);
1845 stmt = gsi_stmt (cleanup_gsi);
1846 data->last_was_goto = false;
1848 switch (gimple_code (stmt))
1850 case GIMPLE_CATCH:
1851 /* If the first element is a catch, they all must be. */
1852 while (!gsi_end_p (cleanup_gsi))
1854 stmt = gsi_stmt (cleanup_gsi);
1855 /* If we catch all exceptions, then the body does not
1856 propagate exceptions past this point. */
1857 if (gimple_catch_types (stmt) == NULL)
1858 this_may_throw = false;
1859 data->last_was_goto = false;
1860 handler_seq = gimple_catch_handler (stmt);
1861 handler_gsi = gsi_start (handler_seq);
1862 remove_useless_stmts_1 (&handler_gsi, data);
1863 gsi_next (&cleanup_gsi);
1865 gsi_next (gsi);
1866 break;
1868 case GIMPLE_EH_FILTER:
1869 /* If the first element is an eh_filter, it should stand alone. */
1870 if (gimple_eh_filter_must_not_throw (stmt))
1871 this_may_throw = false;
1872 else if (gimple_eh_filter_types (stmt) == NULL)
1873 this_may_throw = false;
1874 failure_seq = gimple_eh_filter_failure (stmt);
1875 failure_gsi = gsi_start (failure_seq);
1876 remove_useless_stmts_1 (&failure_gsi, data);
1877 gsi_next (gsi);
1878 break;
1880 default:
1881 /* Otherwise this is a list of cleanup statements. */
1882 remove_useless_stmts_1 (&cleanup_gsi, data);
1884 /* If the cleanup is empty, then we can emit the TRY block without
1885 the enclosing TRY_CATCH_EXPR. */
1886 if (gimple_seq_empty_p (cleanup_seq))
1888 gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
1889 gsi_remove(gsi, false);
1890 data->repeat = true;
1892 else
1893 gsi_next (gsi);
1894 break;
/* Propagate throwing-ness of the whole construct to the caller.  */
1897 data->may_throw |= this_may_throw;
1900 /* Helper for remove_useless_stmts_1. Handle GIMPLE_BIND statements. */
1902 static void
1903 remove_useless_stmts_bind (gimple_stmt_iterator *gsi, struct rus_data *data ATTRIBUTE_UNUSED)
1905 tree block;
1906 gimple_seq body_seq, fn_body_seq;
1907 gimple_stmt_iterator body_gsi;
1909 gimple stmt = gsi_stmt (*gsi);
1911 /* First remove anything underneath the BIND_EXPR. */
1913 body_seq = gimple_bind_body (stmt);
1914 body_gsi = gsi_start (body_seq);
1915 remove_useless_stmts_1 (&body_gsi, data);
1917 /* If the GIMPLE_BIND has no variables, then we can pull everything
1918 up one level and remove the GIMPLE_BIND, unless this is the toplevel
1919 GIMPLE_BIND for the current function or an inlined function.
1921 When this situation occurs we will want to apply this
1922 optimization again. */
1923 block = gimple_bind_block (stmt);
1924 fn_body_seq = gimple_body (current_function_decl);
1925 if (gimple_bind_vars (stmt) == NULL_TREE
1926 && (gimple_seq_empty_p (fn_body_seq)
1927 || stmt != gimple_seq_first_stmt (fn_body_seq))
1928 && (! block
1929 || ! BLOCK_ABSTRACT_ORIGIN (block)
1930 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1931 != FUNCTION_DECL)))
1933 tree var = NULL_TREE;
1934 /* Even if there are no gimple_bind_vars, there might be other
1935 decls in BLOCK_VARS rendering the GIMPLE_BIND not useless. */
1936 if (block && !BLOCK_NUM_NONLOCALIZED_VARS (block))
1937 for (var = BLOCK_VARS (block); var; var = TREE_CHAIN (var))
1938 if (TREE_CODE (var) == IMPORTED_DECL)
1939 break;
1940 if (var || (block && BLOCK_NUM_NONLOCALIZED_VARS (block)))
1941 gsi_next (gsi);
1942 else
1944 gsi_insert_seq_before (gsi, body_seq, GSI_SAME_STMT);
1945 gsi_remove (gsi, false);
1946 data->repeat = true;
1949 else
1950 gsi_next (gsi);
1953 /* Helper for remove_useless_stmts_1. Handle GIMPLE_GOTO statements. */
1955 static void
1956 remove_useless_stmts_goto (gimple_stmt_iterator *gsi, struct rus_data *data)
1958 gimple stmt = gsi_stmt (*gsi);
1960 tree dest = gimple_goto_dest (stmt);
1962 data->may_branch = true;
1963 data->last_was_goto = false;
1965 /* Record iterator for last goto expr, so that we can delete it if unnecessary. */
1966 if (TREE_CODE (dest) == LABEL_DECL)
1968 data->last_goto_gsi = *gsi;
1969 data->last_was_goto = true;
1972 gsi_next(gsi);
1975 /* Helper for remove_useless_stmts_1. Handle GIMPLE_LABEL statements. */
1977 static void
1978 remove_useless_stmts_label (gimple_stmt_iterator *gsi, struct rus_data *data)
1980 gimple stmt = gsi_stmt (*gsi);
1982 tree label = gimple_label_label (stmt);
1984 data->has_label = true;
1986 /* We do want to jump across non-local label receiver code. */
1987 if (DECL_NONLOCAL (label))
1988 data->last_was_goto = false;
/* A goto immediately followed by its own target is useless.  */
1990 else if (data->last_was_goto
1991 && gimple_goto_dest (gsi_stmt (data->last_goto_gsi)) == label)
1993 /* Replace the preceding GIMPLE_GOTO statement with
1994 a GIMPLE_NOP, which will be subsequently removed.
1995 In this way, we avoid invalidating other iterators
1996 active on the statement sequence. */
1997 gsi_replace(&data->last_goto_gsi, gimple_build_nop(), false);
1998 data->last_was_goto = false;
1999 data->repeat = true;
2002 /* ??? Add something here to delete unused labels. */
2004 gsi_next (gsi);
2008 /* T is CALL_EXPR. Set current_function_calls_* flags. */
2010 void
2011 notice_special_calls (gimple call)
2013 int flags = gimple_call_flags (call);
2015 if (flags & ECF_MAY_BE_ALLOCA)
2016 cfun->calls_alloca = true;
2017 if (flags & ECF_RETURNS_TWICE)
2018 cfun->calls_setjmp = true;
2022 /* Clear flags set by notice_special_calls. Used by dead code removal
2023 to update the flags. */
2025 void
2026 clear_special_calls (void)
2028 cfun->calls_alloca = false;
2029 cfun->calls_setjmp = false;
2032 /* Remove useless statements from a statement sequence, and perform
2033 some preliminary simplifications. */
2035 static void
2036 remove_useless_stmts_1 (gimple_stmt_iterator *gsi, struct rus_data *data)
2038 while (!gsi_end_p (*gsi))
2040 gimple stmt = gsi_stmt (*gsi);
2042 switch (gimple_code (stmt))
2044 case GIMPLE_COND:
2045 remove_useless_stmts_cond (gsi, data);
2046 break;
2048 case GIMPLE_GOTO:
2049 remove_useless_stmts_goto (gsi, data);
2050 break;
2052 case GIMPLE_LABEL:
2053 remove_useless_stmts_label (gsi, data);
2054 break;
2056 case GIMPLE_ASSIGN:
2057 fold_stmt (gsi);
2058 stmt = gsi_stmt (*gsi);
2059 data->last_was_goto = false;
2060 if (stmt_could_throw_p (stmt))
2061 data->may_throw = true;
2062 gsi_next (gsi);
2063 break;
2065 case GIMPLE_ASM:
2066 fold_stmt (gsi);
2067 data->last_was_goto = false;
2068 gsi_next (gsi);
2069 break;
2071 case GIMPLE_CALL:
2072 fold_stmt (gsi);
2073 stmt = gsi_stmt (*gsi);
2074 data->last_was_goto = false;
/* Folding may turn the call into something else; only record
   special-call flags while it is still a call.  */
2075 if (is_gimple_call (stmt))
2076 notice_special_calls (stmt);
2078 /* We used to call update_gimple_call_flags here,
2079 which copied side-effects and nothrows status
2080 from the function decl to the call. In the new
2081 tuplified GIMPLE, the accessors for this information
2082 always consult the function decl, so this copying
2083 is no longer necessary. */
2084 if (stmt_could_throw_p (stmt))
2085 data->may_throw = true;
2086 gsi_next (gsi);
2087 break;
2089 case GIMPLE_RETURN:
2090 fold_stmt (gsi);
2091 data->last_was_goto = false;
2092 data->may_branch = true;
2093 gsi_next (gsi);
2094 break;
2096 case GIMPLE_BIND:
2097 remove_useless_stmts_bind (gsi, data);
2098 break;
2100 case GIMPLE_TRY:
2101 if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
2102 remove_useless_stmts_tc (gsi, data);
2103 else if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
2104 remove_useless_stmts_tf (gsi, data);
2105 else
2106 gcc_unreachable ();
2107 break;
2109 case GIMPLE_CATCH:
/* GIMPLE_CATCH is only ever reached through the cleanup of its
   enclosing GIMPLE_TRY, never from a bare sequence walk.  */
2110 gcc_unreachable ();
2111 break;
2113 case GIMPLE_NOP:
2114 gsi_remove (gsi, false);
2115 break;
2117 case GIMPLE_OMP_FOR:
/* Process the loop pre-body first, then fall through to the common
   OMP body handling below.  */
2119 gimple_seq pre_body_seq = gimple_omp_for_pre_body (stmt);
2120 gimple_stmt_iterator pre_body_gsi = gsi_start (pre_body_seq);
2122 remove_useless_stmts_1 (&pre_body_gsi, data);
2123 data->last_was_goto = false;
2125 /* FALLTHROUGH */
2126 case GIMPLE_OMP_CRITICAL:
2127 case GIMPLE_OMP_CONTINUE:
2128 case GIMPLE_OMP_MASTER:
2129 case GIMPLE_OMP_ORDERED:
2130 case GIMPLE_OMP_SECTION:
2131 case GIMPLE_OMP_SECTIONS:
2132 case GIMPLE_OMP_SINGLE:
2134 gimple_seq body_seq = gimple_omp_body (stmt);
2135 gimple_stmt_iterator body_gsi = gsi_start (body_seq);
2137 remove_useless_stmts_1 (&body_gsi, data);
2138 data->last_was_goto = false;
2139 gsi_next (gsi);
2141 break;
2143 case GIMPLE_OMP_PARALLEL:
2144 case GIMPLE_OMP_TASK:
2146 /* Make sure the outermost GIMPLE_BIND isn't removed
2147 as useless. */
2148 gimple_seq body_seq = gimple_omp_body (stmt);
2149 gimple bind = gimple_seq_first_stmt (body_seq);
2150 gimple_seq bind_seq = gimple_bind_body (bind);
2151 gimple_stmt_iterator bind_gsi = gsi_start (bind_seq);
2153 remove_useless_stmts_1 (&bind_gsi, data);
2154 data->last_was_goto = false;
2155 gsi_next (gsi);
2157 break;
2159 default:
2160 data->last_was_goto = false;
2161 gsi_next (gsi);
2162 break;
2167 /* Walk the function tree, removing useless statements and performing
2168 some preliminary simplifications. */
2170 static unsigned int
2171 remove_useless_stmts (void)
2173 struct rus_data data;
2175 clear_special_calls ();
2179 gimple_stmt_iterator gsi;
2181 gsi = gsi_start (gimple_body (current_function_decl));
2182 memset (&data, 0, sizeof (data));
2183 remove_useless_stmts_1 (&gsi, &data);
2185 while (data.repeat);
2187 #ifdef ENABLE_TYPES_CHECKING
2188 verify_types_in_gimple_seq (gimple_body (current_function_decl));
2189 #endif
2191 return 0;
/* Pass descriptor for the early "useless"-statement removal pass.  It
   requires only PROP_gimple_any, i.e. it runs before the CFG exists.  */
2195 struct gimple_opt_pass pass_remove_useless_stmts =
2198 GIMPLE_PASS,
2199 "useless", /* name */
2200 NULL, /* gate */
2201 remove_useless_stmts, /* execute */
2202 NULL, /* sub */
2203 NULL, /* next */
2204 0, /* static_pass_number */
2205 TV_NONE, /* tv_id */
2206 PROP_gimple_any, /* properties_required */
2207 0, /* properties_provided */
2208 0, /* properties_destroyed */
2209 0, /* todo_flags_start */
2210 TODO_dump_func /* todo_flags_finish */
2214 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2216 static void
2217 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2219 /* Since this block is no longer reachable, we can just delete all
2220 of its PHI nodes. */
2221 remove_phi_nodes (bb);
2223 /* Remove edges to BB's successors. */
2224 while (EDGE_COUNT (bb->succs) > 0)
2225 remove_edge (EDGE_SUCC (bb, 0));
2229 /* Remove statements of basic block BB. */
2231 static void
2232 remove_bb (basic_block bb)
2234 gimple_stmt_iterator i;
2235 source_location loc = UNKNOWN_LOCATION;
2237 if (dump_file)
2239 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2240 if (dump_flags & TDF_DETAILS)
2242 dump_bb (bb, dump_file, 0);
2243 fprintf (dump_file, "\n");
2247 if (current_loops)
2249 struct loop *loop = bb->loop_father;
2251 /* If a loop gets removed, clean up the information associated
2252 with it. */
2253 if (loop->latch == bb
2254 || loop->header == bb)
2255 free_numbers_of_iterations_estimates_loop (loop);
2258 /* Remove all the instructions in the block. */
2259 if (bb_seq (bb) != NULL)
2261 for (i = gsi_start_bb (bb); !gsi_end_p (i);)
2263 gimple stmt = gsi_stmt (i);
2264 if (gimple_code (stmt) == GIMPLE_LABEL
2265 && (FORCED_LABEL (gimple_label_label (stmt))
2266 || DECL_NONLOCAL (gimple_label_label (stmt))))
2268 basic_block new_bb;
2269 gimple_stmt_iterator new_gsi;
2271 /* A non-reachable non-local label may still be referenced.
2272 But it no longer needs to carry the extra semantics of
2273 non-locality. */
2274 if (DECL_NONLOCAL (gimple_label_label (stmt)))
2276 DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
2277 FORCED_LABEL (gimple_label_label (stmt)) = 1;
/* Move the still-referenced label into the previous block so it
   survives the removal of BB.  */
2280 new_bb = bb->prev_bb;
2281 new_gsi = gsi_start_bb (new_bb);
2282 gsi_remove (&i, false);
2283 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2285 else
2287 /* Release SSA definitions if we are in SSA. Note that we
2288 may be called when not in SSA. For example,
2289 final_cleanup calls this function via
2290 cleanup_tree_cfg. */
2291 if (gimple_in_ssa_p (cfun))
2292 release_defs (stmt);
2294 gsi_remove (&i, true);
2297 /* Don't warn for removed gotos. Gotos are often removed due to
2298 jump threading, thus resulting in bogus warnings. Not great,
2299 since this way we lose warnings for gotos in the original
2300 program that are indeed unreachable. */
2301 if (gimple_code (stmt) != GIMPLE_GOTO
2302 && gimple_has_location (stmt)
2303 && !loc)
2304 loc = gimple_location (stmt);
2308 /* If requested, give a warning that the first statement in the
2309 block is unreachable. We walk statements backwards in the
2310 loop above, so the last statement we process is the first statement
2311 in the block. */
2312 if (loc > BUILTINS_LOCATION && LOCATION_LINE (loc) > 0)
2313 warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
2315 remove_phi_nodes_and_edges_for_unreachable_block (bb);
/* Mark the block as no longer holding a gimple sequence.  */
2316 bb->il.gimple = NULL;
2320 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2321 predicate VAL, return the edge that will be taken out of the block.
2322 If VAL does not match a unique edge, NULL is returned. */
2324 edge
2325 find_taken_edge (basic_block bb, tree val)
2327 gimple stmt;
2329 stmt = last_stmt (bb);
2331 gcc_assert (stmt);
2332 gcc_assert (is_ctrl_stmt (stmt));
2334 if (val == NULL)
2335 return NULL;
/* Only a constant VAL pins down a unique destination.  */
2337 if (!is_gimple_min_invariant (val))
2338 return NULL;
2340 if (gimple_code (stmt) == GIMPLE_COND)
2341 return find_taken_edge_cond_expr (bb, val);
2343 if (gimple_code (stmt) == GIMPLE_SWITCH)
2344 return find_taken_edge_switch_expr (bb, val);
2346 if (computed_goto_p (stmt))
2348 /* Only optimize if the argument is a label, if the argument is
2349 not a label then we can not construct a proper CFG.
2351 It may be the case that we only need to allow the LABEL_REF to
2352 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2353 appear inside a LABEL_EXPR just to be safe. */
2354 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2355 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2356 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2357 return NULL;
/* Every control statement kind was handled above.  */
2360 gcc_unreachable ();
2363 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2364 statement, determine which of the outgoing edges will be taken out of the
2365 block. Return NULL if either edge may be taken. */
2367 static edge
2368 find_taken_edge_computed_goto (basic_block bb, tree val)
2370 basic_block dest;
2371 edge e = NULL;
2373 dest = label_to_block (val);
2374 if (dest)
2376 e = find_edge (bb, dest);
2377 gcc_assert (e != NULL);
2380 return e;
2383 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2384 statement, determine which of the two edges will be taken out of the
2385 block. Return NULL if either edge may be taken. */
2387 static edge
2388 find_taken_edge_cond_expr (basic_block bb, tree val)
2390 edge true_edge, false_edge;
2392 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2394 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2395 return (integer_zerop (val) ? false_edge : true_edge);
2398 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2399 statement, determine which edge will be taken out of the block. Return
2400 NULL if any edge may be taken. */
2402 static edge
2403 find_taken_edge_switch_expr (basic_block bb, tree val)
2405 basic_block dest_bb;
2406 edge e;
2407 gimple switch_stmt;
2408 tree taken_case;
2410 switch_stmt = last_stmt (bb);
2411 taken_case = find_case_label_for_value (switch_stmt, val);
2412 dest_bb = label_to_block (CASE_LABEL (taken_case));
2414 e = find_edge (bb, dest_bb);
2415 gcc_assert (e);
2416 return e;
/* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
   We can make optimal use here of the fact that the case labels are
   sorted: We can do a binary search for a case matching VAL.
   If no case label matches, the default label is returned.  */

static tree
find_case_label_for_value (gimple switch_stmt, tree val)
{
  size_t low, high, n = gimple_switch_num_labels (switch_stmt);
  tree default_case = gimple_switch_default_label (switch_stmt);

  /* Binary search over labels [1, n); the midpoint (high + low) / 2 is
     always >= 1 inside the loop, so label 0 (the default label) is
     never probed.  */
  for (low = 0, high = n; high - low > 1; )
    {
      size_t i = (high + low) / 2;
      tree t = gimple_switch_label (switch_stmt, i);
      int cmp;

      /* Cache the result of comparing CASE_LOW and val.  */
      cmp = tree_int_cst_compare (CASE_LOW (t), val);

      if (cmp > 0)
	high = i;
      else
	low = i;

      if (CASE_HIGH (t) == NULL)
	{
	  /* A single-valued case label.  */
	  if (cmp == 0)
	    return t;
	}
      else
	{
	  /* A case range.  We can only handle integer ranges.  */
	  if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
	    return t;
	}
    }

  return default_case;
}
/* Dump a basic block on stderr.  Includes virtual operands and memory
   symbols in the dump (TDF_VOPS | TDF_MEMSYMS).  */

void
gimple_debug_bb (basic_block bb)
{
  gimple_dump_bb (bb, stderr, 0, TDF_VOPS|TDF_MEMSYMS);
}
2471 /* Dump basic block with index N on stderr. */
2473 basic_block
2474 gimple_debug_bb_n (int n)
2476 gimple_debug_bb (BASIC_BLOCK (n));
2477 return BASIC_BLOCK (n);
/* Dump the CFG on stderr.

   FLAGS are the same used by the tree dumping functions
   (see TDF_* in tree-pass.h).  */

void
gimple_debug_cfg (int flags)
{
  gimple_dump_cfg (stderr, flags);
}
/* Dump the program showing basic block boundaries on the given FILE.

   FLAGS are the same used by the tree dumping functions (see TDF_* in
   tree.h).  */

void
gimple_dump_cfg (FILE *file, int flags)
{
  if (flags & TDF_DETAILS)
    {
      const char *funcname
	= lang_hooks.decl_printable_name (current_function_decl, 2);

      /* Summary header: function name and CFG size counters.  */
      fputc ('\n', file);
      fprintf (file, ";; Function %s\n\n", funcname);
      fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
	       n_basic_blocks, n_edges, last_basic_block);

      brief_dump_cfg (file);
      fprintf (file, "\n");
    }

  if (flags & TDF_STATS)
    dump_cfg_stats (file);

  /* Always dump the function body with basic block boundaries marked.  */
  dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
}
/* Dump CFG statistics on FILE.  */

void
dump_cfg_stats (FILE *file)
{
  /* High-water mark of merged labels, preserved across calls.  */
  static long max_num_merged_labels = 0;
  unsigned long size, total = 0;
  long num_edges;
  basic_block bb;
  const char * const fmt_str = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
  const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
  const char * const fmt_str_3 = "%-43s%11lu%c\n";
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);

  fprintf (file, "\nCFG Statistics for %s\n\n", funcname);

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str, "", " Number of ", "Memory");
  fprintf (file, fmt_str, "", " instances ", "used ");
  fprintf (file, "---------------------------------------------------------\n");

  /* Memory consumed by basic block structures.  */
  size = n_basic_blocks * sizeof (struct basic_block_def);
  total += size;
  fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
	   SCALE (size), LABEL (size));

  /* Edge count is the sum of successor counts over all blocks.  */
  num_edges = 0;
  FOR_EACH_BB (bb)
    num_edges += EDGE_COUNT (bb->succs);
  size = num_edges * sizeof (struct edge_def);
  total += size;
  fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
	   LABEL (total));
  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, "\n");

  if (cfg_stats.num_merged_labels > max_num_merged_labels)
    max_num_merged_labels = cfg_stats.num_merged_labels;

  fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
	   cfg_stats.num_merged_labels, max_num_merged_labels);

  fprintf (file, "\n");
}
/* Dump CFG statistics on stderr.  Keep extern so that it's always
   linked in the final executable.  */

void
debug_cfg_stats (void)
{
  dump_cfg_stats (stderr);
}
/* Dump the flowgraph to a .vcg FILE.  Emits one VCG node per basic
   block (plus ENTRY/EXIT) and one VCG edge per CFG edge; fake edges
   are drawn dotted with lower priority.  */

static void
gimple_cfg2vcg (FILE *file)
{
  edge e;
  edge_iterator ei;
  basic_block bb;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);

  /* Write the file header.  */
  fprintf (file, "graph: { title: \"%s\"\n", funcname);
  fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
  fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");

  /* Write blocks and edges.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    {
      fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
	       e->dest->index);

      if (e->flags & EDGE_FAKE)
	fprintf (file, " linestyle: dotted priority: 10");
      else
	fprintf (file, " linestyle: solid priority: 100");

      fprintf (file, " }\n");
    }
  fputc ('\n', file);

  FOR_EACH_BB (bb)
    {
      enum gimple_code head_code, end_code;
      const char *head_name, *end_name;
      int head_line = 0;
      int end_line = 0;
      gimple first = first_stmt (bb);
      gimple last = last_stmt (bb);

      /* Label each node with the code and source line of the block's
	 first and last statements (if the block is non-empty).  */
      if (first)
	{
	  head_code = gimple_code (first);
	  head_name = gimple_code_name[head_code];
	  head_line = get_lineno (first);
	}
      else
	head_name = "no-statement";

      if (last)
	{
	  end_code = gimple_code (last);
	  end_name = gimple_code_name[end_code];
	  end_line = get_lineno (last);
	}
      else
	end_name = "no-statement";

      fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
	       bb->index, bb->index, head_name, head_line, end_name,
	       end_line);

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if (e->dest == EXIT_BLOCK_PTR)
	    fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
	  else
	    fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);

	  if (e->flags & EDGE_FAKE)
	    fprintf (file, " priority: 10 linestyle: dotted");
	  else
	    fprintf (file, " priority: 100 linestyle: solid");

	  fprintf (file, " }\n");
	}

      if (bb->next_bb != EXIT_BLOCK_PTR)
	fputc ('\n', file);
    }

  fputs ("}\n\n", file);
}
2670 /*---------------------------------------------------------------------------
2671 Miscellaneous helpers
2672 ---------------------------------------------------------------------------*/
2674 /* Return true if T represents a stmt that always transfers control. */
2676 bool
2677 is_ctrl_stmt (gimple t)
2679 return gimple_code (t) == GIMPLE_COND
2680 || gimple_code (t) == GIMPLE_SWITCH
2681 || gimple_code (t) == GIMPLE_GOTO
2682 || gimple_code (t) == GIMPLE_RETURN
2683 || gimple_code (t) == GIMPLE_RESX;
2687 /* Return true if T is a statement that may alter the flow of control
2688 (e.g., a call to a non-returning function). */
2690 bool
2691 is_ctrl_altering_stmt (gimple t)
2693 gcc_assert (t);
2695 if (is_gimple_call (t))
2697 int flags = gimple_call_flags (t);
2699 /* A non-pure/const call alters flow control if the current
2700 function has nonlocal labels. */
2701 if (!(flags & (ECF_CONST | ECF_PURE))
2702 && cfun->has_nonlocal_label)
2703 return true;
2705 /* A call also alters control flow if it does not return. */
2706 if (gimple_call_flags (t) & ECF_NORETURN)
2707 return true;
2710 /* OpenMP directives alter control flow. */
2711 if (is_gimple_omp (t))
2712 return true;
2714 /* If a statement can throw, it alters control flow. */
2715 return stmt_can_throw_internal (t);
2719 /* Return true if T is a simple local goto. */
2721 bool
2722 simple_goto_p (gimple t)
2724 return (gimple_code (t) == GIMPLE_GOTO
2725 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2729 /* Return true if T can make an abnormal transfer of control flow.
2730 Transfers of control flow associated with EH are excluded. */
2732 bool
2733 stmt_can_make_abnormal_goto (gimple t)
2735 if (computed_goto_p (t))
2736 return true;
2737 if (is_gimple_call (t))
2738 return gimple_has_side_effects (t) && cfun->has_nonlocal_label;
2739 return false;
/* Return true if STMT should start a new basic block.  PREV_STMT is
   the statement preceding STMT.  It is used when STMT is a label or a
   case label.  Labels should only start a new basic block if their
   previous statement wasn't a label.  Otherwise, sequence of labels
   would generate unnecessary basic blocks that only contain a single
   label.  */

static inline bool
stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
{
  if (stmt == NULL)
    return false;

  /* Labels start a new basic block only if the preceding statement
     wasn't a label of the same type.  This prevents the creation of
     consecutive blocks that have nothing but a single label.  */
  if (gimple_code (stmt) == GIMPLE_LABEL)
    {
      /* Nonlocal and computed GOTO targets always start a new block.  */
      if (DECL_NONLOCAL (gimple_label_label (stmt))
	  || FORCED_LABEL (gimple_label_label (stmt)))
	return true;

      if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
	{
	  /* A label may not be merged into a block headed by a
	     nonlocal label.  */
	  if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
	    return true;

	  /* Merged with the previous label's block; record it for
	     the CFG statistics dump.  */
	  cfg_stats.num_merged_labels++;
	  return false;
	}
      else
	return true;
    }

  return false;
}
2782 /* Return true if T should end a basic block. */
2784 bool
2785 stmt_ends_bb_p (gimple t)
2787 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
/* Remove block annotations and other data structures.  */

void
delete_tree_cfg_annotations (void)
{
  /* Drop the label-to-block map; presumably garbage collected once
     unreferenced -- NOTE(review): confirm against ggc usage.  */
  label_to_block_map = NULL;
}
2799 /* Return the first statement in basic block BB. */
2801 gimple
2802 first_stmt (basic_block bb)
2804 gimple_stmt_iterator i = gsi_start_bb (bb);
2805 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2808 /* Return the first non-label statement in basic block BB. */
2810 static gimple
2811 first_non_label_stmt (basic_block bb)
2813 gimple_stmt_iterator i = gsi_start_bb (bb);
2814 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2815 gsi_next (&i);
2816 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2819 /* Return the last statement in basic block BB. */
2821 gimple
2822 last_stmt (basic_block bb)
2824 gimple_stmt_iterator b = gsi_last_bb (bb);
2825 return !gsi_end_p (b) ? gsi_stmt (b) : NULL;
2828 /* Return the last statement of an otherwise empty block. Return NULL
2829 if the block is totally empty, or if it contains more than one
2830 statement. */
2832 gimple
2833 last_and_only_stmt (basic_block bb)
2835 gimple_stmt_iterator i = gsi_last_bb (bb);
2836 gimple last, prev;
2838 if (gsi_end_p (i))
2839 return NULL;
2841 last = gsi_stmt (i);
2842 gsi_prev (&i);
2843 if (gsi_end_p (i))
2844 return last;
2846 /* Empty statements should no longer appear in the instruction stream.
2847 Everything that might have appeared before should be deleted by
2848 remove_useless_stmts, and the optimizers should just gsi_remove
2849 instead of smashing with build_empty_stmt.
2851 Thus the only thing that should appear here in a block containing
2852 one executable statement is a label. */
2853 prev = gsi_stmt (i);
2854 if (gimple_code (prev) == GIMPLE_LABEL)
2855 return last;
2856 else
2857 return NULL;
/* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE.  */

static void
reinstall_phi_args (edge new_edge, edge old_edge)
{
  edge_var_map_vector v;
  edge_var_map *vm;
  int i;
  gimple_stmt_iterator phis;

  /* Nothing to do if no PHI arguments were queued for OLD_EDGE.  */
  v = redirect_edge_var_map_vector (old_edge);
  if (!v)
    return;

  /* Walk the queued (result, def) pairs and the PHI nodes of the new
     destination in lock step; each pair is asserted to correspond to
     the PHI at the same position.  */
  for (i = 0, phis = gsi_start_phis (new_edge->dest);
       VEC_iterate (edge_var_map, v, i, vm) && !gsi_end_p (phis);
       i++, gsi_next (&phis))
    {
      gimple phi = gsi_stmt (phis);
      tree result = redirect_edge_var_map_result (vm);
      tree arg = redirect_edge_var_map_def (vm);

      gcc_assert (result == gimple_phi_result (phi));

      add_phi_arg (phi, arg, new_edge);
    }

  /* The queued map has been consumed; release it.  */
  redirect_edge_var_map_clear (old_edge);
}
2890 /* Returns the basic block after which the new basic block created
2891 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2892 near its "logical" location. This is of most help to humans looking
2893 at debugging dumps. */
2895 static basic_block
2896 split_edge_bb_loc (edge edge_in)
2898 basic_block dest = edge_in->dest;
2900 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
2901 return edge_in->src;
2902 else
2903 return dest->prev_bb;
/* Split a (typically critical) edge EDGE_IN.  Return the new block.
   Abort on abnormal edges.  */

static basic_block
gimple_split_edge (edge edge_in)
{
  basic_block new_bb, after_bb, dest;
  edge new_edge, e;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  dest = edge_in->dest;

  after_bb = split_edge_bb_loc (edge_in);

  new_bb = create_empty_bb (after_bb);
  /* The new block inherits the execution profile of the split edge.  */
  new_bb->frequency = EDGE_FREQUENCY (edge_in);
  new_bb->count = edge_in->count;
  /* The new block falls through to the original destination,
     unconditionally.  */
  new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = edge_in->count;

  e = redirect_edge_and_branch (edge_in, new_bb);
  gcc_assert (e == edge_in);
  /* Redirection queued DEST's PHI arguments; move them onto the new
     fallthru edge.  */
  reinstall_phi_args (new_edge, e);

  return new_bb;
}
/* Callback for walk_tree, check that all elements with address taken are
   properly noticed as such.  The DATA is an int* that is 1 if TP was seen
   inside a PHI node.  Returns the offending sub-tree on error (after
   calling error ()), or NULL if *TP is valid.  */

static tree
verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp, x;

  if (TYPE_P (t))
    *walk_subtrees = 0;

  /* Check operand N for being valid GIMPLE and give error MSG if not.  */
#define CHECK_OP(N, MSG) \
  do { if (!is_gimple_val (TREE_OPERAND (t, N)))		\
       { error (MSG); return TREE_OPERAND (t, N); }} while (0)

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      if (SSA_NAME_IN_FREE_LIST (t))
	{
	  error ("SSA name in freelist but still referenced");
	  return *tp;
	}
      break;

    case INDIRECT_REF:
      x = TREE_OPERAND (t, 0);
      if (!is_gimple_reg (x) && !is_gimple_min_invariant (x))
	{
	  error ("Indirect reference's operand is not a register or a constant.");
	  return x;
	}
      break;

    case ASSERT_EXPR:
      /* Fold the condition; a condition that folds to false can never
	 hold and indicates a broken ASSERT_EXPR.  */
      x = fold (ASSERT_EXPR_COND (t));
      if (x == boolean_false_node)
	{
	  error ("ASSERT_EXPR with an always-false condition");
	  return *tp;
	}
      break;

    case MODIFY_EXPR:
      error ("MODIFY_EXPR not expected while having tuples.");
      return *tp;

    case ADDR_EXPR:
      {
	bool old_constant;
	bool old_side_effects;
	bool new_constant;
	bool new_side_effects;

	gcc_assert (is_gimple_address (t));

	/* Verify the cached TREE_CONSTANT / TREE_SIDE_EFFECTS bits are
	   up to date by recomputing them and comparing.  */
	old_constant = TREE_CONSTANT (t);
	old_side_effects = TREE_SIDE_EFFECTS (t);

	recompute_tree_invariant_for_addr_expr (t);
	new_side_effects = TREE_SIDE_EFFECTS (t);
	new_constant = TREE_CONSTANT (t);

	if (old_constant != new_constant)
	  {
	    error ("constant not recomputed when ADDR_EXPR changed");
	    return t;
	  }
	if (old_side_effects != new_side_effects)
	  {
	    error ("side effects not recomputed when ADDR_EXPR changed");
	    return t;
	  }

	/* Skip any references (they will be checked when we recurse down the
	   tree) and ensure that any variable used as a prefix is marked
	   addressable.  */
	for (x = TREE_OPERAND (t, 0);
	     handled_component_p (x);
	     x = TREE_OPERAND (x, 0))
	  ;

	if (!(TREE_CODE (x) == VAR_DECL
	      || TREE_CODE (x) == PARM_DECL
	      || TREE_CODE (x) == RESULT_DECL))
	  return NULL;
	if (!TREE_ADDRESSABLE (x))
	  {
	    error ("address taken, but ADDRESSABLE bit not set");
	    return x;
	  }
	if (DECL_GIMPLE_REG_P (x))
	  {
	    error ("DECL_GIMPLE_REG_P set on a variable with address taken");
	    return x;
	  }

	break;
      }

    case COND_EXPR:
      x = COND_EXPR_COND (t);
      if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
	{
	  error ("non-integral used in condition");
	  return x;
	}
      if (!is_gimple_condexpr (x))
	{
	  error ("invalid conditional operand");
	  return x;
	}
      break;

    case NON_LVALUE_EXPR:
      gcc_unreachable ();

    CASE_CONVERT:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
      CHECK_OP (0, "invalid operand to unary operator");
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
      /* We have a nest of references.  Verify that each of the operands
	 that determine where to reference is either a constant or a variable,
	 verify that the base is valid, and then show we've already checked
	 the subtrees.  */
      while (handled_component_p (t))
	{
	  if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
	    CHECK_OP (2, "invalid COMPONENT_REF offset operator");
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      CHECK_OP (1, "invalid array index");
	      if (TREE_OPERAND (t, 2))
		CHECK_OP (2, "invalid array lower bound");
	      if (TREE_OPERAND (t, 3))
		CHECK_OP (3, "invalid array stride");
	    }
	  else if (TREE_CODE (t) == BIT_FIELD_REF)
	    {
	      /* Position and size must be host-representable unsigned
		 integers, and the extracted width must agree with the
		 result type's precision (or mode precision for
		 non-integral results).  */
	      if (!host_integerp (TREE_OPERAND (t, 1), 1)
		  || !host_integerp (TREE_OPERAND (t, 2), 1))
		{
		  error ("invalid position or size operand to BIT_FIELD_REF");
		  return t;
		}
	      else if (INTEGRAL_TYPE_P (TREE_TYPE (t))
		       && (TYPE_PRECISION (TREE_TYPE (t))
			   != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
		{
		  error ("integral result type precision does not match "
			 "field size of BIT_FIELD_REF");
		  return t;
		}
	      if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
		  && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
		      != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
		{
		  error ("mode precision of non-integral result does not "
			 "match field size of BIT_FIELD_REF");
		  return t;
		}
	    }

	  t = TREE_OPERAND (t, 0);
	}

      if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
	{
	  error ("invalid reference prefix");
	  return t;
	}
      *walk_subtrees = 0;
      break;
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
	 POINTER_PLUS_EXPR.  */
      if (POINTER_TYPE_P (TREE_TYPE (t)))
	{
	  error ("invalid operand to plus/minus, type is a pointer");
	  return t;
	}
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case POINTER_PLUS_EXPR:
      /* Check to make sure the first operand is a pointer or reference type. */
      if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
	{
	  error ("invalid operand to pointer plus, first operand is not a pointer");
	  return t;
	}
      /* Check to make sure the second operand is an integer with type of
	 sizetype.  */
      if (!useless_type_conversion_p (sizetype,
				      TREE_TYPE (TREE_OPERAND (t, 1))))
	{
	  error ("invalid operand to pointer plus, second operand is not an "
		 "integer with type of sizetype.");
	  return t;
	}
      /* FALLTHROUGH */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case CONSTRUCTOR:
      /* Constant vector constructors are atomic; no need to walk into
	 their elements.  */
      if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	*walk_subtrees = 0;
      break;

    default:
      break;
    }
  return NULL;
}

#undef CHECK_OP
/* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
   Returns true if there is an error, otherwise false.  */

static bool
verify_types_in_gimple_min_lval (tree expr)
{
  tree op;

  if (is_gimple_id (expr))
    return false;

  if (!INDIRECT_REF_P (expr)
      && TREE_CODE (expr) != TARGET_MEM_REF)
    {
      error ("invalid expression for min lvalue");
      return true;
    }

  /* TARGET_MEM_REFs are strange beasts.  */
  if (TREE_CODE (expr) == TARGET_MEM_REF)
    return false;

  /* For an indirect reference, the operand must be a GIMPLE value and
     the pointed-to type must be convertible to the reference's type.  */
  op = TREE_OPERAND (expr, 0);
  if (!is_gimple_val (op))
    {
      error ("invalid operand in indirect reference");
      debug_generic_stmt (op);
      return true;
    }
  if (!useless_type_conversion_p (TREE_TYPE (expr),
				  TREE_TYPE (TREE_TYPE (op))))
    {
      error ("type mismatch in indirect reference");
      debug_generic_stmt (TREE_TYPE (expr));
      debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
      return true;
    }

  return false;
}
/* Verify if EXPR is a valid GIMPLE reference expression.  If
   REQUIRE_LVALUE is true verifies it is an lvalue.  Returns true
   if there is an error, otherwise false.  */

static bool
verify_types_in_gimple_reference (tree expr, bool require_lvalue)
{
  /* Peel the reference chain outside-in, checking each component.  */
  while (handled_component_p (expr))
    {
      tree op = TREE_OPERAND (expr, 0);

      if (TREE_CODE (expr) == ARRAY_REF
	  || TREE_CODE (expr) == ARRAY_RANGE_REF)
	{
	  /* Index, lower bound and stride (if present) must all be
	     GIMPLE values.  */
	  if (!is_gimple_val (TREE_OPERAND (expr, 1))
	      || (TREE_OPERAND (expr, 2)
		  && !is_gimple_val (TREE_OPERAND (expr, 2)))
	      || (TREE_OPERAND (expr, 3)
		  && !is_gimple_val (TREE_OPERAND (expr, 3))))
	    {
	      error ("invalid operands to array reference");
	      debug_generic_stmt (expr);
	      return true;
	    }
	}

      /* Verify if the reference array element types are compatible.  */
      if (TREE_CODE (expr) == ARRAY_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in array reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}
      if (TREE_CODE (expr) == ARRAY_RANGE_REF
	  && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in array range reference");
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      if ((TREE_CODE (expr) == REALPART_EXPR
	   || TREE_CODE (expr) == IMAGPART_EXPR)
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in real/imagpart reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      if (TREE_CODE (expr) == COMPONENT_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_OPERAND (expr, 1))))
	{
	  error ("type mismatch in component reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
	  return true;
	}

      /* For VIEW_CONVERT_EXPRs which are allowed here, too, there
	 is nothing to verify.  Gross mismatches at most invoke
	 undefined behavior.  */
      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
	  && !handled_component_p (op))
	return false;

      expr = op;
    }

  /* The innermost base must be a minimal lvalue, unless the whole
     reference is an invariant and no lvalue is required.  */
  return ((require_lvalue || !is_gimple_min_invariant (expr))
	  && verify_types_in_gimple_min_lval (expr));
}
3329 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3330 list of pointer-to types that is trivially convertible to DEST. */
3332 static bool
3333 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3335 tree src;
3337 if (!TYPE_POINTER_TO (src_obj))
3338 return true;
3340 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3341 if (useless_type_conversion_p (dest, src))
3342 return true;
3344 return false;
3347 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3348 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3350 static bool
3351 valid_fixed_convert_types_p (tree type1, tree type2)
3353 return (FIXED_POINT_TYPE_P (type1)
3354 && (INTEGRAL_TYPE_P (type2)
3355 || SCALAR_FLOAT_TYPE_P (type2)
3356 || FIXED_POINT_TYPE_P (type2)));
/* Verify the contents of a GIMPLE_CALL STMT.  Returns true when there
   is a problem, otherwise false.  */

static bool
verify_gimple_call (gimple stmt)
{
  tree fn = gimple_call_fn (stmt);
  tree fntype;

  /* The callee must be a pointer to a function or method type.  */
  if (!POINTER_TYPE_P (TREE_TYPE (fn))
      || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE))
    {
      error ("non-function in gimple call");
      return true;
    }

  if (gimple_call_lhs (stmt)
      && !is_gimple_lvalue (gimple_call_lhs (stmt)))
    {
      error ("invalid LHS in gimple call");
      return true;
    }

  /* The LHS type must be compatible with the callee's return type.  */
  fntype = TREE_TYPE (TREE_TYPE (fn));
  if (gimple_call_lhs (stmt)
      && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
				     TREE_TYPE (fntype))
      /* ??? At least C++ misses conversions at assignments from
	 void * call results.
	 ??? Java is completely off.  Especially with functions
	 returning java.lang.Object.
	 For now simply allow arbitrary pointer type conversions.  */
      && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
	   && POINTER_TYPE_P (TREE_TYPE (fntype))))
    {
      error ("invalid conversion in gimple call");
      debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
      debug_generic_stmt (TREE_TYPE (fntype));
      return true;
    }

  /* ??? The C frontend passes unpromoted arguments in case it
     didn't see a function declaration before the call.  So for now
     leave the call arguments unverified.  Once we gimplify
     unit-at-a-time we have a chance to fix this.  */

  return false;
}
/* Verifies the gimple comparison with the result type TYPE and
   the operands OP0 and OP1.  Returns true on error.  */

static bool
verify_gimple_comparison (tree type, tree op0, tree op1)
{
  tree op0_type = TREE_TYPE (op0);
  tree op1_type = TREE_TYPE (op1);

  if (!is_gimple_val (op0) || !is_gimple_val (op1))
    {
      error ("invalid operands in gimple comparison");
      return true;
    }

  /* For comparisons we do not have the operations type as the
     effective type the comparison is carried out in.  Instead
     we require that either the first operand is trivially
     convertible into the second, or the other way around.
     The resulting type of a comparison may be any integral type.
     Because we special-case pointers to void we allow
     comparisons of pointers with the same mode as well.  */
  if ((!useless_type_conversion_p (op0_type, op1_type)
       && !useless_type_conversion_p (op1_type, op0_type)
       && (!POINTER_TYPE_P (op0_type)
	   || !POINTER_TYPE_P (op1_type)
	   || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
      || !INTEGRAL_TYPE_P (type))
    {
      error ("type mismatch in comparison expression");
      debug_generic_expr (type);
      debug_generic_expr (op0_type);
      debug_generic_expr (op1_type);
      return true;
    }

  return false;
}
/* Verify a gimple assignment statement STMT with an unary rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_unary (gimple stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);

  /* The LHS must be a register; at -O0 a complex-typed non-register
     LHS is tolerated.  */
  if (!is_gimple_reg (lhs)
      && !(optimize == 0
	   && TREE_CODE (lhs_type) == COMPLEX_TYPE))
    {
      error ("non-register as LHS of unary operation");
      return true;
    }

  if (!is_gimple_val (rhs1))
    {
      error ("invalid operand in unary operation");
      return true;
    }

  /* First handle conversions.  */
  switch (rhs_code)
    {
    CASE_CONVERT:
      {
	/* Allow conversions between integral types and pointers only if
	   there is no sign or zero extension involved.
	   For targets where the precision of sizetype doesn't match that
	   of pointers we need to allow arbitrary conversions from and
	   to sizetype.  */
	if ((POINTER_TYPE_P (lhs_type)
	     && INTEGRAL_TYPE_P (rhs1_type)
	     && (TYPE_PRECISION (lhs_type) >= TYPE_PRECISION (rhs1_type)
		 || rhs1_type == sizetype))
	    || (POINTER_TYPE_P (rhs1_type)
		&& INTEGRAL_TYPE_P (lhs_type)
		&& (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
		    || lhs_type == sizetype)))
	  return false;

	/* Allow conversion from integer to offset type and vice versa.  */
	if ((TREE_CODE (lhs_type) == OFFSET_TYPE
	     && TREE_CODE (rhs1_type) == INTEGER_TYPE)
	    || (TREE_CODE (lhs_type) == INTEGER_TYPE
		&& TREE_CODE (rhs1_type) == OFFSET_TYPE))
	  return false;

	/* Otherwise assert we are converting between types of the
	   same kind.  */
	if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
	  {
	    error ("invalid types in nop conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case FIXED_CONVERT_EXPR:
      {
	/* One side must be fixed-point, the other convertible to it.  */
	if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
	    && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
	  {
	    error ("invalid types in fixed-point conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case FLOAT_EXPR:
      {
	/* Integer source, floating-point destination.  */
	if (!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
	  {
	    error ("invalid types in conversion to floating point");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case FIX_TRUNC_EXPR:
      {
	/* Floating-point source, integer destination.  */
	if (!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
	  {
	    error ("invalid types in conversion to integer");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
      /* FIXME.  */
      return false;

    case TRUTH_NOT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case PAREN_EXPR:
    case NON_LVALUE_EXPR:
    case CONJ_EXPR:
      break;

    default:
      gcc_unreachable ();
    }

  /* For the remaining codes assert there is no conversion involved.  */
  if (!useless_type_conversion_p (lhs_type, rhs1_type))
    {
      error ("non-trivial conversion in unary operation");
      debug_generic_expr (lhs_type);
      debug_generic_expr (rhs1_type);
      return true;
    }

  return false;
}
/* Verify a gimple assignment statement STMT with a binary rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_binary (gimple stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  tree rhs2_type = TREE_TYPE (rhs2);

  /* The destination must be a register, except that at -O0 we also
     accept complex-typed memory destinations.  */
  if (!is_gimple_reg (lhs)
      && !(optimize == 0
	   && TREE_CODE (lhs_type) == COMPLEX_TYPE))
    {
      error ("non-register as LHS of binary operation");
      return true;
    }

  if (!is_gimple_val (rhs1)
      || !is_gimple_val (rhs2))
    {
      error ("invalid operands in binary operation");
      return true;
    }

  /* First handle operations that involve different types.  */
  switch (rhs_code)
    {
    case COMPLEX_EXPR:
      {
	/* Both operands are the scalar components of the complex LHS.  */
	if (TREE_CODE (lhs_type) != COMPLEX_TYPE
	    || !(INTEGRAL_TYPE_P (rhs1_type)
	         || SCALAR_FLOAT_TYPE_P (rhs1_type))
	    || !(INTEGRAL_TYPE_P (rhs2_type)
	         || SCALAR_FLOAT_TYPE_P (rhs2_type)))
	  {
	    error ("type mismatch in complex expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      {
	/* Shifts and rotates are ok on integral types, fixed point
	   types and integer vector types.  */
	if ((!INTEGRAL_TYPE_P (rhs1_type)
	     && !FIXED_POINT_TYPE_P (rhs1_type)
	     && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
		  && TREE_CODE (TREE_TYPE (rhs1_type)) == INTEGER_TYPE))
	    || (!INTEGRAL_TYPE_P (rhs2_type)
		/* Vector shifts of vectors are also ok.  */
		&& !(TREE_CODE (rhs1_type) == VECTOR_TYPE
		     && TREE_CODE (TREE_TYPE (rhs1_type)) == INTEGER_TYPE
		     && TREE_CODE (rhs2_type) == VECTOR_TYPE
		     && TREE_CODE (TREE_TYPE (rhs2_type)) == INTEGER_TYPE))
	    || !useless_type_conversion_p (lhs_type, rhs1_type))
	  {
	    error ("type mismatch in shift expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case VEC_LSHIFT_EXPR:
    case VEC_RSHIFT_EXPR:
      {
	/* Whole-vector shifts: first operand is a vector, shift amount
	   is an integer or an integer vector.  */
	if (TREE_CODE (rhs1_type) != VECTOR_TYPE
	    || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
		 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
		 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
	    || (!INTEGRAL_TYPE_P (rhs2_type)
		&& (TREE_CODE (rhs2_type) != VECTOR_TYPE
		    || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
	    || !useless_type_conversion_p (lhs_type, rhs1_type))
	  {
	    error ("type mismatch in vector shift expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }
	/* For shifting a vector of floating point components we
	   only allow shifting by a constant multiple of the element size.  */
	if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
	    && (TREE_CODE (rhs2) != INTEGER_CST
		|| !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
					   TYPE_SIZE (TREE_TYPE (rhs1_type)))))
	  {
	    error ("non-element sized vector shift of floating point vector");
	    return true;
	  }

	return false;
      }

    case PLUS_EXPR:
      {
	/* We use regular PLUS_EXPR for vectors.
	   ??? This just makes the checker happy and may not be what is
	   intended.  */
	if (TREE_CODE (lhs_type) == VECTOR_TYPE
	    && POINTER_TYPE_P (TREE_TYPE (lhs_type)))
	  {
	    if (TREE_CODE (rhs1_type) != VECTOR_TYPE
		|| TREE_CODE (rhs2_type) != VECTOR_TYPE)
	      {
		error ("invalid non-vector operands to vector valued plus");
		return true;
	      }
	    /* Strip the vector types and check the element types below.  */
	    lhs_type = TREE_TYPE (lhs_type);
	    rhs1_type = TREE_TYPE (rhs1_type);
	    rhs2_type = TREE_TYPE (rhs2_type);
	    /* PLUS_EXPR is commutative, so we might end up canonicalizing
	       the pointer to 2nd place.  */
	    if (POINTER_TYPE_P (rhs2_type))
	      {
		tree tem = rhs1_type;
		rhs1_type = rhs2_type;
		rhs2_type = tem;
	      }
	    goto do_pointer_plus_expr_check;
	  }
      }
    /* Fallthru.  */
    case MINUS_EXPR:
      {
	/* Plain pointer arithmetic must use POINTER_PLUS_EXPR instead.  */
	if (POINTER_TYPE_P (lhs_type)
	    || POINTER_TYPE_P (rhs1_type)
	    || POINTER_TYPE_P (rhs2_type))
	  {
	    error ("invalid (pointer) operands to plus/minus");
	    return true;
	  }

	/* Continue with generic binary expression handling.  */
	break;
      }

    case POINTER_PLUS_EXPR:
      {
do_pointer_plus_expr_check:
	/* Pointer + sizetype offset, result type matches the pointer.  */
	if (!POINTER_TYPE_P (rhs1_type)
	    || !useless_type_conversion_p (lhs_type, rhs1_type)
	    || !useless_type_conversion_p (sizetype, rhs2_type))
	  {
	    error ("type mismatch in pointer plus expression");
	    debug_generic_stmt (lhs_type);
	    debug_generic_stmt (rhs1_type);
	    debug_generic_stmt (rhs2_type);
	    return true;
	  }

	return false;
      }

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Short-circuit operators are lowered before gimplification ends.  */
      gcc_unreachable ();

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      {
	/* We allow any kind of integral typed argument and result.  */
	if (!INTEGRAL_TYPE_P (rhs1_type)
	    || !INTEGRAL_TYPE_P (rhs2_type)
	    || !INTEGRAL_TYPE_P (lhs_type))
	  {
	    error ("type mismatch in binary truth expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      /* Comparisons are also binary, but the result type is not
	 connected to the operand types.  */
      return verify_gimple_comparison (lhs_type, rhs1, rhs2);

    case WIDEN_SUM_EXPR:
    case WIDEN_MULT_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_EXTRACT_EVEN_EXPR:
    case VEC_EXTRACT_ODD_EXPR:
    case VEC_INTERLEAVE_HIGH_EXPR:
    case VEC_INTERLEAVE_LOW_EXPR:
      /* FIXME.  */
      return false;

    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
      /* Continue with generic binary expression handling.  */
      break;

    default:
      gcc_unreachable ();
    }

  /* Generic check: both operands trivially convert to the LHS type.  */
  if (!useless_type_conversion_p (lhs_type, rhs1_type)
      || !useless_type_conversion_p (lhs_type, rhs2_type))
    {
      error ("type mismatch in binary expression");
      debug_generic_stmt (lhs_type);
      debug_generic_stmt (rhs1_type);
      debug_generic_stmt (rhs2_type);
      return true;
    }

  return false;
}
/* Verify a gimple assignment statement STMT with a single rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_single (gimple stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);
  bool res = false;

  /* A plain copy must not change the type.  */
  if (!useless_type_conversion_p (lhs_type, rhs1_type))
    {
      error ("non-trivial conversion at assignment");
      debug_generic_expr (lhs_type);
      debug_generic_expr (rhs1_type);
      return true;
    }

  if (handled_component_p (lhs))
    res |= verify_types_in_gimple_reference (lhs, true);

  /* Special codes we cannot handle via their class.  */
  switch (rhs_code)
    {
    case ADDR_EXPR:
      {
	tree op = TREE_OPERAND (rhs1, 0);
	if (!is_gimple_addressable (op))
	  {
	    error ("invalid operand in unary expression");
	    return true;
	  }

	/* The result type only needs to be pointer-compatible with
	   the type of the addressed object.  */
	if (!one_pointer_to_useless_type_conversion_p (lhs_type,
						       TREE_TYPE (op)))
	  {
	    error ("type mismatch in address expression");
	    debug_generic_stmt (lhs_type);
	    debug_generic_stmt (TYPE_POINTER_TO (TREE_TYPE (op)));
	    return true;
	  }

	return verify_types_in_gimple_reference (op, true);
      }

    /* tcc_reference */
    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case VIEW_CONVERT_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case TARGET_MEM_REF:
      /* A load may not feed a register-typed store; such a store
	 would have to go through a register.  */
      if (!is_gimple_reg (lhs)
	  && is_gimple_reg_type (TREE_TYPE (lhs)))
	{
	  error ("invalid rhs for gimple memory store");
	  debug_generic_stmt (lhs);
	  debug_generic_stmt (rhs1);
	  return true;
	}
      return res || verify_types_in_gimple_reference (rhs1, false);

    /* tcc_constant */
    case SSA_NAME:
    case INTEGER_CST:
    case REAL_CST:
    case FIXED_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case STRING_CST:
      return res;

    /* tcc_declaration */
    case CONST_DECL:
      return res;
    case VAR_DECL:
    case PARM_DECL:
      /* A register-typed memory store must take its value from a
	 register, not from another memory object.  */
      if (!is_gimple_reg (lhs)
	  && !is_gimple_reg (rhs1)
	  && is_gimple_reg_type (TREE_TYPE (lhs)))
	{
	  error ("invalid rhs for gimple memory store");
	  debug_generic_stmt (lhs);
	  debug_generic_stmt (rhs1);
	  return true;
	}
      return res;

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case WITH_SIZE_EXPR:
    case EXC_PTR_EXPR:
    case FILTER_EXPR:
    case POLYNOMIAL_CHREC:
    case DOT_PROD_EXPR:
    case VEC_COND_EXPR:
    case REALIGN_LOAD_EXPR:
      /* FIXME.  */
      return res;

    default:;
    }

  return res;
}
3967 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
3968 is a problem, otherwise false. */
3970 static bool
3971 verify_gimple_assign (gimple stmt)
3973 switch (gimple_assign_rhs_class (stmt))
3975 case GIMPLE_SINGLE_RHS:
3976 return verify_gimple_assign_single (stmt);
3978 case GIMPLE_UNARY_RHS:
3979 return verify_gimple_assign_unary (stmt);
3981 case GIMPLE_BINARY_RHS:
3982 return verify_gimple_assign_binary (stmt);
3984 default:
3985 gcc_unreachable ();
/* Verify the contents of a GIMPLE_RETURN STMT.  Returns true when there
   is a problem, otherwise false.  */

static bool
verify_gimple_return (gimple stmt)
{
  tree op = gimple_return_retval (stmt);
  tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));

  /* We cannot test for present return values as we do not fix up missing
     return values from the original source.  */
  if (op == NULL)
    return false;

  if (!is_gimple_val (op)
      && TREE_CODE (op) != RESULT_DECL)
    {
      error ("invalid operand in return statement");
      debug_generic_stmt (op);
      return true;
    }

  /* The returned value must trivially convert to the function's
     declared return type.  */
  if (!useless_type_conversion_p (restype, TREE_TYPE (op))
      /* ??? With C++ we can have the situation that the result
	 decl is a reference type while the return type is an aggregate.  */
      && !(TREE_CODE (op) == RESULT_DECL
	   && TREE_CODE (TREE_TYPE (op)) == REFERENCE_TYPE
	   && useless_type_conversion_p (restype, TREE_TYPE (TREE_TYPE (op)))))
    {
      error ("invalid conversion in return statement");
      debug_generic_stmt (restype);
      debug_generic_stmt (TREE_TYPE (op));
      return true;
    }

  return false;
}
4028 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4029 is a problem, otherwise false. */
4031 static bool
4032 verify_gimple_goto (gimple stmt)
4034 tree dest = gimple_goto_dest (stmt);
4036 /* ??? We have two canonical forms of direct goto destinations, a
4037 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4038 if (TREE_CODE (dest) != LABEL_DECL
4039 && (!is_gimple_val (dest)
4040 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4042 error ("goto destination is neither a label nor a pointer");
4043 return true;
4046 return false;
4049 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4050 is a problem, otherwise false. */
4052 static bool
4053 verify_gimple_switch (gimple stmt)
4055 if (!is_gimple_val (gimple_switch_index (stmt)))
4057 error ("invalid operand to switch statement");
4058 debug_generic_stmt (gimple_switch_index (stmt));
4059 return true;
4062 return false;
/* Verify the contents of a GIMPLE_PHI.  Returns true if there is a problem,
   and false otherwise.  */

static bool
verify_gimple_phi (gimple stmt)
{
  tree type = TREE_TYPE (gimple_phi_result (stmt));
  unsigned i;

  if (!is_gimple_variable (gimple_phi_result (stmt)))
    {
      error ("Invalid PHI result");
      return true;
    }

  for (i = 0; i < gimple_phi_num_args (stmt); i++)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      /* Register PHIs take gimple values; memory (virtual) PHIs only
	 need addressable arguments.  */
      if ((is_gimple_reg (gimple_phi_result (stmt))
	   && !is_gimple_val (arg))
	  || (!is_gimple_reg (gimple_phi_result (stmt))
	      && !is_gimple_addressable (arg)))
	{
	  error ("Invalid PHI argument");
	  debug_generic_stmt (arg);
	  return true;
	}
      /* Every argument must trivially convert to the result type.  */
      if (!useless_type_conversion_p (type, TREE_TYPE (arg)))
	{
	  error ("Incompatible types in PHI argument %u", i);
	  debug_generic_stmt (type);
	  debug_generic_stmt (TREE_TYPE (arg));
	  return true;
	}
    }

  return false;
}
/* Verify the GIMPLE statement STMT.  Returns true if there is an
   error, otherwise false.  */

static bool
verify_types_in_gimple_stmt (gimple stmt)
{
  if (is_gimple_omp (stmt))
    {
      /* OpenMP directives are validated by the FE and never operated
	 on by the optimizers.  Furthermore, GIMPLE_OMP_FOR may contain
	 non-gimple expressions when the main index variable has had
	 its address taken.  This does not affect the loop itself
	 because the header of an GIMPLE_OMP_FOR is merely used to determine
	 how to setup the parallel iteration.  */
      return false;
    }

  /* Dispatch to the per-code verifier; each returns true on error.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return verify_gimple_assign (stmt);

    case GIMPLE_LABEL:
      return TREE_CODE (gimple_label_label (stmt)) != LABEL_DECL;

    case GIMPLE_CALL:
      return verify_gimple_call (stmt);

    case GIMPLE_COND:
      return verify_gimple_comparison (boolean_type_node,
				       gimple_cond_lhs (stmt),
				       gimple_cond_rhs (stmt));

    case GIMPLE_GOTO:
      return verify_gimple_goto (stmt);

    case GIMPLE_SWITCH:
      return verify_gimple_switch (stmt);

    case GIMPLE_RETURN:
      return verify_gimple_return (stmt);

    case GIMPLE_ASM:
      return false;

    case GIMPLE_PHI:
      return verify_gimple_phi (stmt);

    /* Tuples that do not have tree operands.  */
    case GIMPLE_NOP:
    case GIMPLE_RESX:
    case GIMPLE_PREDICT:
      return false;

    default:
      gcc_unreachable ();
    }
}
/* Verify the GIMPLE statements inside the sequence STMTS.  */

static bool
verify_types_in_gimple_seq_2 (gimple_seq stmts)
{
  gimple_stmt_iterator ittr;
  bool err = false;

  for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
    {
      gimple stmt = gsi_stmt (ittr);

      switch (gimple_code (stmt))
	{
	/* Recurse into statements carrying nested sequences.  */
	case GIMPLE_BIND:
	  err |= verify_types_in_gimple_seq_2 (gimple_bind_body (stmt));
	  break;

	case GIMPLE_TRY:
	  err |= verify_types_in_gimple_seq_2 (gimple_try_eval (stmt));
	  err |= verify_types_in_gimple_seq_2 (gimple_try_cleanup (stmt));
	  break;

	case GIMPLE_EH_FILTER:
	  err |= verify_types_in_gimple_seq_2 (gimple_eh_filter_failure (stmt));
	  break;

	case GIMPLE_CATCH:
	  err |= verify_types_in_gimple_seq_2 (gimple_catch_handler (stmt));
	  break;

	default:
	  {
	    bool err2 = verify_types_in_gimple_stmt (stmt);
	    if (err2)
	      debug_gimple_stmt (stmt);
	    err |= err2;
	  }
	}
    }

  return err;
}
/* Verify the GIMPLE statements inside the statement list STMTS.
   Aborts with an internal error if any statement fails type checking.  */

void
verify_types_in_gimple_seq (gimple_seq stmts)
{
  if (verify_types_in_gimple_seq_2 (stmts))
    internal_error ("verify_gimple failed");
}
/* Verify STMT, return true if STMT is not in GIMPLE form.
   TODO: Implement type checking.  */

static bool
verify_stmt (gimple_stmt_iterator *gsi)
{
  tree addr;
  struct walk_stmt_info wi;
  bool last_in_block = gsi_one_before_end_p (*gsi);
  gimple stmt = gsi_stmt (*gsi);

  if (is_gimple_omp (stmt))
    {
      /* OpenMP directives are validated by the FE and never operated
	 on by the optimizers.  Furthermore, GIMPLE_OMP_FOR may contain
	 non-gimple expressions when the main index variable has had
	 its address taken.  This does not affect the loop itself
	 because the header of an GIMPLE_OMP_FOR is merely used to determine
	 how to setup the parallel iteration.  */
      return false;
    }

  /* FIXME.  The C frontend passes unpromoted arguments in case it
     didn't see a function declaration before the call.  */
  if (is_gimple_call (stmt))
    {
      tree decl;

      if (!is_gimple_call_addr (gimple_call_fn (stmt)))
	{
	  error ("invalid function in call statement");
	  return true;
	}

      /* DECL_LOOPING_CONST_OR_PURE_P only makes sense on functions
	 that are also pure or const.  */
      decl = gimple_call_fndecl (stmt);
      if (decl
	  && TREE_CODE (decl) == FUNCTION_DECL
	  && DECL_LOOPING_CONST_OR_PURE_P (decl)
	  && (!DECL_PURE_P (decl))
	  && (!TREE_READONLY (decl)))
	{
	  error ("invalid pure const state for function");
	  return true;
	}
    }

  /* Walk all operands and report the first invalid expression found.  */
  memset (&wi, 0, sizeof (wi));
  addr = walk_gimple_op (gsi_stmt (*gsi), verify_expr, &wi);
  if (addr)
    {
      debug_generic_expr (addr);
      inform (gimple_location (gsi_stmt (*gsi)), "in statement");
      debug_gimple_stmt (stmt);
      return true;
    }

  /* If the statement is marked as part of an EH region, then it is
     expected that the statement could throw.  Verify that when we
     have optimizations that simplify statements such that we prove
     that they cannot throw, that we update other data structures
     to match.  */
  if (lookup_stmt_eh_region (stmt) >= 0)
    {
      /* During IPA passes, ipa-pure-const sets nothrow flags on calls
	 and they are updated on statements only after fixup_cfg
	 is executed at beggining of expansion stage.  */
      if (!stmt_could_throw_p (stmt) && cgraph_state != CGRAPH_STATE_IPA_SSA)
	{
	  error ("statement marked for throw, but doesn%'t");
	  goto fail;
	}
      /* A throwing statement must end its basic block.  */
      if (!last_in_block && stmt_can_throw_internal (stmt))
	{
	  error ("statement marked for throw in middle of block");
	  goto fail;
	}
    }

  return false;

 fail:
  debug_gimple_stmt (stmt);
  return true;
}
/* Return true when the T can be shared.  */

static bool
tree_node_can_be_shared (tree t)
{
  /* Types, declarations, constants, SSA names and a few special nodes
     are inherently shareable.  */
  if (IS_TYPE_OR_DECL_P (t)
      || is_gimple_min_invariant (t)
      || TREE_CODE (t) == SSA_NAME
      || t == error_mark_node
      || TREE_CODE (t) == IDENTIFIER_NODE)
    return true;

  if (TREE_CODE (t) == CASE_LABEL_EXPR)
    return true;

  /* Strip reference components with invariant structure (constant
     array indices, field accesses, real/imag parts) down to the base.  */
  while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	  && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
	 || TREE_CODE (t) == COMPONENT_REF
	 || TREE_CODE (t) == REALPART_EXPR
	 || TREE_CODE (t) == IMAGPART_EXPR)
    t = TREE_OPERAND (t, 0);

  /* A reference chain rooted at a declaration may be shared.  */
  if (DECL_P (t))
    return true;

  return false;
}
4335 /* Called via walk_gimple_stmt. Verify tree sharing. */
4337 static tree
4338 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4340 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4341 struct pointer_set_t *visited = (struct pointer_set_t *) wi->info;
4343 if (tree_node_can_be_shared (*tp))
4345 *walk_subtrees = false;
4346 return NULL;
4349 if (pointer_set_insert (visited, *tp))
4350 return *tp;
4352 return NULL;
/* Set when verify_eh_throw_stmt_node finds a stale EH table entry.  */
static bool eh_error_found;

/* htab_traverse callback: SLOT is a throw_stmt_node from the EH throw
   statement table, DATA the set of statements actually visited in the
   CFG.  Flags table entries whose statement no longer exists.  */
static int
verify_eh_throw_stmt_node (void **slot, void *data)
{
  struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
  struct pointer_set_t *visited = (struct pointer_set_t *) data;

  if (!pointer_set_contains (visited, node->stmt))
    {
      error ("Dead STMT in EH table");
      debug_gimple_stmt (node->stmt);
      eh_error_found = true;
    }
  /* Continue traversal over the remaining slots.  */
  return 1;
}
4373 /* Verify the GIMPLE statements in every basic block. */
4375 void
4376 verify_stmts (void)
4378 basic_block bb;
4379 gimple_stmt_iterator gsi;
4380 bool err = false;
4381 struct pointer_set_t *visited, *visited_stmts;
4382 tree addr;
4383 struct walk_stmt_info wi;
4385 timevar_push (TV_TREE_STMT_VERIFY);
4386 visited = pointer_set_create ();
4387 visited_stmts = pointer_set_create ();
4389 memset (&wi, 0, sizeof (wi));
4390 wi.info = (void *) visited;
4392 FOR_EACH_BB (bb)
4394 gimple phi;
4395 size_t i;
4397 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4399 phi = gsi_stmt (gsi);
4400 pointer_set_insert (visited_stmts, phi);
4401 if (gimple_bb (phi) != bb)
4403 error ("gimple_bb (phi) is set to a wrong basic block");
4404 err |= true;
4407 for (i = 0; i < gimple_phi_num_args (phi); i++)
4409 tree t = gimple_phi_arg_def (phi, i);
4410 tree addr;
4412 if (!t)
4414 error ("missing PHI def");
4415 debug_gimple_stmt (phi);
4416 err |= true;
4417 continue;
4419 /* Addressable variables do have SSA_NAMEs but they
4420 are not considered gimple values. */
4421 else if (TREE_CODE (t) != SSA_NAME
4422 && TREE_CODE (t) != FUNCTION_DECL
4423 && !is_gimple_min_invariant (t))
4425 error ("PHI argument is not a GIMPLE value");
4426 debug_gimple_stmt (phi);
4427 debug_generic_expr (t);
4428 err |= true;
4431 addr = walk_tree (&t, verify_node_sharing, visited, NULL);
4432 if (addr)
4434 error ("incorrect sharing of tree nodes");
4435 debug_gimple_stmt (phi);
4436 debug_generic_expr (addr);
4437 err |= true;
4441 #ifdef ENABLE_TYPES_CHECKING
4442 if (verify_gimple_phi (phi))
4444 debug_gimple_stmt (phi);
4445 err |= true;
4447 #endif
4450 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
4452 gimple stmt = gsi_stmt (gsi);
4454 if (gimple_code (stmt) == GIMPLE_WITH_CLEANUP_EXPR
4455 || gimple_code (stmt) == GIMPLE_BIND)
4457 error ("invalid GIMPLE statement");
4458 debug_gimple_stmt (stmt);
4459 err |= true;
4462 pointer_set_insert (visited_stmts, stmt);
4464 if (gimple_bb (stmt) != bb)
4466 error ("gimple_bb (stmt) is set to a wrong basic block");
4467 debug_gimple_stmt (stmt);
4468 err |= true;
4471 if (gimple_code (stmt) == GIMPLE_LABEL)
4473 tree decl = gimple_label_label (stmt);
4474 int uid = LABEL_DECL_UID (decl);
4476 if (uid == -1
4477 || VEC_index (basic_block, label_to_block_map, uid) != bb)
4479 error ("incorrect entry in label_to_block_map.\n");
4480 err |= true;
4484 err |= verify_stmt (&gsi);
4486 #ifdef ENABLE_TYPES_CHECKING
4487 if (verify_types_in_gimple_stmt (gsi_stmt (gsi)))
4489 debug_gimple_stmt (stmt);
4490 err |= true;
4492 #endif
4493 addr = walk_gimple_op (gsi_stmt (gsi), verify_node_sharing, &wi);
4494 if (addr)
4496 error ("incorrect sharing of tree nodes");
4497 debug_gimple_stmt (stmt);
4498 debug_generic_expr (addr);
4499 err |= true;
4501 gsi_next (&gsi);
4505 eh_error_found = false;
4506 if (get_eh_throw_stmt_table (cfun))
4507 htab_traverse (get_eh_throw_stmt_table (cfun),
4508 verify_eh_throw_stmt_node,
4509 visited_stmts);
4511 if (err | eh_error_found)
4512 internal_error ("verify_stmts failed");
4514 pointer_set_destroy (visited);
4515 pointer_set_destroy (visited_stmts);
4516 verify_histograms ();
4517 timevar_pop (TV_TREE_STMT_VERIFY);
/* Verifies that the flow information is OK.  Returns nonzero when an
   inconsistency between the CFG and the statements is found.  */

static int
gimple_verify_flow_info (void)
{
  int err = 0;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple stmt;
  edge e;
  edge_iterator ei;

  /* The artificial entry/exit blocks never carry statements.  */
  if (ENTRY_BLOCK_PTR->il.gimple)
    {
      error ("ENTRY_BLOCK has IL associated with it");
      err = 1;
    }

  if (EXIT_BLOCK_PTR->il.gimple)
    {
      error ("EXIT_BLOCK has IL associated with it");
      err = 1;
    }

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      {
	error ("fallthru to exit from bb %d", e->src->index);
	err = 1;
      }

  FOR_EACH_BB (bb)
    {
      bool found_ctrl_stmt = false;

      stmt = NULL;

      /* Skip labels on the start of basic block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  tree label;
	  gimple prev_stmt = stmt;

	  stmt = gsi_stmt (gsi);

	  if (gimple_code (stmt) != GIMPLE_LABEL)
	    break;

	  label = gimple_label_label (stmt);
	  /* Nonlocal labels must come first in a block's label run.  */
	  if (prev_stmt && DECL_NONLOCAL (label))
	    {
	      error ("nonlocal label ");
	      print_generic_expr (stderr, label, 0);
	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
		       bb->index);
	      err = 1;
	    }

	  if (label_to_block (label) != bb)
	    {
	      error ("label ");
	      print_generic_expr (stderr, label, 0);
	      fprintf (stderr, " to block does not match in bb %d",
		       bb->index);
	      err = 1;
	    }

	  if (decl_function_context (label) != current_function_decl)
	    {
	      error ("label ");
	      print_generic_expr (stderr, label, 0);
	      fprintf (stderr, " has incorrect context in bb %d",
		       bb->index);
	      err = 1;
	    }
	}

      /* Verify that body of basic block BB is free of control flow.  */
      for (; !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);

	  if (found_ctrl_stmt)
	    {
	      error ("control flow in the middle of basic block %d",
		     bb->index);
	      err = 1;
	    }

	  if (stmt_ends_bb_p (stmt))
	    found_ctrl_stmt = true;

	  if (gimple_code (stmt) == GIMPLE_LABEL)
	    {
	      error ("label ");
	      print_generic_expr (stderr, gimple_label_label (stmt), 0);
	      fprintf (stderr, " in the middle of basic block %d", bb->index);
	      err = 1;
	    }
	}

      gsi = gsi_last_bb (bb);
      if (gsi_end_p (gsi))
	continue;

      /* The rest of the checks look at the block's last statement and
	 the flags of its outgoing edges.  */
      stmt = gsi_stmt (gsi);

      err |= verify_eh_edges (stmt);

      if (is_ctrl_stmt (stmt))
	{
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & EDGE_FALLTHRU)
	      {
		error ("fallthru edge after a control statement in bb %d",
		       bb->index);
		err = 1;
	      }
	}

      if (gimple_code (stmt) != GIMPLE_COND)
	{
	  /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
	     after anything else but if statement.  */
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
	      {
		error ("true/false edge after a non-GIMPLE_COND in bb %d",
		       bb->index);
		err = 1;
	      }
	}

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    edge true_edge;
	    edge false_edge;

	    extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

	    /* A condition must have exactly a true and a false edge.  */
	    if (!true_edge
		|| !false_edge
		|| !(true_edge->flags & EDGE_TRUE_VALUE)
		|| !(false_edge->flags & EDGE_FALSE_VALUE)
		|| (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
		|| (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
		|| EDGE_COUNT (bb->succs) >= 3)
	      {
		error ("wrong outgoing edge flags at end of bb %d",
		       bb->index);
		err = 1;
	      }
	  }
	  break;

	case GIMPLE_GOTO:
	  /* Direct gotos must have been turned into edges already.  */
	  if (simple_goto_p (stmt))
	    {
	      error ("explicit goto at end of bb %d", bb->index);
	      err = 1;
	    }
	  else
	    {
	      /* FIXME.  We should double check that the labels in the
		 destination blocks have their address taken.  */
	      FOR_EACH_EDGE (e, ei, bb->succs)
		if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
				 | EDGE_FALSE_VALUE))
		    || !(e->flags & EDGE_ABNORMAL))
		  {
		    error ("wrong outgoing edge flags at end of bb %d",
			   bb->index);
		    err = 1;
		  }
	    }
	  break;

	case GIMPLE_RETURN:
	  if (!single_succ_p (bb)
	      || (single_succ_edge (bb)->flags
		  & (EDGE_FALLTHRU | EDGE_ABNORMAL
		     | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
	    {
	      error ("wrong outgoing edge flags at end of bb %d", bb->index);
	      err = 1;
	    }
	  if (single_succ (bb) != EXIT_BLOCK_PTR)
	    {
	      error ("return edge does not point to exit in bb %d",
		     bb->index);
	      err = 1;
	    }
	  break;

	case GIMPLE_SWITCH:
	  {
	    tree prev;
	    edge e;
	    size_t i, n;

	    n = gimple_switch_num_labels (stmt);

	    /* Mark all the destination basic blocks.  The aux field is
	       used as a temporary marker: 1 = expected, 2 = seen.  */
	    for (i = 0; i < n; ++i)
	      {
		tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
		basic_block label_bb = label_to_block (lab);
		gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
		label_bb->aux = (void *)1;
	      }

	    /* Verify that the case labels are sorted.  */
	    prev = gimple_switch_label (stmt, 0);
	    for (i = 1; i < n; ++i)
	      {
		tree c = gimple_switch_label (stmt, i);
		if (!CASE_LOW (c))
		  {
		    error ("found default case not at the start of "
			   "case vector");
		    err = 1;
		    continue;
		  }
		if (CASE_LOW (prev)
		    && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
		  {
		    error ("case labels not sorted: ");
		    print_generic_expr (stderr, prev, 0);
		    fprintf (stderr," is greater than ");
		    print_generic_expr (stderr, c, 0);
		    fprintf (stderr," but comes before it.\n");
		    err = 1;
		  }
		prev = c;
	      }
	    /* VRP will remove the default case if it can prove it will
	       never be executed.  So do not verify there always exists
	       a default case here.  */

	    FOR_EACH_EDGE (e, ei, bb->succs)
	      {
		if (!e->dest->aux)
		  {
		    error ("extra outgoing edge %d->%d",
			   bb->index, e->dest->index);
		    err = 1;
		  }

		e->dest->aux = (void *)2;
		if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
				 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
		  {
		    error ("wrong outgoing edge flags at end of bb %d",
			   bb->index);
		    err = 1;
		  }
	      }

	    /* Check that we have all of them.  */
	    for (i = 0; i < n; ++i)
	      {
		tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
		basic_block label_bb = label_to_block (lab);

		if (label_bb->aux != (void *)2)
		  {
		    error ("missing edge %i->%i", bb->index, label_bb->index);
		    err = 1;
		  }
	      }

	    /* Clear the markers again.  */
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      e->dest->aux = (void *)0;
	  }

	default: ;
	}
    }

  if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
    verify_dominators (CDI_DOMINATORS);

  return err;
}
/* Updates phi nodes after creating a forwarder block joined
   by edge FALLTHRU.  */

static void
gimple_make_forwarder_block (edge fallthru)
{
  edge e;
  edge_iterator ei;
  basic_block dummy, bb;
  tree var;
  gimple_stmt_iterator gsi;

  dummy = fallthru->src;
  bb = fallthru->dest;

  /* Nothing to do when BB kept only the fallthru predecessor.  */
  if (single_pred_p (bb))
    return;

  /* If we redirected a branch we must create new PHI nodes at the
     start of BB.  */
  for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi, new_phi;

      phi = gsi_stmt (gsi);
      var = gimple_phi_result (phi);
      /* Move the old result to the new PHI in BB and give the old PHI
	 in the forwarder block a fresh SSA name as its result.  */
      new_phi = create_phi_node (var, bb);
      SSA_NAME_DEF_STMT (var) = new_phi;
      gimple_phi_set_result (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
      add_phi_arg (new_phi, gimple_phi_result (phi), fallthru);
    }

  /* Add the arguments we have stored on edges.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      if (e == fallthru)
	continue;

      flush_pending_stmts (e);
    }
}
4852 /* Return a non-special label in the head of basic block BLOCK.
4853 Create one if it doesn't exist. */
4855 tree
4856 gimple_block_label (basic_block bb)
4858 gimple_stmt_iterator i, s = gsi_start_bb (bb);
4859 bool first = true;
4860 tree label;
4861 gimple stmt;
/* Scan the leading GIMPLE_LABEL statements looking for a usable
   (non-DECL_NONLOCAL) label. */
4863 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
4865 stmt = gsi_stmt (i);
4866 if (gimple_code (stmt) != GIMPLE_LABEL)
4867 break;
4868 label = gimple_label_label (stmt);
4869 if (!DECL_NONLOCAL (label))
/* Hoist the found label to the very start of BB so later lookups hit
   it first. */
4871 if (!first)
4872 gsi_move_before (&i, &s);
4873 return label;
/* No suitable label exists; create an artificial one at the head. */
4877 label = create_artificial_label (UNKNOWN_LOCATION);
4878 stmt = gimple_build_label (label);
4879 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
4880 return label;
4884 /* Attempt to perform edge redirection by replacing a possibly complex
4885 jump instruction by a goto or by removing the jump completely.
4886 This can apply only if all edges now point to the same block. The
4887 parameters and return values are equivalent to
4888 redirect_edge_and_branch. */
4890 static edge
4891 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
4893 basic_block src = e->src;
4894 gimple_stmt_iterator i;
4895 gimple stmt;
4897 /* We can replace or remove a complex jump only when we have exactly
4898 two edges. */
4899 if (EDGE_COUNT (src->succs) != 2
4900 /* Verify that all targets will be TARGET. Specifically, the
4901 edge that is not E must also go to TARGET. */
4902 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4903 return NULL;
4905 i = gsi_last_bb (src);
4906 if (gsi_end_p (i))
4907 return NULL;
4909 stmt = gsi_stmt (i);
/* Only conditional jumps and switches can be deleted and replaced by a
   plain fallthru; anything else is left alone. */
4911 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
4913 gsi_remove (&i, true);
4914 e = ssa_redirect_edge (e, target);
4915 e->flags = EDGE_FALLTHRU;
4916 return e;
4919 return NULL;
4923 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4924 edge representing the redirected branch. */
4926 static edge
4927 gimple_redirect_edge_and_branch (edge e, basic_block dest)
4929 basic_block bb = e->src;
4930 gimple_stmt_iterator gsi;
4931 edge ret;
4932 gimple stmt;
/* Abnormal edges cannot be redirected. */
4934 if (e->flags & EDGE_ABNORMAL)
4935 return NULL;
/* First try to simplify: if all successors of the source would end up
   at DEST, the control statement can be removed entirely. */
4937 if (e->src != ENTRY_BLOCK_PTR
4938 && (ret = gimple_try_redirect_by_replacing_jump (e, dest)))
4939 return ret;
4941 if (e->dest == dest)
4942 return NULL;
4944 if (e->flags & EDGE_EH)
4945 return redirect_eh_edge (e, dest);
/* Dispatch on the statement ending the source block; a block ending
   without a control statement is handled via the default case. */
4947 gsi = gsi_last_bb (bb);
4948 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
4950 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
4952 case GIMPLE_COND:
4953 /* For COND_EXPR, we only need to redirect the edge. */
4954 break;
4956 case GIMPLE_GOTO:
4957 /* No non-abnormal edges should lead from a non-simple goto, and
4958 simple ones should be represented implicitly. */
4959 gcc_unreachable ();
4961 case GIMPLE_SWITCH:
4963 tree label = gimple_block_label (dest);
4964 tree cases = get_cases_for_edge (e, stmt);
4966 /* If we have a list of cases associated with E, then use it
4967 as it's a lot faster than walking the entire case vector. */
4968 if (cases)
4970 edge e2 = find_edge (e->src, dest);
4971 tree last, first;
/* Point every case on E's list at the (possibly new) label of DEST. */
4973 first = cases;
4974 while (cases)
4976 last = cases;
4977 CASE_LABEL (cases) = label;
4978 cases = TREE_CHAIN (cases);
4981 /* If there was already an edge in the CFG, then we need
4982 to move all the cases associated with E to E2. */
4983 if (e2)
4985 tree cases2 = get_cases_for_edge (e2, stmt);
/* Splice E's case chain onto the front of E2's chain. */
4987 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4988 TREE_CHAIN (cases2) = first;
4991 else
/* No cached case list: walk the whole label vector of the switch. */
4993 size_t i, n = gimple_switch_num_labels (stmt);
4995 for (i = 0; i < n; i++)
4997 tree elt = gimple_switch_label (stmt, i);
4998 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4999 CASE_LABEL (elt) = label;
5003 break;
5006 case GIMPLE_RETURN:
/* Drop the return; the edge becomes an ordinary fallthru. */
5007 gsi_remove (&gsi, true);
5008 e->flags |= EDGE_FALLTHRU;
5009 break;
5011 case GIMPLE_OMP_RETURN:
5012 case GIMPLE_OMP_CONTINUE:
5013 case GIMPLE_OMP_SECTIONS_SWITCH:
5014 case GIMPLE_OMP_FOR:
5015 /* The edges from OMP constructs can be simply redirected. */
5016 break;
5018 default:
5019 /* Otherwise it must be a fallthru edge, and we don't need to
5020 do anything besides redirecting it. */
5021 gcc_assert (e->flags & EDGE_FALLTHRU);
5022 break;
5025 /* Update/insert PHI nodes as necessary. */
5027 /* Now update the edges in the CFG. */
5028 e = ssa_redirect_edge (e, dest);
5030 return e;
5033 /* Returns true if it is possible to remove edge E by redirecting
5034 it to the destination of the other edge from E->src. */
5036 static bool
5037 gimple_can_remove_branch_p (const_edge e)
5039 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5040 return false;
5042 return true;
5045 /* Simple wrapper, as we can always redirect fallthru edges. */
5047 static basic_block
5048 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5050 e = gimple_redirect_edge_and_branch (e, dest);
5051 gcc_assert (e);
5053 return NULL;
5057 /* Splits basic block BB after statement STMT (but at least after the
5058 labels). If STMT is NULL, BB is split just after the labels. */
5060 static basic_block
5061 gimple_split_block (basic_block bb, void *stmt)
5063 gimple_stmt_iterator gsi;
5064 gimple_stmt_iterator gsi_tgt;
5065 gimple act;
5066 gimple_seq list;
5067 basic_block new_bb;
5068 edge e;
5069 edge_iterator ei;
5071 new_bb = create_empty_bb (bb);
5073 /* Redirect the outgoing edges. */
5074 new_bb->succs = bb->succs;
5075 bb->succs = NULL;
5076 FOR_EACH_EDGE (e, ei, new_bb->succs)
5077 e->src = new_bb;
/* Splitting after a label is the same as splitting just after the
   label block, i.e. STMT == NULL. */
5079 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5080 stmt = NULL;
5082 /* Move everything from GSI to the new basic block. */
5083 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5085 act = gsi_stmt (gsi);
5086 if (gimple_code (act) == GIMPLE_LABEL)
5087 continue;
/* STMT == NULL: stop at the first non-label statement. */
5089 if (!stmt)
5090 break;
/* Otherwise stop just after STMT. */
5092 if (stmt == act)
5094 gsi_next (&gsi);
5095 break;
/* Nothing to move: BB ends at the split point. */
5099 if (gsi_end_p (gsi))
5100 return new_bb;
5102 /* Split the statement list - avoid re-creating new containers as this
5103 brings ugly quadratic memory consumption in the inliner.
5104 (We are still quadratic since we need to update stmt BB pointers,
5105 sadly.) */
5106 list = gsi_split_seq_before (&gsi);
5107 set_bb_seq (new_bb, list);
5108 for (gsi_tgt = gsi_start (list);
5109 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5110 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5112 return new_bb;
5116 /* Moves basic block BB after block AFTER. */
5118 static bool
5119 gimple_move_block_after (basic_block bb, basic_block after)
5121 if (bb->prev_bb == after)
5122 return true;
5124 unlink_block (bb);
5125 link_block (bb, after);
5127 return true;
5131 /* Return true if basic_block can be duplicated. */
5133 static bool
5134 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5136 return true;
5139 /* Create a duplicate of the basic block BB. NOTE: This does not
5140 preserve SSA form. */
5142 static basic_block
5143 gimple_duplicate_bb (basic_block bb)
5145 basic_block new_bb;
5146 gimple_stmt_iterator gsi, gsi_tgt;
5147 gimple_seq phis = phi_nodes (bb);
5148 gimple phi, stmt, copy;
/* Place the copy at the end of the function, just before EXIT. */
5150 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
5152 /* Copy the PHI nodes. We ignore PHI node arguments here because
5153 the incoming edges have not been setup yet. */
5154 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
5156 phi = gsi_stmt (gsi);
5157 copy = create_phi_node (gimple_phi_result (phi), new_bb);
/* Register a fresh SSA name for the copied PHI result and record the
   replacement mapping. */
5158 create_new_def_for (gimple_phi_result (copy), copy,
5159 gimple_phi_result_ptr (copy));
5162 gsi_tgt = gsi_start_bb (new_bb);
5163 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5165 def_operand_p def_p;
5166 ssa_op_iter op_iter;
5167 int region;
5169 stmt = gsi_stmt (gsi);
/* Labels are not duplicated; they must stay unique. */
5170 if (gimple_code (stmt) == GIMPLE_LABEL)
5171 continue;
5173 /* Create a new copy of STMT and duplicate STMT's virtual
5174 operands. */
5175 copy = gimple_copy (stmt);
5176 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
/* Keep the copy in the same EH region as the original, if any. */
5177 region = lookup_stmt_eh_region (stmt);
5178 if (region >= 0)
5179 add_stmt_to_eh_region (copy, region);
5180 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5182 /* Create new names for all the definitions created by COPY and
5183 add replacement mappings for each new name. */
5184 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5185 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5188 return new_bb;
5191 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5193 static void
5194 add_phi_args_after_copy_edge (edge e_copy)
5196 basic_block bb, bb_copy = e_copy->src, dest;
5197 edge e;
5198 edge_iterator ei;
5199 gimple phi, phi_copy;
5200 tree def;
5201 gimple_stmt_iterator psi, psi_copy;
/* Nothing to do when the destination has no PHI nodes. */
5203 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5204 return;
/* Map the copied source/destination blocks back to their originals so
   that the corresponding original edge E can be located. */
5206 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5208 if (e_copy->dest->flags & BB_DUPLICATED)
5209 dest = get_bb_original (e_copy->dest);
5210 else
5211 dest = e_copy->dest;
5213 e = find_edge (bb, dest);
5214 if (!e)
5216 /* During loop unrolling the target of the latch edge is copied.
5217 In this case we are not looking for edge to dest, but to
5218 duplicated block whose original was dest. */
5219 FOR_EACH_EDGE (e, ei, bb->succs)
5221 if ((e->dest->flags & BB_DUPLICATED)
5222 && get_bb_original (e->dest) == dest)
5223 break;
5226 gcc_assert (e != NULL);
/* Walk the PHI nodes of the original and copied destinations in
   lockstep, transferring each argument from E to E_COPY. */
5229 for (psi = gsi_start_phis (e->dest),
5230 psi_copy = gsi_start_phis (e_copy->dest);
5231 !gsi_end_p (psi);
5232 gsi_next (&psi), gsi_next (&psi_copy))
5234 phi = gsi_stmt (psi);
5235 phi_copy = gsi_stmt (psi_copy);
5236 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5237 add_phi_arg (phi_copy, def, e_copy);
5242 /* Basic block BB_COPY was created by code duplication. Add phi node
5243 arguments for edges going out of BB_COPY. The blocks that were
5244 duplicated have BB_DUPLICATED set. */
5246 void
5247 add_phi_args_after_copy_bb (basic_block bb_copy)
5249 edge e_copy;
5250 edge_iterator ei;
5252 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5254 add_phi_args_after_copy_edge (e_copy);
5258 /* Blocks in REGION_COPY array of length N_REGION were created by
5259 duplication of basic blocks. Add phi node arguments for edges
5260 going from these blocks. If E_COPY is not NULL, also add
5261 phi node arguments for its destination.*/
5263 void
5264 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5265 edge e_copy)
5267 unsigned i;
5269 for (i = 0; i < n_region; i++)
5270 region_copy[i]->flags |= BB_DUPLICATED;
5272 for (i = 0; i < n_region; i++)
5273 add_phi_args_after_copy_bb (region_copy[i]);
5274 if (e_copy)
5275 add_phi_args_after_copy_edge (e_copy);
5277 for (i = 0; i < n_region; i++)
5278 region_copy[i]->flags &= ~BB_DUPLICATED;
5281 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5282 important exit edge EXIT. By important we mean that no SSA name defined
5283 inside region is live over the other exit edges of the region. All entry
5284 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5285 to the duplicate of the region. SSA form, dominance and loop information
5286 is updated. The new basic blocks are stored to REGION_COPY in the same
5287 order as they had in REGION, provided that REGION_COPY is not NULL.
5288 The function returns false if it is unable to copy the region,
5289 true otherwise. */
5291 bool
5292 gimple_duplicate_sese_region (edge entry, edge exit,
5293 basic_block *region, unsigned n_region,
5294 basic_block *region_copy)
5296 unsigned i;
5297 bool free_region_copy = false, copying_header = false;
5298 struct loop *loop = entry->dest->loop_father;
5299 edge exit_copy;
5300 VEC (basic_block, heap) *doms;
5301 edge redirected;
5302 int total_freq = 0, entry_freq = 0;
5303 gcov_type total_count = 0, entry_count = 0;
5305 if (!can_copy_bbs_p (region, n_region))
5306 return false;
5308 /* Some sanity checking. Note that we do not check for all possible
5309 missuses of the functions. I.e. if you ask to copy something weird,
5310 it will work, but the state of structures probably will not be
5311 correct. */
5312 for (i = 0; i < n_region; i++)
5314 /* We do not handle subloops, i.e. all the blocks must belong to the
5315 same loop. */
5316 if (region[i]->loop_father != loop)
5317 return false;
5319 if (region[i] != entry->dest
5320 && region[i] == loop->header)
5321 return false;
5324 set_loop_copy (loop, loop);
5326 /* In case the function is used for loop header copying (which is the primary
5327 use), ensure that EXIT and its copy will be new latch and entry edges. */
5328 if (loop->header == entry->dest)
5330 copying_header = true;
5331 set_loop_copy (loop, loop_outer (loop));
5333 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5334 return false;
5336 for (i = 0; i < n_region; i++)
5337 if (region[i] != exit->src
5338 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5339 return false;
/* If the caller did not supply a place for the copies, allocate one
   locally and remember to free it before returning. */
5342 if (!region_copy)
5344 region_copy = XNEWVEC (basic_block, n_region);
5345 free_region_copy = true;
5348 gcc_assert (!need_ssa_update_p (cfun));
5350 /* Record blocks outside the region that are dominated by something
5351 inside. */
5352 doms = NULL;
5353 initialize_original_copy_tables ();
5355 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
/* Collect profile data: use counts if available, frequencies
   otherwise. */
5357 if (entry->dest->count)
5359 total_count = entry->dest->count;
5360 entry_count = entry->count;
5361 /* Fix up corner cases, to avoid division by zero or creation of negative
5362 frequencies. */
5363 if (entry_count > total_count)
5364 entry_count = total_count;
5366 else
5368 total_freq = entry->dest->frequency;
5369 entry_freq = EDGE_FREQUENCY (entry);
5370 /* Fix up corner cases, to avoid division by zero or creation of negative
5371 frequencies. */
5372 if (total_freq == 0)
5373 total_freq = 1;
5374 else if (entry_freq > total_freq)
5375 entry_freq = total_freq;
/* Duplicate the region and split the profile between the original and
   the copy in proportion to the entry edge. */
5378 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5379 split_edge_bb_loc (entry));
5380 if (total_count)
5382 scale_bbs_frequencies_gcov_type (region, n_region,
5383 total_count - entry_count,
5384 total_count);
5385 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
5386 total_count);
5388 else
5390 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5391 total_freq);
5392 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
/* When copying the loop header, the original region becomes the new
   loop body; EXIT's endpoints become the new header and latch. */
5395 if (copying_header)
5397 loop->header = exit->dest;
5398 loop->latch = exit->src;
5401 /* Redirect the entry and add the phi node arguments. */
5402 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
5403 gcc_assert (redirected != NULL);
5404 flush_pending_stmts (entry);
5406 /* Concerning updating of dominators: We must recount dominators
5407 for entry block and its copy. Anything that is outside of the
5408 region, but was dominated by something inside needs recounting as
5409 well. */
5410 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5411 VEC_safe_push (basic_block, heap, doms, get_bb_original (entry->dest));
5412 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5413 VEC_free (basic_block, heap, doms);
5415 /* Add the other PHI node arguments. */
5416 add_phi_args_after_copy (region_copy, n_region, NULL);
5418 /* Update the SSA web. */
5419 update_ssa (TODO_update_ssa);
5421 if (free_region_copy)
5422 free (region_copy);
5424 free_original_copy_tables ();
5425 return true;
5428 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5429 are stored to REGION_COPY in the same order in that they appear
5430 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5431 the region, EXIT an exit from it. The condition guarding EXIT
5432 is moved to ENTRY. Returns true if duplication succeeds, false
5433 otherwise.
5435 For example,
5437 some_code;
5438 if (cond)
5440 else
5443 is transformed to
5445 if (cond)
5447 some_code;
5450 else
5452 some_code;
/* NOTE(review): all parameters below are marked ATTRIBUTE_UNUSED yet the
   body clearly uses every one of them; the markers appear spurious and
   could be dropped -- confirm against the build before removing.  */
5457 bool
5458 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
5459 basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
5460 basic_block *region_copy ATTRIBUTE_UNUSED)
5462 unsigned i;
5463 bool free_region_copy = false;
5464 struct loop *loop = exit->dest->loop_father;
5465 struct loop *orig_loop = entry->dest->loop_father;
5466 basic_block switch_bb, entry_bb, nentry_bb;
5467 VEC (basic_block, heap) *doms;
5468 int total_freq = 0, exit_freq = 0;
5469 gcov_type total_count = 0, exit_count = 0;
5470 edge exits[2], nexits[2], e;
5471 gimple_stmt_iterator gsi;
5472 gimple cond_stmt;
5473 edge sorig, snew;
/* EXIT's source must end in a two-way conditional; EXITS holds both
   outgoing edges, EXITS[0] being the region exit. */
5475 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5476 exits[0] = exit;
5477 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5479 if (!can_copy_bbs_p (region, n_region))
5480 return false;
5482 /* Some sanity checking. Note that we do not check for all possible
5483 missuses of the functions. I.e. if you ask to copy something weird
5484 (e.g., in the example, if there is a jump from inside to the middle
5485 of some_code, or come_code defines some of the values used in cond)
5486 it will work, but the resulting code will not be correct. */
5487 for (i = 0; i < n_region; i++)
5489 /* We do not handle subloops, i.e. all the blocks must belong to the
5490 same loop. */
5491 if (region[i]->loop_father != orig_loop)
5492 return false;
5494 if (region[i] == orig_loop->latch)
5495 return false;
5498 initialize_original_copy_tables ();
5499 set_loop_copy (orig_loop, loop);
/* Allocate REGION_COPY locally when the caller did not provide one. */
5501 if (!region_copy)
5503 region_copy = XNEWVEC (basic_block, n_region);
5504 free_region_copy = true;
5507 gcc_assert (!need_ssa_update_p (cfun));
5509 /* Record blocks outside the region that are dominated by something
5510 inside. */
5511 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
/* Collect profile data: counts when available, frequencies otherwise. */
5513 if (exit->src->count)
5515 total_count = exit->src->count;
5516 exit_count = exit->count;
5517 /* Fix up corner cases, to avoid division by zero or creation of negative
5518 frequencies. */
5519 if (exit_count > total_count)
5520 exit_count = total_count;
5522 else
5524 total_freq = exit->src->frequency;
5525 exit_freq = EDGE_FREQUENCY (exit);
5526 /* Fix up corner cases, to avoid division by zero or creation of negative
5527 frequencies. */
5528 if (total_freq == 0)
5529 total_freq = 1;
5530 if (exit_freq > total_freq)
5531 exit_freq = total_freq;
/* Copy the region and split the profile between original and copy. */
5534 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5535 split_edge_bb_loc (exit));
5536 if (total_count)
5538 scale_bbs_frequencies_gcov_type (region, n_region,
5539 total_count - exit_count,
5540 total_count);
5541 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
5542 total_count);
5544 else
5546 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
5547 total_freq);
5548 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
5551 /* Create the switch block, and put the exit condition to it. */
5552 entry_bb = entry->dest;
5553 nentry_bb = get_bb_copy (entry_bb);
/* Reuse ENTRY's source as the switch block if it does not already end
   in a control statement; otherwise split the entry edge. */
5554 if (!last_stmt (entry->src)
5555 || !stmt_ends_bb_p (last_stmt (entry->src)))
5556 switch_bb = entry->src;
5557 else
5558 switch_bb = split_edge (entry);
5559 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
/* Copy the guarding condition into the switch block, unsharing its
   operands so no tree is shared between the two statements. */
5561 gsi = gsi_last_bb (switch_bb);
5562 cond_stmt = last_stmt (exit->src);
5563 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
5564 cond_stmt = gimple_copy (cond_stmt);
5565 gimple_cond_set_lhs (cond_stmt, unshare_expr (gimple_cond_lhs (cond_stmt)));
5566 gimple_cond_set_rhs (cond_stmt, unshare_expr (gimple_cond_rhs (cond_stmt)));
5567 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
/* The old fallthru edge keeps the in-loop flags; a new edge routes the
   exit case to the copied region. */
5569 sorig = single_succ_edge (switch_bb);
5570 sorig->flags = exits[1]->flags;
5571 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
5573 /* Register the new edge from SWITCH_BB in loop exit lists. */
5574 rescan_loop_exit (snew, true, false);
5576 /* Add the PHI node arguments. */
5577 add_phi_args_after_copy (region_copy, n_region, snew);
5579 /* Get rid of now superfluous conditions and associated edges (and phi node
5580 arguments). */
5581 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
5582 PENDING_STMT (e) = NULL;
5583 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
5584 PENDING_STMT (e) = NULL;
5586 /* Anything that is outside of the region, but was dominated by something
5587 inside needs to update dominance info. */
5588 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5589 VEC_free (basic_block, heap, doms);
5591 /* Update the SSA web. */
5592 update_ssa (TODO_update_ssa);
5594 if (free_region_copy)
5595 free (region_copy);
5597 free_original_copy_tables ();
5598 return true;
5601 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
5602 adding blocks when the dominator traversal reaches EXIT. This
5603 function silently assumes that ENTRY strictly dominates EXIT. */
5605 void
5606 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
5607 VEC(basic_block,heap) **bbs_p)
5609 basic_block son;
5611 for (son = first_dom_son (CDI_DOMINATORS, entry);
5612 son;
5613 son = next_dom_son (CDI_DOMINATORS, son))
5615 VEC_safe_push (basic_block, heap, *bbs_p, son);
5616 if (son != exit)
5617 gather_blocks_in_sese_region (son, exit, bbs_p);
5621 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
5622 The duplicates are recorded in VARS_MAP. */
5624 static void
5625 replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
5626 tree to_context)
5628 tree t = *tp, new_t;
5629 struct function *f = DECL_STRUCT_FUNCTION (to_context);
5630 void **loc;
/* Already in the target context: nothing to replace. */
5632 if (DECL_CONTEXT (t) == to_context)
5633 return;
5635 loc = pointer_map_contains (vars_map, t);
/* First time we see T: create the duplicate and cache it. */
5637 if (!loc)
5639 loc = pointer_map_insert (vars_map, t);
5641 if (SSA_VAR_P (t))
/* Variables get a fresh decl registered in the target function's
   local declarations list. */
5643 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
5644 f->local_decls = tree_cons (NULL_TREE, new_t, f->local_decls);
5646 else
/* Only CONST_DECLs may reach here otherwise; a plain node copy
   suffices for them. */
5648 gcc_assert (TREE_CODE (t) == CONST_DECL);
5649 new_t = copy_node (t);
5651 DECL_CONTEXT (new_t) = to_context;
5653 *loc = new_t;
5655 else
/* Reuse the previously created duplicate. */
5656 new_t = (tree) *loc;
5658 *tp = new_t;
5662 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
5663 VARS_MAP maps old ssa names and var_decls to the new ones. */
5665 static tree
5666 replace_ssa_name (tree name, struct pointer_map_t *vars_map,
5667 tree to_context)
5669 void **loc;
5670 tree new_name, decl = SSA_NAME_VAR (name);
/* Only register (non-virtual) SSA names are handled here. */
5672 gcc_assert (is_gimple_reg (name));
5674 loc = pointer_map_contains (vars_map, name);
/* First occurrence: duplicate the underlying decl and mint a new SSA
   name for it inside TO_CONTEXT. */
5676 if (!loc)
5678 replace_by_duplicate_decl (&decl, vars_map, to_context);
/* Temporarily switch cfun so make_ssa_name/set_default_def operate on
   the destination function. */
5680 push_cfun (DECL_STRUCT_FUNCTION (to_context));
5681 if (gimple_in_ssa_p (cfun))
5682 add_referenced_var (decl);
5684 new_name = make_ssa_name (decl, SSA_NAME_DEF_STMT (name));
5685 if (SSA_NAME_IS_DEFAULT_DEF (name))
5686 set_default_def (decl, new_name);
5687 pop_cfun ();
/* Cache the mapping for later occurrences of NAME. */
5689 loc = pointer_map_insert (vars_map, name);
5690 *loc = new_name;
5692 else
5693 new_name = (tree) *loc;
5695 return new_name;
/* State shared by the move_stmt_r/move_stmt_op walkers while moving
   statements from one function to another.  */
5698 struct move_stmt_d
/* Lexical block the moved statements originally belonged to (may be
   NULL_TREE to mean "any block").  */
5700 tree orig_block;
/* Lexical block the moved statements are re-parented into.  */
5701 tree new_block;
/* Source and destination function contexts (FUNCTION_DECLs).  */
5702 tree from_context;
5703 tree to_context;
/* Map from old ssa names and var decls to their duplicates.  */
5704 struct pointer_map_t *vars_map;
/* Optional map from old labels to their replacements.  */
5705 htab_t new_label_map;
/* Whether local decls should be remapped (disabled inside OMP
   directive headers).  */
5706 bool remap_decls_p;
5709 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
5710 contained in *TP if it has been ORIG_BLOCK previously and change the
5711 DECL_CONTEXT of every local variable referenced in *TP. */
5713 static tree
5714 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
5716 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5717 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
5718 tree t = *tp;
5720 if (EXPR_P (t))
5721 /* We should never have TREE_BLOCK set on non-statements. */
5722 gcc_assert (!TREE_BLOCK (t));
5724 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
5726 if (TREE_CODE (t) == SSA_NAME)
5727 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
5728 else if (TREE_CODE (t) == LABEL_DECL)
/* Labels are remapped through NEW_LABEL_MAP when present, and in any
   case get their context moved to the destination function. */
5730 if (p->new_label_map)
5732 struct tree_map in, *out;
5733 in.base.from = t;
5734 out = (struct tree_map *)
5735 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
5736 if (out)
5737 *tp = t = out->to;
5740 DECL_CONTEXT (t) = p->to_context;
5742 else if (p->remap_decls_p)
5744 /* Replace T with its duplicate. T should no longer appear in the
5745 parent function, so this looks wasteful; however, it may appear
5746 in referenced_vars, and more importantly, as virtual operands of
5747 statements, and in alias lists of other variables. It would be
5748 quite difficult to expunge it from all those places. ??? It might
5749 suffice to do this for addressable variables. */
5750 if ((TREE_CODE (t) == VAR_DECL
5751 && !is_global_var (t))
5752 || TREE_CODE (t) == CONST_DECL)
5753 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
/* Register the (possibly replaced) variable in the destination
   function's referenced-vars set. */
5755 if (SSA_VAR_P (t)
5756 && gimple_in_ssa_p (cfun))
5758 push_cfun (DECL_STRUCT_FUNCTION (p->to_context));
5759 add_referenced_var (*tp);
5760 pop_cfun ();
/* Decls and SSA names have no interesting subtrees to walk. */
5763 *walk_subtrees = 0;
5765 else if (TYPE_P (t))
5766 *walk_subtrees = 0;
5768 return NULL_TREE;
5771 /* Like move_stmt_op, but for gimple statements.
5773 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
5774 contained in the current statement in *GSI_P and change the
5775 DECL_CONTEXT of every local variable referenced in the current
5776 statement. */
5778 static tree
5779 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
5780 struct walk_stmt_info *wi)
5782 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
5783 gimple stmt = gsi_stmt (*gsi_p);
5784 tree block = gimple_block (stmt);
/* Re-parent the statement into NEW_BLOCK when it belonged to
   ORIG_BLOCK (or had no block, or all blocks are being moved). */
5786 if (p->orig_block == NULL_TREE
5787 || block == p->orig_block
5788 || block == NULL_TREE)
5789 gimple_set_block (stmt, p->new_block);
5790 #ifdef ENABLE_CHECKING
/* Otherwise the statement's block must be nested inside ORIG_BLOCK. */
5791 else if (block != p->new_block)
5793 while (block && block != p->orig_block)
5794 block = BLOCK_SUPERCONTEXT (block);
5795 gcc_assert (block);
5797 #endif
5799 if (is_gimple_omp (stmt)
5800 && gimple_code (stmt) != GIMPLE_OMP_RETURN
5801 && gimple_code (stmt) != GIMPLE_OMP_CONTINUE)
5803 /* Do not remap variables inside OMP directives. Variables
5804 referenced in clauses and directive header belong to the
5805 parent function and should not be moved into the child
5806 function. */
5807 bool save_remap_decls_p = p->remap_decls_p;
5808 p->remap_decls_p = false;
5809 *handled_ops_p = true;
/* Walk the OMP body ourselves with remapping disabled, then restore
   the flag for the rest of the traversal. */
5811 walk_gimple_seq (gimple_omp_body (stmt), move_stmt_r, move_stmt_op, wi);
5813 p->remap_decls_p = save_remap_decls_p;
5816 return NULL_TREE;
5819 /* Marks virtual operands of all statements in basic blocks BBS for
5820 renaming. */
5822 void
5823 mark_virtual_ops_in_bb (basic_block bb)
5825 gimple_stmt_iterator gsi;
5827 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5828 mark_virtual_ops_for_renaming (gsi_stmt (gsi));
5830 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5831 mark_virtual_ops_for_renaming (gsi_stmt (gsi));
5834 /* Move basic block BB from function CFUN to function DEST_FN. The
5835 block is moved out of the original linked list and placed after
5836 block AFTER in the new list. Also, the block is removed from the
5837 original array of blocks and placed in DEST_FN's array of blocks.
5838 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
5839 updated to reflect the moved edges.
5841 The local variables are remapped to new instances, VARS_MAP is used
5842 to record the mapping. */
5844 static void
5845 move_block_to_fn (struct function *dest_cfun, basic_block bb,
5846 basic_block after, bool update_edge_count_p,
5847 struct move_stmt_d *d, int eh_offset)
5849 struct control_flow_graph *cfg;
5850 edge_iterator ei;
5851 edge e;
5852 gimple_stmt_iterator si;
5853 unsigned old_len, new_len;
5855 /* Remove BB from dominance structures. */
5856 delete_from_dominance_info (CDI_DOMINATORS, bb);
5857 if (current_loops)
5858 remove_bb_from_loops (bb);
5860 /* Link BB to the new linked list. */
5861 move_block_after (bb, after);
5863 /* Update the edge count in the corresponding flowgraphs. */
5864 if (update_edge_count_p)
5865 FOR_EACH_EDGE (e, ei, bb->succs)
5867 cfun->cfg->x_n_edges--;
5868 dest_cfun->cfg->x_n_edges++;
5871 /* Remove BB from the original basic block array. */
5872 VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
5873 cfun->cfg->x_n_basic_blocks--;
5875 /* Grow DEST_CFUN's basic block array if needed. */
5876 cfg = dest_cfun->cfg;
5877 cfg->x_n_basic_blocks++;
5878 if (bb->index >= cfg->x_last_basic_block)
5879 cfg->x_last_basic_block = bb->index + 1;
/* BB keeps its index, so the destination array must be large enough
   to hold a slot for it. */
5881 old_len = VEC_length (basic_block, cfg->x_basic_block_info);
5882 if ((unsigned) cfg->x_last_basic_block >= old_len)
5884 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
5885 VEC_safe_grow_cleared (basic_block, gc, cfg->x_basic_block_info,
5886 new_len);
5889 VEC_replace (basic_block, cfg->x_basic_block_info,
5890 bb->index, bb);
5892 /* Remap the variables in phi nodes. */
5893 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
5895 gimple phi = gsi_stmt (si);
5896 use_operand_p use;
5897 tree op = PHI_RESULT (phi);
5898 ssa_op_iter oi;
5900 if (!is_gimple_reg (op))
5902 /* Remove the phi nodes for virtual operands (alias analysis will be
5903 run for the new function, anyway). */
5904 remove_phi_node (&si, true);
5905 continue;
/* Replace the PHI result and every SSA-name argument with their
   duplicates in the destination function. */
5908 SET_PHI_RESULT (phi,
5909 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
5910 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
5912 op = USE_FROM_PTR (use);
5913 if (TREE_CODE (op) == SSA_NAME)
5914 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
5917 gsi_next (&si);
/* Remap the operands of each ordinary statement and migrate its
   per-statement metadata (labels, EH regions, histograms). */
5920 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5922 gimple stmt = gsi_stmt (si);
5923 int region;
5924 struct walk_stmt_info wi;
5926 memset (&wi, 0, sizeof (wi));
5927 wi.info = d;
5928 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
5930 if (gimple_code (stmt) == GIMPLE_LABEL)
5932 tree label = gimple_label_label (stmt);
5933 int uid = LABEL_DECL_UID (label);
5935 gcc_assert (uid > -1);
/* Move the label's uid slot from the source to the destination
   label_to_block map, growing the latter if needed. */
5937 old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
5938 if (old_len <= (unsigned) uid)
5940 new_len = 3 * uid / 2 + 1;
5941 VEC_safe_grow_cleared (basic_block, gc,
5942 cfg->x_label_to_block_map, new_len);
5945 VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
5946 VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
5948 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
5950 if (uid >= dest_cfun->cfg->last_label_uid)
5951 dest_cfun->cfg->last_label_uid = uid + 1;
5953 else if (gimple_code (stmt) == GIMPLE_RESX && eh_offset != 0)
5954 gimple_resx_set_region (stmt, gimple_resx_region (stmt) + eh_offset);
/* Rebase the statement's EH region into the destination function. */
5956 region = lookup_stmt_eh_region (stmt);
5957 if (region >= 0)
5959 add_stmt_to_eh_region_fn (dest_cfun, stmt, region + eh_offset);
5960 remove_stmt_from_eh_region (stmt);
5961 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
5962 gimple_remove_stmt_histograms (cfun, stmt);
5965 /* We cannot leave any operands allocated from the operand caches of
5966 the current function. */
5967 free_stmt_operands (stmt);
5968 push_cfun (dest_cfun);
5969 update_stmt (stmt);
5970 pop_cfun ();
/* Re-parent the goto_block of each outgoing edge, mirroring what
   move_stmt_r does for statement blocks. */
5973 FOR_EACH_EDGE (e, ei, bb->succs)
5974 if (e->goto_locus)
5976 tree block = e->goto_block;
5977 if (d->orig_block == NULL_TREE
5978 || block == d->orig_block)
5979 e->goto_block = d->new_block;
5980 #ifdef ENABLE_CHECKING
5981 else if (block != d->new_block)
5983 while (block && block != d->orig_block)
5984 block = BLOCK_SUPERCONTEXT (block);
5985 gcc_assert (block);
5987 #endif
5991 /* Examine the statements in BB (which is in SRC_CFUN); find and return
5992 the outermost EH region. Use REGION as the incoming base EH region. */
5994 static int
5995 find_outermost_region_in_block (struct function *src_cfun,
5996 basic_block bb, int region)
5998 gimple_stmt_iterator si;
6000 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6002 gimple stmt = gsi_stmt (si);
6003 int stmt_region;
/* A GIMPLE_RESX names its region directly; everything else is looked
   up in SRC_CFUN's EH tables. */
6005 if (gimple_code (stmt) == GIMPLE_RESX)
6006 stmt_region = gimple_resx_region (stmt);
6007 else
6008 stmt_region = lookup_stmt_eh_region_fn (src_cfun, stmt);
6009 if (stmt_region > 0)
6011 if (region < 0)
6012 region = stmt_region;
6013 else if (stmt_region != region)
/* Two different regions seen: widen REGION to their common
   enclosing region. */
6015 region = eh_region_outermost (src_cfun, stmt_region, region);
6016 gcc_assert (region != -1);
6021 return region;
/* Callback used when duplicating EH regions into a new function: map
   label DECL to a freshly created artificial label, recording the pair
   in the hash table passed via DATA.  Returns the new label.  */

static tree
new_label_mapper (tree decl, void *data)
{
  htab_t hash = (htab_t) data;
  struct tree_map *m;
  void **slot;

  gcc_assert (TREE_CODE (decl) == LABEL_DECL);

  m = XNEW (struct tree_map);
  m->hash = DECL_UID (decl);
  m->base.from = decl;
  m->to = create_artificial_label (UNKNOWN_LOCATION);
  /* Keep the old UID so existing label-to-block mappings stay valid,
     and make sure the destination CFG's UID watermark covers it.  */
  LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
  if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
    cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;

  slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
  /* Each label must be mapped at most once.  */
  gcc_assert (*slot == NULL);

  *slot = m;

  return m->to;
}
/* Change DECL_CONTEXT of all BLOCK_VARS in BLOCK, including
   subblocks, to TO_CONTEXT, replacing each variable by its duplicate
   recorded in (or added to) VARS_MAP.  Recurses into BLOCK_SUBBLOCKS.  */

static void
replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
				  tree to_context)
{
  tree *tp, t;

  for (tp = &BLOCK_VARS (block); *tp; tp = &TREE_CHAIN (*tp))
    {
      t = *tp;
      /* Only VAR_DECLs and CONST_DECLs are remapped; other decl kinds
	 (labels, types, ...) are left in place.  */
      if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
	continue;
      replace_by_duplicate_decl (&t, vars_map, to_context);
      if (t != *tp)
	{
	  /* Carry over any DECL_VALUE_EXPR from the original so the
	     duplicate still expands to the same expression.  */
	  if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
	    {
	      SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
	      DECL_HAS_VALUE_EXPR_P (t) = 1;
	    }
	  /* Splice the duplicate into the chain in place of the
	     original.  */
	  TREE_CHAIN (t) = TREE_CHAIN (*tp);
	  *tp = t;
	}
    }

  for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
    replace_block_vars_by_duplicates (block, vars_map, to_context);
}
/* Move a single-entry, single-exit region delimited by ENTRY_BB and
   EXIT_BB to function DEST_CFUN.  The whole region is replaced by a
   single basic block in the original CFG and the new basic block is
   returned.  DEST_CFUN must not have a CFG yet.

   Note that the region need not be a pure SESE region.  Blocks inside
   the region may contain calls to abort/exit.  The only restriction
   is that ENTRY_BB should be the only entry point and it must
   dominate EXIT_BB.

   Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
   functions outermost BLOCK, move all subblocks of ORIG_BLOCK
   to the new function.

   All local variables referenced in the region are assumed to be in
   the corresponding BLOCK_VARS and unexpanded variable lists
   associated with DEST_CFUN.  */

basic_block
move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
			basic_block exit_bb, tree orig_block)
{
  VEC(basic_block,heap) *bbs, *dom_bbs;
  basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
  basic_block after, bb, *entry_pred, *exit_succ, abb;
  struct function *saved_cfun = cfun;
  int *entry_flag, *exit_flag, eh_offset;
  unsigned *entry_prob, *exit_prob;
  unsigned i, num_entry_edges, num_exit_edges;
  edge e;
  edge_iterator ei;
  htab_t new_label_map;
  struct pointer_map_t *vars_map;
  struct loop *loop = entry_bb->loop_father;
  struct move_stmt_d d;

  /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
     region.  */
  gcc_assert (entry_bb != exit_bb
	      && (!exit_bb
		  || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));

  /* Collect all the blocks in the region.  Manually add ENTRY_BB
     because it won't be added by dfs_enumerate_from.  */
  bbs = NULL;
  VEC_safe_push (basic_block, heap, bbs, entry_bb);
  gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);

  /* The blocks that used to be dominated by something in BBS will now be
     dominated by the new block.  */
  dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
				     VEC_address (basic_block, bbs),
				     VEC_length (basic_block, bbs));

  /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG.  We need to remember
     the predecessor edges to ENTRY_BB and the successor edges to
     EXIT_BB so that we can re-attach them to the new basic block that
     will replace the region.  */
  num_entry_edges = EDGE_COUNT (entry_bb->preds);
  entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
  entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
  entry_prob = XNEWVEC (unsigned, num_entry_edges);
  i = 0;
  for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
    {
      /* Save probability/flags/source before the edge is destroyed.  */
      entry_prob[i] = e->probability;
      entry_flag[i] = e->flags;
      entry_pred[i++] = e->src;
      remove_edge (e);
    }

  if (exit_bb)
    {
      num_exit_edges = EDGE_COUNT (exit_bb->succs);
      exit_succ = (basic_block *) xcalloc (num_exit_edges,
					   sizeof (basic_block));
      exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
      exit_prob = XNEWVEC (unsigned, num_exit_edges);
      i = 0;
      for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
	{
	  exit_prob[i] = e->probability;
	  exit_flag[i] = e->flags;
	  exit_succ[i++] = e->dest;
	  remove_edge (e);
	}
    }
  else
    {
      num_exit_edges = 0;
      exit_succ = NULL;
      exit_flag = NULL;
      exit_prob = NULL;
    }

  /* Switch context to the child function to initialize DEST_FN's CFG.  */
  gcc_assert (dest_cfun->cfg == NULL);
  push_cfun (dest_cfun);

  init_empty_tree_cfg ();

  /* Initialize EH information for the new function.  */
  eh_offset = 0;
  new_label_map = NULL;
  if (saved_cfun->eh)
    {
      int region = -1;

      /* Find the outermost EH region touched by the moved blocks; only
	 that subtree needs duplicating into DEST_CFUN.  */
      for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
	region = find_outermost_region_in_block (saved_cfun, bb, region);

      init_eh_for_function ();
      if (region != -1)
	{
	  new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
	  eh_offset = duplicate_eh_regions (saved_cfun, new_label_mapper,
					    new_label_map, region, 0);
	}
    }

  pop_cfun ();

  /* Move blocks from BBS into DEST_CFUN.  */
  gcc_assert (VEC_length (basic_block, bbs) >= 2);
  after = dest_cfun->cfg->x_entry_block_ptr;
  vars_map = pointer_map_create ();

  memset (&d, 0, sizeof (d));
  d.vars_map = vars_map;
  d.from_context = cfun->decl;
  d.to_context = dest_cfun->decl;
  d.new_label_map = new_label_map;
  d.remap_decls_p = true;
  d.orig_block = orig_block;
  d.new_block = DECL_INITIAL (dest_cfun->decl);

  for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
    {
      /* No need to update edge counts on the last block.  It has
	 already been updated earlier when we detached the region from
	 the original CFG.  */
      move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d, eh_offset);
      after = bb;
    }

  /* Rewire BLOCK_SUBBLOCKS of orig_block.  */
  if (orig_block)
    {
      tree block;
      gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
		  == NULL_TREE);
      BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
	= BLOCK_SUBBLOCKS (orig_block);
      for (block = BLOCK_SUBBLOCKS (orig_block);
	   block; block = BLOCK_CHAIN (block))
	BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
      BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
    }

  replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
				    vars_map, dest_cfun->decl);

  if (new_label_map)
    htab_delete (new_label_map);
  pointer_map_destroy (vars_map);

  /* Rewire the entry and exit blocks.  The successor to the entry
     block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
     the child function.  Similarly, the predecessor of DEST_FN's
     EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR.  We
     need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
     various CFG manipulation function get to the right CFG.

     FIXME, this is silly.  The CFG ought to become a parameter to
     these helpers.  */
  push_cfun (dest_cfun);
  make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
  if (exit_bb)
    make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
  pop_cfun ();

  /* Back in the original function, the SESE region has disappeared,
     create a new basic block in its place.  */
  bb = create_empty_bb (entry_pred[0]);
  if (current_loops)
    add_bb_to_loop (bb, loop);
  for (i = 0; i < num_entry_edges; i++)
    {
      e = make_edge (entry_pred[i], bb, entry_flag[i]);
      e->probability = entry_prob[i];
    }

  for (i = 0; i < num_exit_edges; i++)
    {
      e = make_edge (bb, exit_succ[i], exit_flag[i]);
      e->probability = exit_prob[i];
    }

  set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
  for (i = 0; VEC_iterate (basic_block, dom_bbs, i, abb); i++)
    set_immediate_dominator (CDI_DOMINATORS, abb, bb);
  VEC_free (basic_block, heap, dom_bbs);

  if (exit_bb)
    {
      free (exit_prob);
      free (exit_flag);
      free (exit_succ);
    }
  free (entry_prob);
  free (entry_flag);
  free (entry_pred);
  VEC_free (basic_block, heap, bbs);

  return bb;
}
/* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree-pass.h).
   Picks the dump style from the function's state: a CFG-based dump when a
   CFG exists, a plain GIMPLE-sequence dump when gimplified but not yet
   CFG-built, and a GENERIC tree dump otherwise.  Temporarily switches
   CFUN to FN's struct function for the duration of the dump.  */

void
dump_function_to_file (tree fn, FILE *file, int flags)
{
  tree arg, vars, var;
  struct function *dsf;
  bool ignore_topmost_bind = false, any_var = false;
  basic_block bb;
  tree chain;

  fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));

  /* Print the parameter list.  */
  arg = DECL_ARGUMENTS (fn);
  while (arg)
    {
      print_generic_expr (file, TREE_TYPE (arg), dump_flags);
      fprintf (file, " ");
      print_generic_expr (file, arg, dump_flags);
      if (flags & TDF_VERBOSE)
	print_node (file, "", arg, 4);
      if (TREE_CHAIN (arg))
	fprintf (file, ", ");
      arg = TREE_CHAIN (arg);
    }
  fprintf (file, ")\n");

  if (flags & TDF_VERBOSE)
    print_node (file, "", fn, 2);

  dsf = DECL_STRUCT_FUNCTION (fn);
  if (dsf && (flags & TDF_DETAILS))
    dump_eh_tree (file, dsf);

  if (flags & TDF_RAW && !gimple_has_body_p (fn))
    {
      dump_node (fn, TDF_SLIM | flags, file);
      return;
    }

  /* Switch CFUN to point to FN.  */
  push_cfun (DECL_STRUCT_FUNCTION (fn));

  /* When GIMPLE is lowered, the variables are no longer available in
     BIND_EXPRs, so display them separately.  */
  if (cfun && cfun->decl == fn && cfun->local_decls)
    {
      ignore_topmost_bind = true;

      fprintf (file, "{\n");
      for (vars = cfun->local_decls; vars; vars = TREE_CHAIN (vars))
	{
	  var = TREE_VALUE (vars);

	  print_generic_decl (file, var, flags);
	  if (flags & TDF_VERBOSE)
	    print_node (file, "", var, 4);
	  fprintf (file, "\n");

	  any_var = true;
	}
    }

  if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
    {
      /* If the CFG has been built, emit a CFG-based dump.  */
      check_bb_profile (ENTRY_BLOCK_PTR, file);
      if (!ignore_topmost_bind)
	fprintf (file, "{\n");

      if (any_var && n_basic_blocks)
	fprintf (file, "\n");

      FOR_EACH_BB (bb)
	gimple_dump_bb (bb, file, 2, flags);

      fprintf (file, "}\n");
      check_bb_profile (EXIT_BLOCK_PTR, file);
    }
  else if (DECL_SAVED_TREE (fn) == NULL)
    {
      /* The function is now in GIMPLE form but the CFG has not been
	 built yet.  Emit the single sequence of GIMPLE statements
	 that make up its body.  */
      gimple_seq body = gimple_body (fn);

      /* A lone GIMPLE_BIND provides its own braces; print it as-is.  */
      if (gimple_seq_first_stmt (body)
	  && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
	  && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
	print_gimple_seq (file, body, 0, flags);
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");

	  if (any_var)
	    fprintf (file, "\n");

	  print_gimple_seq (file, body, 2, flags);
	  fprintf (file, "}\n");
	}
    }
  else
    {
      int indent;

      /* Make a tree based dump.  */
      chain = DECL_SAVED_TREE (fn);

      if (chain && TREE_CODE (chain) == BIND_EXPR)
	{
	  if (ignore_topmost_bind)
	    {
	      /* Variables were printed above; skip the outer bind.  */
	      chain = BIND_EXPR_BODY (chain);
	      indent = 2;
	    }
	  else
	    indent = 0;
	}
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");
	  indent = 2;
	}

      if (any_var)
	fprintf (file, "\n");

      print_generic_stmt_indented (file, chain, flags, indent);
      if (ignore_topmost_bind)
	fprintf (file, "}\n");
    }

  fprintf (file, "\n\n");

  /* Restore CFUN.  */
  pop_cfun ();
}
/* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h).
   Convenience wrapper around dump_function_to_file for use from a
   debugger.  */

void
debug_function (tree fn, int flags)
{
  dump_function_to_file (fn, stderr, flags);
}
6449 /* Print on FILE the indexes for the predecessors of basic_block BB. */
6451 static void
6452 print_pred_bbs (FILE *file, basic_block bb)
6454 edge e;
6455 edge_iterator ei;
6457 FOR_EACH_EDGE (e, ei, bb->preds)
6458 fprintf (file, "bb_%d ", e->src->index);
6462 /* Print on FILE the indexes for the successors of basic_block BB. */
6464 static void
6465 print_succ_bbs (FILE *file, basic_block bb)
6467 edge e;
6468 edge_iterator ei;
6470 FOR_EACH_EDGE (e, ei, bb->succs)
6471 fprintf (file, "bb_%d ", e->dest->index);
6474 /* Print to FILE the basic block BB following the VERBOSITY level. */
6476 void
6477 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
6479 char *s_indent = (char *) alloca ((size_t) indent + 1);
6480 memset ((void *) s_indent, ' ', (size_t) indent);
6481 s_indent[indent] = '\0';
6483 /* Print basic_block's header. */
6484 if (verbosity >= 2)
6486 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
6487 print_pred_bbs (file, bb);
6488 fprintf (file, "}, succs = {");
6489 print_succ_bbs (file, bb);
6490 fprintf (file, "})\n");
6493 /* Print basic_block's body. */
6494 if (verbosity >= 3)
6496 fprintf (file, "%s {\n", s_indent);
6497 gimple_dump_bb (bb, file, indent + 4, TDF_VOPS|TDF_MEMSYMS);
6498 fprintf (file, "%s }\n", s_indent);
static void print_loop_and_siblings (FILE *, struct loop *, int, int);

/* Pretty print LOOP on FILE, indented INDENT spaces.  Following
   VERBOSITY level this outputs the contents of the loop, or just its
   structure.  A NULL LOOP is silently ignored.  */

static void
print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
{
  char *s_indent;
  basic_block bb;

  if (loop == NULL)
    return;

  s_indent = (char *) alloca ((size_t) indent + 1);
  memset ((void *) s_indent, ' ', (size_t) indent);
  s_indent[indent] = '\0';

  /* Print loop's header.  NOTE(review): loop->latch is dereferenced
     unconditionally here -- assumes the loop has a latch; verify this
     cannot be called on a loop with a NULL latch.  */
  fprintf (file, "%sloop_%d (header = %d, latch = %d", s_indent,
	   loop->num, loop->header->index, loop->latch->index);
  fprintf (file, ", niter = ");
  print_generic_expr (file, loop->nb_iterations, 0);

  if (loop->any_upper_bound)
    {
      fprintf (file, ", upper_bound = ");
      dump_double_int (file, loop->nb_iterations_upper_bound, true);
    }

  if (loop->any_estimate)
    {
      fprintf (file, ", estimate = ");
      dump_double_int (file, loop->nb_iterations_estimate, true);
    }
  fprintf (file, ")\n");

  /* Print loop's body, then recurse into inner loops.  */
  if (verbosity >= 1)
    {
      fprintf (file, "%s{\n", s_indent);
      FOR_EACH_BB (bb)
	if (bb->loop_father == loop)
	  print_loops_bb (file, bb, indent, verbosity);

      print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
      fprintf (file, "%s}\n", s_indent);
    }
}
6553 /* Print the LOOP and its sibling loops on FILE, indented INDENT
6554 spaces. Following VERBOSITY level this outputs the contents of the
6555 loop, or just its structure. */
6557 static void
6558 print_loop_and_siblings (FILE *file, struct loop *loop, int indent, int verbosity)
6560 if (loop == NULL)
6561 return;
6563 print_loop (file, loop, indent, verbosity);
6564 print_loop_and_siblings (file, loop->next, indent, verbosity);
6567 /* Follow a CFG edge from the entry point of the program, and on entry
6568 of a loop, pretty print the loop structure on FILE. */
6570 void
6571 print_loops (FILE *file, int verbosity)
6573 basic_block bb;
6575 bb = ENTRY_BLOCK_PTR;
6576 if (bb && bb->loop_father)
6577 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
/* Debugging loops structure at tree level, at some VERBOSITY level.
   Convenience wrapper over print_loops writing to stderr.  */

void
debug_loops (int verbosity)
{
  print_loops (stderr, verbosity);
}
/* Print on stderr the code of LOOP, at some VERBOSITY level.
   Convenience wrapper over print_loop for debugger use.  */

void
debug_loop (struct loop *loop, int verbosity)
{
  print_loop (stderr, loop, 0, verbosity);
}
/* Print on stderr the code of loop number NUM, at some VERBOSITY
   level.  Looks the loop up by number, then delegates to debug_loop.  */

void
debug_loop_num (unsigned num, int verbosity)
{
  debug_loop (get_loop (num), verbosity);
}
/* Return true if BB ends with a call, possibly followed by some
   instructions that must stay with the call.  Return false,
   otherwise.  NOTE(review): gsi_stmt on an empty block's last
   iterator -- assumes BB is non-empty here; confirm callers never
   pass an empty block.  */

static bool
gimple_block_ends_with_call_p (basic_block bb)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  return is_gimple_call (gsi_stmt (gsi));
}
6618 /* Return true if BB ends with a conditional branch. Return false,
6619 otherwise. */
6621 static bool
6622 gimple_block_ends_with_condjump_p (const_basic_block bb)
6624 gimple stmt = last_stmt (CONST_CAST_BB (bb));
6625 return (stmt && gimple_code (stmt) == GIMPLE_COND);
/* Return true if we need to add fake edge to exit at statement T.
   Helper function for gimple_flow_call_edges_add.  */

static bool
need_fake_edge_p (gimple t)
{
  tree fndecl = NULL_TREE;
  int call_flags = 0;

  /* NORETURN and LONGJMP calls already have an edge to exit.
     CONST and PURE calls do not need one.
     We don't currently check for CONST and PURE here, although
     it would be a good idea, because those attributes are
     figured out from the RTL in mark_constant_function, and
     the counter incrementation code from -fprofile-arcs
     leads to different results from -fbranch-probabilities.  */
  if (is_gimple_call (t))
    {
      fndecl = gimple_call_fndecl (t);
      call_flags = gimple_call_flags (t);
    }

  /* A nothrow built-in that cannot return twice never leaves the block
     abnormally, so no fake edge is needed for it.  */
  if (is_gimple_call (t)
      && fndecl
      && DECL_BUILT_IN (fndecl)
      && (call_flags & ECF_NOTHROW)
      && !(call_flags & ECF_RETURNS_TWICE)
      /* fork() doesn't really return twice, but the effect of
	 wrapping it in __gcov_fork() which calls __gcov_flush()
	 and clears the counters before forking has the same
	 effect as returning twice.  Force a fake edge.  */
      && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
    return false;

  /* Any other call that is not known to never return needs the fake
     edge, since we cannot prove control returns to this block.  */
  if (is_gimple_call (t)
      && !(call_flags & ECF_NORETURN))
    return true;

  /* Volatile asms and asms with inputs may have hidden control-flow
     effects.  */
  if (gimple_code (t) == GIMPLE_ASM
      && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
    return true;

  return false;
}
/* Add fake edges to the function exit for any non constant and non
   noreturn calls, volatile inline assembly in the bitmap of blocks
   specified by BLOCKS or to the whole CFG if BLOCKS is zero.  Return
   the number of blocks that were split.

   The goal is to expose cases in which entering a basic block does
   not imply that all subsequent instructions must be executed.  */

static int
gimple_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block;
  bool check_last_block = false;

  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR->prev_bb;
      gimple_stmt_iterator gsi = gsi_last_bb (bb);
      gimple t = NULL;

      if (!gsi_end_p (gsi))
	t = gsi_stmt (gsi);

      if (t && need_fake_edge_p (t))
	{
	  edge e;

	  e = find_edge (bb, EXIT_BLOCK_PTR);
	  if (e)
	    {
	      /* Insert a nop on the exit edge so the call is no longer
		 the last statement of the last block.  */
	      gsi_insert_on_edge (e, gimple_build_nop ());
	      gsi_commit_edge_inserts ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */
  for (i = 0; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      gimple_stmt_iterator gsi;
      gimple stmt, last_stmt;

      if (!bb)
	continue;

      if (blocks && !TEST_BIT (blocks, i))
	continue;

      /* Walk the block backwards so splits below do not disturb the
	 statements we have yet to visit.  */
      gsi = gsi_last_bb (bb);
      if (!gsi_end_p (gsi))
	{
	  last_stmt = gsi_stmt (gsi);
	  do
	    {
	      stmt = gsi_stmt (gsi);
	      if (need_fake_edge_p (stmt))
		{
		  edge e;

		  /* The handling above of the final block before the
		     epilogue should be enough to verify that there is
		     no edge to the exit block in CFG already.
		     Calling make_edge in such case would cause us to
		     mark that edge as fake and remove it later.  */
#ifdef ENABLE_CHECKING
		  if (stmt == last_stmt)
		    {
		      e = find_edge (bb, EXIT_BLOCK_PTR);
		      gcc_assert (e == NULL);
		    }
#endif

		  /* Note that the following may create a new basic block
		     and renumber the existing basic blocks.  */
		  if (stmt != last_stmt)
		    {
		      e = split_block (bb, stmt);
		      if (e)
			blocks_split++;
		    }
		  make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
		}
	      gsi_prev (&gsi);
	    }
	  while (!gsi_end_p (gsi));
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
/* Purge dead abnormal call edges from basic block BB.  Also purges
   dead EH edges.  Returns true if any edge was removed.  Frees
   dominance info when edges are removed (see gimple_purge_dead_eh_edges
   for why recomputing it here is not worthwhile).  */

bool
gimple_purge_dead_abnormal_call_edges (basic_block bb)
{
  bool changed = gimple_purge_dead_eh_edges (bb);

  /* Abnormal edges only arise in functions containing nonlocal
     labels.  */
  if (cfun->has_nonlocal_label)
    {
      gimple stmt = last_stmt (bb);
      edge_iterator ei;
      edge e;

      /* If the last statement can no longer perform an abnormal goto,
	 all outgoing abnormal edges are dead.  */
      if (!(stmt && stmt_can_make_abnormal_goto (stmt)))
	for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	  {
	    if (e->flags & EDGE_ABNORMAL)
	      {
		remove_edge (e);
		changed = true;
	      }
	    else
	      ei_next (&ei);
	  }

      /* See gimple_purge_dead_eh_edges below.  */
      if (changed)
	free_dominance_info (CDI_DOMINATORS);
    }

  return changed;
}
/* Removes edge E and all the blocks dominated by it, and updates dominance
   information.  The IL in E->src needs to be updated separately.
   If dominance info is not available, only the edge E is removed.  */

void
remove_edge_and_dominated_blocks (edge e)
{
  VEC (basic_block, heap) *bbs_to_remove = NULL;
  VEC (basic_block, heap) *bbs_to_fix_dom = NULL;
  bitmap df, df_idom;
  edge f;
  edge_iterator ei;
  bool none_removed = false;
  unsigned i;
  basic_block bb, dbb;
  bitmap_iterator bi;

  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      remove_edge (e);
      return;
    }

  /* No updating is needed for edges to exit.  */
  if (e->dest == EXIT_BLOCK_PTR)
    {
      if (cfgcleanup_altered_bbs)
	bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      remove_edge (e);
      return;
    }

  /* First, we find the basic blocks to remove.  If E->dest has a predecessor
     that is not dominated by E->dest, then this set is empty.  Otherwise,
     all the basic blocks dominated by E->dest are removed.

     Also, to DF_IDOM we store the immediate dominators of the blocks in
     the dominance frontier of E (i.e., of the successors of the
     removed blocks, if there are any, and of E->dest otherwise).  */
  FOR_EACH_EDGE (f, ei, e->dest->preds)
    {
      if (f == e)
	continue;

      if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
	{
	  none_removed = true;
	  break;
	}
    }

  df = BITMAP_ALLOC (NULL);
  df_idom = BITMAP_ALLOC (NULL);

  if (none_removed)
    bitmap_set_bit (df_idom,
		    get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
  else
    {
      bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
      /* DF = successors of the removed blocks that are themselves kept
	 (the dominance frontier of the removed region).  */
      for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
	{
	  FOR_EACH_EDGE (f, ei, bb->succs)
	    {
	      if (f->dest != EXIT_BLOCK_PTR)
		bitmap_set_bit (df, f->dest->index);
	    }
	}
      for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
	bitmap_clear_bit (df, bb->index);

      EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
	{
	  bb = BASIC_BLOCK (i);
	  bitmap_set_bit (df_idom,
			  get_immediate_dominator (CDI_DOMINATORS, bb)->index);
	}
    }

  if (cfgcleanup_altered_bbs)
    {
      /* Record the set of the altered basic blocks.  */
      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      bitmap_ior_into (cfgcleanup_altered_bbs, df);
    }

  /* Remove E and the cancelled blocks.  */
  if (none_removed)
    remove_edge (e);
  else
    {
      for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
	delete_basic_block (bb);
    }

  /* Update the dominance information.  The immediate dominator may change only
     for blocks whose immediate dominator belongs to DF_IDOM:

     Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
     removal.  Let Z the arbitrary block such that idom(Z) = Y and
     Z dominates X after the removal.  Before removal, there exists a path P
     from Y to X that avoids Z.  Let F be the last edge on P that is
     removed, and let W = F->dest.  Before removal, idom(W) = Y (since Y
     dominates W, and because of P, Z does not dominate W), and W belongs to
     the dominance frontier of E.  Therefore, Y belongs to DF_IDOM.  */
  EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
    {
      bb = BASIC_BLOCK (i);
      for (dbb = first_dom_son (CDI_DOMINATORS, bb);
	   dbb;
	   dbb = next_dom_son (CDI_DOMINATORS, dbb))
	VEC_safe_push (basic_block, heap, bbs_to_fix_dom, dbb);
    }

  iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);

  BITMAP_FREE (df);
  BITMAP_FREE (df_idom);
  VEC_free (basic_block, heap, bbs_to_remove);
  VEC_free (basic_block, heap, bbs_to_fix_dom);
}
/* Purge dead EH edges from basic block BB.  An EH edge is dead when
   BB's last statement can no longer throw internally.  Removes the
   dead edges together with any blocks they alone dominate.  Returns
   true if the CFG changed.  */

bool
gimple_purge_dead_eh_edges (basic_block bb)
{
  bool changed = false;
  edge e;
  edge_iterator ei;
  gimple stmt = last_stmt (bb);

  /* If the last statement can still throw, all EH edges remain live.  */
  if (stmt && stmt_can_throw_internal (stmt))
    return false;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (e->flags & EDGE_EH)
	{
	  remove_edge_and_dominated_blocks (e);
	  changed = true;
	}
      else
	ei_next (&ei);
    }

  return changed;
}
/* Purge dead EH edges from every basic block whose index is set in
   BLOCKS.  Returns true if any block changed.  A block may already
   have been deleted by an earlier purge; that is only legitimate
   when something has changed, which the assert enforces.  */

bool
gimple_purge_all_dead_eh_edges (const_bitmap blocks)
{
  bool changed = false;
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK (i);

      /* Earlier gimple_purge_dead_eh_edges could have removed
	 this basic block already.  */
      gcc_assert (bb || changed);
      if (bb != NULL)
	changed |= gimple_purge_dead_eh_edges (bb);
    }

  return changed;
}
6998 /* This function is called whenever a new edge is created or
6999 redirected. */
7001 static void
7002 gimple_execute_on_growing_pred (edge e)
7004 basic_block bb = e->dest;
7006 if (phi_nodes (bb))
7007 reserve_phi_args_for_new_edge (bb);
7010 /* This function is called immediately before edge E is removed from
7011 the edge vector E->dest->preds. */
7013 static void
7014 gimple_execute_on_shrinking_pred (edge e)
7016 if (phi_nodes (e->dest))
7017 remove_phi_args (e);
/*---------------------------------------------------------------------------
  Helper functions for Loop versioning
  ---------------------------------------------------------------------------*/

/* Adjust phi nodes for 'first' basic block.  'second' basic block is a copy
   of 'first'.  Both of them are dominated by 'new_head' basic block.  When
   'new_head' was created by 'second's incoming edge it received phi arguments
   on the edge by split_edge().  Later, additional edge 'e' was created to
   connect 'new_head' and 'first'.  Now this routine adds phi args on this
   additional edge 'e' that new_head to second edge received as part of edge
   splitting.  */

static void
gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
				  basic_block new_head, edge e)
{
  gimple phi1, phi2;
  gimple_stmt_iterator psi1, psi2;
  tree def;
  edge e2 = find_edge (new_head, second);

  /* Because NEW_HEAD has been created by splitting SECOND's incoming
     edge, we should always have an edge from NEW_HEAD to SECOND.  */
  gcc_assert (e2 != NULL);

  /* Browse all 'second' basic block phi nodes and add phi args to
     edge 'e' for 'first' head.  PHI args are always in correct order.  */

  /* The two blocks' PHI chains are walked in lockstep; since SECOND is
     a copy of FIRST, corresponding PHIs line up positionally.  */
  for (psi2 = gsi_start_phis (second),
       psi1 = gsi_start_phis (first);
       !gsi_end_p (psi2) && !gsi_end_p (psi1);
       gsi_next (&psi2), gsi_next (&psi1))
    {
      phi1 = gsi_stmt (psi1);
      phi2 = gsi_stmt (psi2);
      def = PHI_ARG_DEF (phi2, e2->dest_idx);
      add_phi_arg (phi1, def, e);
    }
}
/* Adds a if else statement to COND_BB with condition COND_EXPR.
   SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
   the destination of the ELSE part.  The heads themselves are unused
   here; the caller wires the TRUE edge, this routine only relabels
   the existing fallthru edge as the FALSE edge.  */

static void
gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
			       basic_block second_head ATTRIBUTE_UNUSED,
			       basic_block cond_bb, void *cond_e)
{
  gimple_stmt_iterator gsi;
  gimple new_cond_expr;
  tree cond_expr = (tree) cond_e;
  edge e0;

  /* Build new conditional expr.  */
  new_cond_expr = gimple_build_cond_from_tree (cond_expr,
					       NULL_TREE, NULL_TREE);

  /* Add new cond in cond_bb.  */
  gsi = gsi_last_bb (cond_bb);
  gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);

  /* Adjust edges appropriately to connect new head with first head
     as well as second head.  */
  e0 = single_succ_edge (cond_bb);
  e0->flags &= ~EDGE_FALLTHRU;
  e0->flags |= EDGE_FALSE_VALUE;
}
/* The set of CFG manipulation hooks used when the IL is GIMPLE; see
   cfghooks.h for the meaning of each slot.  */
struct cfg_hooks gimple_cfg_hooks = {
  "gimple",
  gimple_verify_flow_info,
  gimple_dump_bb,		/* dump_bb  */
  create_bb,			/* create_basic_block  */
  gimple_redirect_edge_and_branch, /* redirect_edge_and_branch  */
  gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force  */
  gimple_can_remove_branch_p,	/* can_remove_branch_p  */
  remove_bb,			/* delete_basic_block  */
  gimple_split_block,		/* split_block  */
  gimple_move_block_after,	/* move_block_after  */
  gimple_can_merge_blocks_p,	/* can_merge_blocks_p  */
  gimple_merge_blocks,		/* merge_blocks  */
  gimple_predict_edge,		/* predict_edge  */
  gimple_predicted_by_p,	/* predicted_by_p  */
  gimple_can_duplicate_bb_p,	/* can_duplicate_block_p  */
  gimple_duplicate_bb,		/* duplicate_block  */
  gimple_split_edge,		/* split_edge  */
  gimple_make_forwarder_block,	/* make_forward_block  */
  NULL,				/* tidy_fallthru_edge  */
  gimple_block_ends_with_call_p,/* block_ends_with_call_p  */
  gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p  */
  gimple_flow_call_edges_add,	/* flow_call_edges_add  */
  gimple_execute_on_growing_pred, /* execute_on_growing_pred  */
  gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred  */
  gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees  */
  gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb  */
  gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi  */
  extract_true_false_edges_from_block, /* extract_cond_bb_edges  */
  flush_pending_stmts		/* flush_pending_stmts  */
};
/* Split all critical edges.  Also splits non-critical edges that
   gimple_find_edge_insert_loc would otherwise have to split when
   committing edge insertions (see comment below).  Returns 0 (no
   TODO flags).  */

static unsigned int
split_critical_edges (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
     expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
     mappings around the calls to split_edge.  */
  start_recording_case_labels ();
  FOR_ALL_BB (bb)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
	    split_edge (e);
	  /* PRE inserts statements to edges and expects that
	     since split_critical_edges was done beforehand, committing edge
	     insertions will not split more edges.  In addition to critical
	     edges we must split edges that have multiple successors and
	     end by control flow statements, such as RESX.
	     Go ahead and split them too.  This matches the logic in
	     gimple_find_edge_insert_loc.  */
	  else if ((!single_pred_p (e->dest)
		    || phi_nodes (e->dest)
		    || e->dest == EXIT_BLOCK_PTR)
		   && e->src != ENTRY_BLOCK_PTR
		   && !(e->flags & EDGE_ABNORMAL))
	    {
	      gimple_stmt_iterator gsi;

	      gsi = gsi_last_bb (e->src);
	      if (!gsi_end_p (gsi)
		  && stmt_ends_bb_p (gsi_stmt (gsi))
		  && gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN)
		split_edge (e);
	    }
	}
    }
  end_recording_case_labels ();
  return 0;
}
7169 struct gimple_opt_pass pass_split_crit_edges =
7172 GIMPLE_PASS,
7173 "crited", /* name */
7174 NULL, /* gate */
7175 split_critical_edges, /* execute */
7176 NULL, /* sub */
7177 NULL, /* next */
7178 0, /* static_pass_number */
7179 TV_TREE_SPLIT_EDGES, /* tv_id */
7180 PROP_cfg, /* properties required */
7181 PROP_no_crit_edges, /* properties_provided */
7182 0, /* properties_destroyed */
7183 0, /* todo_flags_start */
7184 TODO_dump_func | TODO_verify_flow /* todo_flags_finish */
7189 /* Build a ternary operation and gimplify it. Emit code before GSI.
7190 Return the gimple_val holding the result. */
7192 tree
7193 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
7194 tree type, tree a, tree b, tree c)
7196 tree ret;
7198 ret = fold_build3 (code, type, a, b, c);
7199 STRIP_NOPS (ret);
7201 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7202 GSI_SAME_STMT);
7205 /* Build a binary operation and gimplify it. Emit code before GSI.
7206 Return the gimple_val holding the result. */
7208 tree
7209 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
7210 tree type, tree a, tree b)
7212 tree ret;
7214 ret = fold_build2 (code, type, a, b);
7215 STRIP_NOPS (ret);
7217 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7218 GSI_SAME_STMT);
7221 /* Build a unary operation and gimplify it. Emit code before GSI.
7222 Return the gimple_val holding the result. */
7224 tree
7225 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
7226 tree a)
7228 tree ret;
7230 ret = fold_build1 (code, type, a);
7231 STRIP_NOPS (ret);
7233 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7234 GSI_SAME_STMT);
7239 /* Emit return warnings. */
7241 static unsigned int
7242 execute_warn_function_return (void)
7244 source_location location;
7245 gimple last;
7246 edge e;
7247 edge_iterator ei;
7249 /* If we have a path to EXIT, then we do return. */
7250 if (TREE_THIS_VOLATILE (cfun->decl)
7251 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
7253 location = UNKNOWN_LOCATION;
7254 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7256 last = last_stmt (e->src);
7257 if (gimple_code (last) == GIMPLE_RETURN
7258 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
7259 break;
7261 if (location == UNKNOWN_LOCATION)
7262 location = cfun->function_end_locus;
7263 warning (0, "%H%<noreturn%> function does return", &location);
7266 /* If we see "return;" in some basic block, then we do reach the end
7267 without returning a value. */
7268 else if (warn_return_type
7269 && !TREE_NO_WARNING (cfun->decl)
7270 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
7271 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
7273 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7275 gimple last = last_stmt (e->src);
7276 if (gimple_code (last) == GIMPLE_RETURN
7277 && gimple_return_retval (last) == NULL
7278 && !gimple_no_warning_p (last))
7280 location = gimple_location (last);
7281 if (location == UNKNOWN_LOCATION)
7282 location = cfun->function_end_locus;
7283 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
7284 TREE_NO_WARNING (cfun->decl) = 1;
7285 break;
7289 return 0;
7293 /* Given a basic block B which ends with a conditional and has
7294 precisely two successors, determine which of the edges is taken if
7295 the conditional is true and which is taken if the conditional is
7296 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
7298 void
7299 extract_true_false_edges_from_block (basic_block b,
7300 edge *true_edge,
7301 edge *false_edge)
7303 edge e = EDGE_SUCC (b, 0);
7305 if (e->flags & EDGE_TRUE_VALUE)
7307 *true_edge = e;
7308 *false_edge = EDGE_SUCC (b, 1);
7310 else
7312 *false_edge = e;
7313 *true_edge = EDGE_SUCC (b, 1);
7317 struct gimple_opt_pass pass_warn_function_return =
7320 GIMPLE_PASS,
7321 NULL, /* name */
7322 NULL, /* gate */
7323 execute_warn_function_return, /* execute */
7324 NULL, /* sub */
7325 NULL, /* next */
7326 0, /* static_pass_number */
7327 TV_NONE, /* tv_id */
7328 PROP_cfg, /* properties_required */
7329 0, /* properties_provided */
7330 0, /* properties_destroyed */
7331 0, /* todo_flags_start */
7332 0 /* todo_flags_finish */
7336 /* Emit noreturn warnings. */
7338 static unsigned int
7339 execute_warn_function_noreturn (void)
7341 if (warn_missing_noreturn
7342 && !TREE_THIS_VOLATILE (cfun->decl)
7343 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
7344 && !lang_hooks.missing_noreturn_ok_p (cfun->decl))
7345 warning (OPT_Wmissing_noreturn, "%Jfunction might be possible candidate "
7346 "for attribute %<noreturn%>",
7347 cfun->decl);
7348 return 0;
7351 struct gimple_opt_pass pass_warn_function_noreturn =
7354 GIMPLE_PASS,
7355 NULL, /* name */
7356 NULL, /* gate */
7357 execute_warn_function_noreturn, /* execute */
7358 NULL, /* sub */
7359 NULL, /* next */
7360 0, /* static_pass_number */
7361 TV_NONE, /* tv_id */
7362 PROP_cfg, /* properties_required */
7363 0, /* properties_provided */
7364 0, /* properties_destroyed */
7365 0, /* todo_flags_start */
7366 0 /* todo_flags_finish */