* zh_CN.po: Update.
[official-gcc.git] / gcc / tree-cfg.c
blobe5ed9ec7006a0b1ab7b52e8a9256ddf39f62d62e
1 /* Control flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
3 2010 Free Software Foundation, Inc.
4 Contributed by Diego Novillo <dnovillo@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "output.h"
32 #include "flags.h"
33 #include "function.h"
34 #include "expr.h"
35 #include "ggc.h"
36 #include "langhooks.h"
37 #include "diagnostic.h"
38 #include "tree-flow.h"
39 #include "timevar.h"
40 #include "tree-dump.h"
41 #include "tree-pass.h"
42 #include "toplev.h"
43 #include "except.h"
44 #include "cfgloop.h"
45 #include "cfglayout.h"
46 #include "tree-ssa-propagate.h"
47 #include "value-prof.h"
48 #include "pointer-set.h"
49 #include "tree-inline.h"
51 /* This file contains functions for building the Control Flow Graph (CFG)
52 for a function tree. */
54 /* Local declarations. */
56 /* Initial capacity for the basic block array. */
57 static const int initial_cfg_capacity = 20;
59 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
60 which use a particular edge. The CASE_LABEL_EXPRs are chained together
61 via their TREE_CHAIN field, which we clear after we're done with the
62 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
64 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
65 update the case vector in response to edge redirections.
67 Right now this table is set up and torn down at key points in the
68 compilation process. It would be nice if we could make the table
69 more persistent. The key is getting notification of changes to
70 the CFG (particularly edge removal, creation and redirection). */
72 static struct pointer_map_t *edge_to_cases;
74 /* CFG statistics. */
75 struct cfg_stats_d
77 long num_merged_labels;
80 static struct cfg_stats_d cfg_stats;
82 /* Nonzero if we found a computed goto while building basic blocks. */
83 static bool found_computed_goto;
85 /* Hash table to store last discriminator assigned for each locus. */
86 struct locus_discrim_map
88 location_t locus;
89 int discriminator;
91 static htab_t discriminator_per_locus;
93 /* Basic blocks and flowgraphs. */
94 static void make_blocks (gimple_seq);
95 static void factor_computed_gotos (void);
97 /* Edges. */
98 static void make_edges (void);
99 static void make_cond_expr_edges (basic_block);
100 static void make_gimple_switch_edges (basic_block);
101 static void make_goto_expr_edges (basic_block);
102 static void make_gimple_asm_edges (basic_block);
103 static unsigned int locus_map_hash (const void *);
104 static int locus_map_eq (const void *, const void *);
105 static void assign_discriminator (location_t, basic_block);
106 static edge gimple_redirect_edge_and_branch (edge, basic_block);
107 static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
108 static unsigned int split_critical_edges (void);
110 /* Various helpers. */
111 static inline bool stmt_starts_bb_p (gimple, gimple);
112 static int gimple_verify_flow_info (void);
113 static void gimple_make_forwarder_block (edge);
114 static void gimple_cfg2vcg (FILE *);
115 static gimple first_non_label_stmt (basic_block);
117 /* Flowgraph optimization and cleanup. */
118 static void gimple_merge_blocks (basic_block, basic_block);
119 static bool gimple_can_merge_blocks_p (basic_block, basic_block);
120 static void remove_bb (basic_block);
121 static edge find_taken_edge_computed_goto (basic_block, tree);
122 static edge find_taken_edge_cond_expr (basic_block, tree);
123 static edge find_taken_edge_switch_expr (basic_block, tree);
124 static tree find_case_label_for_value (gimple, tree);
126 void
127 init_empty_tree_cfg_for_function (struct function *fn)
129 /* Initialize the basic block array. */
130 init_flow (fn);
131 profile_status_for_function (fn) = PROFILE_ABSENT;
132 n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
133 last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
134 basic_block_info_for_function (fn)
135 = VEC_alloc (basic_block, gc, initial_cfg_capacity);
136 VEC_safe_grow_cleared (basic_block, gc,
137 basic_block_info_for_function (fn),
138 initial_cfg_capacity);
140 /* Build a mapping of labels to their associated blocks. */
141 label_to_block_map_for_function (fn)
142 = VEC_alloc (basic_block, gc, initial_cfg_capacity);
143 VEC_safe_grow_cleared (basic_block, gc,
144 label_to_block_map_for_function (fn),
145 initial_cfg_capacity);
147 SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
148 ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
149 SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
150 EXIT_BLOCK_PTR_FOR_FUNCTION (fn));
152 ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
153 = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
154 EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
155 = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
158 void
159 init_empty_tree_cfg (void)
161 init_empty_tree_cfg_for_function (cfun);
164 /*---------------------------------------------------------------------------
165 Create basic blocks
166 ---------------------------------------------------------------------------*/
168 /* Entry point to the CFG builder for trees. SEQ is the sequence of
169 statements to be added to the flowgraph. */
171 static void
172 build_gimple_cfg (gimple_seq seq)
174 /* Register specific gimple functions. */
175 gimple_register_cfg_hooks ();
177 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
179 init_empty_tree_cfg ();
181 found_computed_goto = 0;
182 make_blocks (seq);
184 /* Computed gotos are hell to deal with, especially if there are
185 lots of them with a large number of destinations. So we factor
186 them to a common computed goto location before we build the
187 edge list. After we convert back to normal form, we will un-factor
188 the computed gotos since factoring introduces an unwanted jump. */
189 if (found_computed_goto)
190 factor_computed_gotos ();
192 /* Make sure there is always at least one block, even if it's empty. */
193 if (n_basic_blocks == NUM_FIXED_BLOCKS)
194 create_empty_bb (ENTRY_BLOCK_PTR);
196 /* Adjust the size of the array. */
197 if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
198 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);
200 /* To speed up statement iterator walks, we first purge dead labels. */
201 cleanup_dead_labels ();
203 /* Group case nodes to reduce the number of edges.
204 We do this after cleaning up dead labels because otherwise we miss
205 a lot of obvious case merging opportunities. */
206 group_case_labels ();
208 /* Create the edges of the flowgraph. */
209 discriminator_per_locus = htab_create (13, locus_map_hash, locus_map_eq,
210 free);
211 make_edges ();
212 cleanup_dead_labels ();
213 htab_delete (discriminator_per_locus);
215 /* Debugging dumps. */
217 /* Write the flowgraph to a VCG file. */
219 int local_dump_flags;
220 FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
221 if (vcg_file)
223 gimple_cfg2vcg (vcg_file);
224 dump_end (TDI_vcg, vcg_file);
228 #ifdef ENABLE_CHECKING
229 verify_stmts ();
230 #endif
233 static unsigned int
234 execute_build_cfg (void)
236 gimple_seq body = gimple_body (current_function_decl);
238 build_gimple_cfg (body);
239 gimple_set_body (current_function_decl, NULL);
240 if (dump_file && (dump_flags & TDF_DETAILS))
242 fprintf (dump_file, "Scope blocks:\n");
243 dump_scope_blocks (dump_file, dump_flags);
245 return 0;
248 struct gimple_opt_pass pass_build_cfg =
251 GIMPLE_PASS,
252 "cfg", /* name */
253 NULL, /* gate */
254 execute_build_cfg, /* execute */
255 NULL, /* sub */
256 NULL, /* next */
257 0, /* static_pass_number */
258 TV_TREE_CFG, /* tv_id */
259 PROP_gimple_leh, /* properties_required */
260 PROP_cfg, /* properties_provided */
261 0, /* properties_destroyed */
262 0, /* todo_flags_start */
263 TODO_verify_stmts | TODO_cleanup_cfg
264 | TODO_dump_func /* todo_flags_finish */
269 /* Return true if T is a computed goto. */
271 static bool
272 computed_goto_p (gimple t)
274 return (gimple_code (t) == GIMPLE_GOTO
275 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
279 /* Search the CFG for any computed gotos. If found, factor them to a
280 common computed goto site. Also record the location of that site so
281 that we can un-factor the gotos after we have converted back to
282 normal form. */
284 static void
285 factor_computed_gotos (void)
287 basic_block bb;
288 tree factored_label_decl = NULL;
289 tree var = NULL;
290 gimple factored_computed_goto_label = NULL;
291 gimple factored_computed_goto = NULL;
293 /* We know there are one or more computed gotos in this function.
294 Examine the last statement in each basic block to see if the block
295 ends with a computed goto. */
297 FOR_EACH_BB (bb)
299 gimple_stmt_iterator gsi = gsi_last_bb (bb);
300 gimple last;
302 if (gsi_end_p (gsi))
303 continue;
305 last = gsi_stmt (gsi);
307 /* Ignore the computed goto we create when we factor the original
308 computed gotos. */
309 if (last == factored_computed_goto)
310 continue;
312 /* If the last statement is a computed goto, factor it. */
313 if (computed_goto_p (last))
315 gimple assignment;
317 /* The first time we find a computed goto we need to create
318 the factored goto block and the variable each original
319 computed goto will use for their goto destination. */
320 if (!factored_computed_goto)
322 basic_block new_bb = create_empty_bb (bb);
323 gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);
325 /* Create the destination of the factored goto. Each original
326 computed goto will put its desired destination into this
327 variable and jump to the label we create immediately
328 below. */
329 var = create_tmp_var (ptr_type_node, "gotovar");
331 /* Build a label for the new block which will contain the
332 factored computed goto. */
333 factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
334 factored_computed_goto_label
335 = gimple_build_label (factored_label_decl);
336 gsi_insert_after (&new_gsi, factored_computed_goto_label,
337 GSI_NEW_STMT);
339 /* Build our new computed goto. */
340 factored_computed_goto = gimple_build_goto (var);
341 gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
344 /* Copy the original computed goto's destination into VAR. */
345 assignment = gimple_build_assign (var, gimple_goto_dest (last));
346 gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
348 /* And re-vector the computed goto to the new destination. */
349 gimple_goto_set_dest (last, factored_label_decl);
355 /* Build a flowgraph for the sequence of stmts SEQ. */
357 static void
358 make_blocks (gimple_seq seq)
360 gimple_stmt_iterator i = gsi_start (seq);
361 gimple stmt = NULL;
362 bool start_new_block = true;
363 bool first_stmt_of_seq = true;
364 basic_block bb = ENTRY_BLOCK_PTR;
366 while (!gsi_end_p (i))
368 gimple prev_stmt;
370 prev_stmt = stmt;
371 stmt = gsi_stmt (i);
373 /* If the statement starts a new basic block or if we have determined
374 in a previous pass that we need to create a new block for STMT, do
375 so now. */
376 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
378 if (!first_stmt_of_seq)
379 seq = gsi_split_seq_before (&i);
380 bb = create_basic_block (seq, NULL, bb);
381 start_new_block = false;
384 /* Now add STMT to BB and create the subgraphs for special statement
385 codes. */
386 gimple_set_bb (stmt, bb);
388 if (computed_goto_p (stmt))
389 found_computed_goto = true;
391 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
392 next iteration. */
393 if (stmt_ends_bb_p (stmt))
395 /* If the stmt can make abnormal goto use a new temporary
396 for the assignment to the LHS. This makes sure the old value
397 of the LHS is available on the abnormal edge. Otherwise
398 we will end up with overlapping life-ranges for abnormal
399 SSA names. */
400 if (gimple_has_lhs (stmt)
401 && stmt_can_make_abnormal_goto (stmt)
402 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
404 tree lhs = gimple_get_lhs (stmt);
405 tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
406 gimple s = gimple_build_assign (lhs, tmp);
407 gimple_set_location (s, gimple_location (stmt));
408 gimple_set_block (s, gimple_block (stmt));
409 gimple_set_lhs (stmt, tmp);
410 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
411 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
412 DECL_GIMPLE_REG_P (tmp) = 1;
413 gsi_insert_after (&i, s, GSI_SAME_STMT);
415 start_new_block = true;
418 gsi_next (&i);
419 first_stmt_of_seq = false;
424 /* Create and return a new empty basic block after bb AFTER. */
426 static basic_block
427 create_bb (void *h, void *e, basic_block after)
429 basic_block bb;
431 gcc_assert (!e);
433 /* Create and initialize a new basic block. Since alloc_block uses
434 ggc_alloc_cleared to allocate a basic block, we do not have to
435 clear the newly allocated basic block here. */
436 bb = alloc_block ();
438 bb->index = last_basic_block;
439 bb->flags = BB_NEW;
440 bb->il.gimple = GGC_CNEW (struct gimple_bb_info);
441 set_bb_seq (bb, h ? (gimple_seq) h : gimple_seq_alloc ());
443 /* Add the new block to the linked list of blocks. */
444 link_block (bb, after);
446 /* Grow the basic block array if needed. */
447 if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
449 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
450 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
453 /* Add the newly created block to the array. */
454 SET_BASIC_BLOCK (last_basic_block, bb);
456 n_basic_blocks++;
457 last_basic_block++;
459 return bb;
463 /*---------------------------------------------------------------------------
464 Edge creation
465 ---------------------------------------------------------------------------*/
467 /* Fold COND_EXPR_COND of each COND_EXPR. */
469 void
470 fold_cond_expr_cond (void)
472 basic_block bb;
474 FOR_EACH_BB (bb)
476 gimple stmt = last_stmt (bb);
478 if (stmt && gimple_code (stmt) == GIMPLE_COND)
480 location_t loc = gimple_location (stmt);
481 tree cond;
482 bool zerop, onep;
484 fold_defer_overflow_warnings ();
485 cond = fold_binary_loc (loc, gimple_cond_code (stmt), boolean_type_node,
486 gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
487 if (cond)
489 zerop = integer_zerop (cond);
490 onep = integer_onep (cond);
492 else
493 zerop = onep = false;
495 fold_undefer_overflow_warnings (zerop || onep,
496 stmt,
497 WARN_STRICT_OVERFLOW_CONDITIONAL);
498 if (zerop)
499 gimple_cond_make_false (stmt);
500 else if (onep)
501 gimple_cond_make_true (stmt);
506 /* Join all the blocks in the flowgraph. */
508 static void
509 make_edges (void)
511 basic_block bb;
512 struct omp_region *cur_region = NULL;
514 /* Create an edge from entry to the first block with executable
515 statements in it. */
516 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);
518 /* Traverse the basic block array placing edges. */
519 FOR_EACH_BB (bb)
521 gimple last = last_stmt (bb);
522 bool fallthru;
524 if (last)
526 enum gimple_code code = gimple_code (last);
527 switch (code)
529 case GIMPLE_GOTO:
530 make_goto_expr_edges (bb);
531 fallthru = false;
532 break;
533 case GIMPLE_RETURN:
534 make_edge (bb, EXIT_BLOCK_PTR, 0);
535 fallthru = false;
536 break;
537 case GIMPLE_COND:
538 make_cond_expr_edges (bb);
539 fallthru = false;
540 break;
541 case GIMPLE_SWITCH:
542 make_gimple_switch_edges (bb);
543 fallthru = false;
544 break;
545 case GIMPLE_RESX:
546 make_eh_edges (last);
547 fallthru = false;
548 break;
549 case GIMPLE_EH_DISPATCH:
550 fallthru = make_eh_dispatch_edges (last);
551 break;
553 case GIMPLE_CALL:
554 /* If this function receives a nonlocal goto, then we need to
555 make edges from this call site to all the nonlocal goto
556 handlers. */
557 if (stmt_can_make_abnormal_goto (last))
558 make_abnormal_goto_edges (bb, true);
560 /* If this statement has reachable exception handlers, then
561 create abnormal edges to them. */
562 make_eh_edges (last);
564 /* Some calls are known not to return. */
565 fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
566 break;
568 case GIMPLE_ASSIGN:
569 /* A GIMPLE_ASSIGN may throw internally and thus be considered
570 control-altering. */
571 if (is_ctrl_altering_stmt (last))
572 make_eh_edges (last);
573 fallthru = true;
574 break;
576 case GIMPLE_ASM:
577 make_gimple_asm_edges (bb);
578 fallthru = true;
579 break;
581 case GIMPLE_OMP_PARALLEL:
582 case GIMPLE_OMP_TASK:
583 case GIMPLE_OMP_FOR:
584 case GIMPLE_OMP_SINGLE:
585 case GIMPLE_OMP_MASTER:
586 case GIMPLE_OMP_ORDERED:
587 case GIMPLE_OMP_CRITICAL:
588 case GIMPLE_OMP_SECTION:
589 cur_region = new_omp_region (bb, code, cur_region);
590 fallthru = true;
591 break;
593 case GIMPLE_OMP_SECTIONS:
594 cur_region = new_omp_region (bb, code, cur_region);
595 fallthru = true;
596 break;
598 case GIMPLE_OMP_SECTIONS_SWITCH:
599 fallthru = false;
600 break;
602 case GIMPLE_OMP_ATOMIC_LOAD:
603 case GIMPLE_OMP_ATOMIC_STORE:
604 fallthru = true;
605 break;
607 case GIMPLE_OMP_RETURN:
608 /* In the case of a GIMPLE_OMP_SECTION, the edge will go
609 somewhere other than the next block. This will be
610 created later. */
611 cur_region->exit = bb;
612 fallthru = cur_region->type != GIMPLE_OMP_SECTION;
613 cur_region = cur_region->outer;
614 break;
616 case GIMPLE_OMP_CONTINUE:
617 cur_region->cont = bb;
618 switch (cur_region->type)
620 case GIMPLE_OMP_FOR:
621 /* Mark all GIMPLE_OMP_FOR and GIMPLE_OMP_CONTINUE
622 succs edges as abnormal to prevent splitting
623 them. */
624 single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
625 /* Make the loopback edge. */
626 make_edge (bb, single_succ (cur_region->entry),
627 EDGE_ABNORMAL);
629 /* Create an edge from GIMPLE_OMP_FOR to exit, which
630 corresponds to the case that the body of the loop
631 is not executed at all. */
632 make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
633 make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
634 fallthru = false;
635 break;
637 case GIMPLE_OMP_SECTIONS:
638 /* Wire up the edges into and out of the nested sections. */
640 basic_block switch_bb = single_succ (cur_region->entry);
642 struct omp_region *i;
643 for (i = cur_region->inner; i ; i = i->next)
645 gcc_assert (i->type == GIMPLE_OMP_SECTION);
646 make_edge (switch_bb, i->entry, 0);
647 make_edge (i->exit, bb, EDGE_FALLTHRU);
650 /* Make the loopback edge to the block with
651 GIMPLE_OMP_SECTIONS_SWITCH. */
652 make_edge (bb, switch_bb, 0);
654 /* Make the edge from the switch to exit. */
655 make_edge (switch_bb, bb->next_bb, 0);
656 fallthru = false;
658 break;
660 default:
661 gcc_unreachable ();
663 break;
665 default:
666 gcc_assert (!stmt_ends_bb_p (last));
667 fallthru = true;
670 else
671 fallthru = true;
673 if (fallthru)
675 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
676 if (last)
677 assign_discriminator (gimple_location (last), bb->next_bb);
681 if (root_omp_region)
682 free_omp_regions ();
684 /* Fold COND_EXPR_COND of each COND_EXPR. */
685 fold_cond_expr_cond ();
688 /* Trivial hash function for a location_t. ITEM is a pointer to
689 a hash table entry that maps a location_t to a discriminator. */
691 static unsigned int
692 locus_map_hash (const void *item)
694 return ((const struct locus_discrim_map *) item)->locus;
697 /* Equality function for the locus-to-discriminator map. VA and VB
698 point to the two hash table entries to compare. */
700 static int
701 locus_map_eq (const void *va, const void *vb)
703 const struct locus_discrim_map *a = (const struct locus_discrim_map *) va;
704 const struct locus_discrim_map *b = (const struct locus_discrim_map *) vb;
705 return a->locus == b->locus;
708 /* Find the next available discriminator value for LOCUS. The
709 discriminator distinguishes among several basic blocks that
710 share a common locus, allowing for more accurate sample-based
711 profiling. */
713 static int
714 next_discriminator_for_locus (location_t locus)
716 struct locus_discrim_map item;
717 struct locus_discrim_map **slot;
719 item.locus = locus;
720 item.discriminator = 0;
721 slot = (struct locus_discrim_map **)
722 htab_find_slot_with_hash (discriminator_per_locus, (void *) &item,
723 (hashval_t) locus, INSERT);
724 gcc_assert (slot);
725 if (*slot == HTAB_EMPTY_ENTRY)
727 *slot = XNEW (struct locus_discrim_map);
728 gcc_assert (*slot);
729 (*slot)->locus = locus;
730 (*slot)->discriminator = 0;
732 (*slot)->discriminator++;
733 return (*slot)->discriminator;
736 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
738 static bool
739 same_line_p (location_t locus1, location_t locus2)
741 expanded_location from, to;
743 if (locus1 == locus2)
744 return true;
746 from = expand_location (locus1);
747 to = expand_location (locus2);
749 if (from.line != to.line)
750 return false;
751 if (from.file == to.file)
752 return true;
753 return (from.file != NULL
754 && to.file != NULL
755 && strcmp (from.file, to.file) == 0);
758 /* Assign a unique discriminator value to block BB if it begins at the same
759 LOCUS as its predecessor block. */
761 static void
762 assign_discriminator (location_t locus, basic_block bb)
764 gimple first_in_to_bb, last_in_to_bb;
766 if (locus == 0 || bb->discriminator != 0)
767 return;
769 first_in_to_bb = first_non_label_stmt (bb);
770 last_in_to_bb = last_stmt (bb);
771 if ((first_in_to_bb && same_line_p (locus, gimple_location (first_in_to_bb)))
772 || (last_in_to_bb && same_line_p (locus, gimple_location (last_in_to_bb))))
773 bb->discriminator = next_discriminator_for_locus (locus);
776 /* Create the edges for a GIMPLE_COND starting at block BB. */
778 static void
779 make_cond_expr_edges (basic_block bb)
781 gimple entry = last_stmt (bb);
782 gimple then_stmt, else_stmt;
783 basic_block then_bb, else_bb;
784 tree then_label, else_label;
785 edge e;
786 location_t entry_locus;
788 gcc_assert (entry);
789 gcc_assert (gimple_code (entry) == GIMPLE_COND);
791 entry_locus = gimple_location (entry);
793 /* Entry basic blocks for each component. */
794 then_label = gimple_cond_true_label (entry);
795 else_label = gimple_cond_false_label (entry);
796 then_bb = label_to_block (then_label);
797 else_bb = label_to_block (else_label);
798 then_stmt = first_stmt (then_bb);
799 else_stmt = first_stmt (else_bb);
801 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
802 assign_discriminator (entry_locus, then_bb);
803 e->goto_locus = gimple_location (then_stmt);
804 if (e->goto_locus)
805 e->goto_block = gimple_block (then_stmt);
806 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
807 if (e)
809 assign_discriminator (entry_locus, else_bb);
810 e->goto_locus = gimple_location (else_stmt);
811 if (e->goto_locus)
812 e->goto_block = gimple_block (else_stmt);
815 /* We do not need the labels anymore. */
816 gimple_cond_set_true_label (entry, NULL_TREE);
817 gimple_cond_set_false_label (entry, NULL_TREE);
821 /* Called for each element in the hash table (P) as we delete the
822 edge to cases hash table.
824 Clear all the TREE_CHAINs to prevent problems with copying of
825 SWITCH_EXPRs and structure sharing rules, then free the hash table
826 element. */
828 static bool
829 edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
830 void *data ATTRIBUTE_UNUSED)
832 tree t, next;
834 for (t = (tree) *value; t; t = next)
836 next = TREE_CHAIN (t);
837 TREE_CHAIN (t) = NULL;
840 *value = NULL;
841 return false;
844 /* Start recording information mapping edges to case labels. */
846 void
847 start_recording_case_labels (void)
849 gcc_assert (edge_to_cases == NULL);
850 edge_to_cases = pointer_map_create ();
853 /* Return nonzero if we are recording information for case labels. */
855 static bool
856 recording_case_labels_p (void)
858 return (edge_to_cases != NULL);
861 /* Stop recording information mapping edges to case labels and
862 remove any information we have recorded. */
863 void
864 end_recording_case_labels (void)
866 pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
867 pointer_map_destroy (edge_to_cases);
868 edge_to_cases = NULL;
871 /* If we are inside a {start,end}_recording_cases block, then return
872 a chain of CASE_LABEL_EXPRs from T which reference E.
874 Otherwise return NULL. */
876 static tree
877 get_cases_for_edge (edge e, gimple t)
879 void **slot;
880 size_t i, n;
882 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
883 chains available. Return NULL so the caller can detect this case. */
884 if (!recording_case_labels_p ())
885 return NULL;
887 slot = pointer_map_contains (edge_to_cases, e);
888 if (slot)
889 return (tree) *slot;
891 /* If we did not find E in the hash table, then this must be the first
892 time we have been queried for information about E & T. Add all the
893 elements from T to the hash table then perform the query again. */
895 n = gimple_switch_num_labels (t);
896 for (i = 0; i < n; i++)
898 tree elt = gimple_switch_label (t, i);
899 tree lab = CASE_LABEL (elt);
900 basic_block label_bb = label_to_block (lab);
901 edge this_edge = find_edge (e->src, label_bb);
903 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
904 a new chain. */
905 slot = pointer_map_insert (edge_to_cases, this_edge);
906 TREE_CHAIN (elt) = (tree) *slot;
907 *slot = elt;
910 return (tree) *pointer_map_contains (edge_to_cases, e);
913 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
915 static void
916 make_gimple_switch_edges (basic_block bb)
918 gimple entry = last_stmt (bb);
919 location_t entry_locus;
920 size_t i, n;
922 entry_locus = gimple_location (entry);
924 n = gimple_switch_num_labels (entry);
926 for (i = 0; i < n; ++i)
928 tree lab = CASE_LABEL (gimple_switch_label (entry, i));
929 basic_block label_bb = label_to_block (lab);
930 make_edge (bb, label_bb, 0);
931 assign_discriminator (entry_locus, label_bb);
936 /* Return the basic block holding label DEST. */
938 basic_block
939 label_to_block_fn (struct function *ifun, tree dest)
941 int uid = LABEL_DECL_UID (dest);
943 /* We would die hard when faced by an undefined label. Emit a label to
944 the very first basic block. This will hopefully make even the dataflow
945 and undefined variable warnings quite right. */
946 if ((errorcount || sorrycount) && uid < 0)
948 gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
949 gimple stmt;
951 stmt = gimple_build_label (dest);
952 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
953 uid = LABEL_DECL_UID (dest);
955 if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
956 <= (unsigned int) uid)
957 return NULL;
958 return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
961 /* Create edges for an abnormal goto statement at block BB. If FOR_CALL
962 is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */
964 void
965 make_abnormal_goto_edges (basic_block bb, bool for_call)
967 basic_block target_bb;
968 gimple_stmt_iterator gsi;
970 FOR_EACH_BB (target_bb)
971 for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
973 gimple label_stmt = gsi_stmt (gsi);
974 tree target;
976 if (gimple_code (label_stmt) != GIMPLE_LABEL)
977 break;
979 target = gimple_label_label (label_stmt);
981 /* Make an edge to every label block that has been marked as a
982 potential target for a computed goto or a non-local goto. */
983 if ((FORCED_LABEL (target) && !for_call)
984 || (DECL_NONLOCAL (target) && for_call))
986 make_edge (bb, target_bb, EDGE_ABNORMAL);
987 break;
992 /* Create edges for a goto statement at block BB. */
994 static void
995 make_goto_expr_edges (basic_block bb)
997 gimple_stmt_iterator last = gsi_last_bb (bb);
998 gimple goto_t = gsi_stmt (last);
1000 /* A simple GOTO creates normal edges. */
1001 if (simple_goto_p (goto_t))
1003 tree dest = gimple_goto_dest (goto_t);
1004 basic_block label_bb = label_to_block (dest);
1005 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
1006 e->goto_locus = gimple_location (goto_t);
1007 assign_discriminator (e->goto_locus, label_bb);
1008 if (e->goto_locus)
1009 e->goto_block = gimple_block (goto_t);
1010 gsi_remove (&last, true);
1011 return;
1014 /* A computed GOTO creates abnormal edges. */
1015 make_abnormal_goto_edges (bb, false);
1018 /* Create edges for an asm statement with labels at block BB. */
1020 static void
1021 make_gimple_asm_edges (basic_block bb)
1023 gimple stmt = last_stmt (bb);
1024 location_t stmt_loc = gimple_location (stmt);
1025 int i, n = gimple_asm_nlabels (stmt);
1027 for (i = 0; i < n; ++i)
1029 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1030 basic_block label_bb = label_to_block (label);
1031 make_edge (bb, label_bb, 0);
1032 assign_discriminator (stmt_loc, label_bb);
1036 /*---------------------------------------------------------------------------
1037 Flowgraph analysis
1038 ---------------------------------------------------------------------------*/
1040 /* Cleanup useless labels in basic blocks. This is something we wish
1041 to do early because it allows us to group case labels before creating
1042 the edges for the CFG, and it speeds up block statement iterators in
1043 all passes later on.
1044 We rerun this pass after CFG is created, to get rid of the labels that
1045 are no longer referenced. After then we do not run it any more, since
1046 (almost) no new labels should be created. */
1048 /* A map from basic block index to the leading label of that block. */
1049 static struct label_record
1051 /* The label. */
1052 tree label;
1054 /* True if the label is referenced from somewhere. */
1055 bool used;
1056 } *label_for_bb;
1058 /* Given LABEL return the first label in the same basic block. */
1060 static tree
1061 main_block_label (tree label)
1063 basic_block bb = label_to_block (label);
1064 tree main_label = label_for_bb[bb->index].label;
1066 /* label_to_block possibly inserted undefined label into the chain. */
1067 if (!main_label)
1069 label_for_bb[bb->index].label = label;
1070 main_label = label;
1073 label_for_bb[bb->index].used = true;
1074 return main_label;
1077 /* Clean up redundant labels within the exception tree. */
1079 static void
1080 cleanup_dead_labels_eh (void)
1082 eh_landing_pad lp;
1083 eh_region r;
1084 tree lab;
1085 int i;
1087 if (cfun->eh == NULL)
1088 return;
1090 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
1091 if (lp && lp->post_landing_pad)
1093 lab = main_block_label (lp->post_landing_pad);
1094 if (lab != lp->post_landing_pad)
1096 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1097 EH_LANDING_PAD_NR (lab) = lp->index;
1101 FOR_ALL_EH_REGION (r)
1102 switch (r->type)
1104 case ERT_CLEANUP:
1105 case ERT_MUST_NOT_THROW:
1106 break;
1108 case ERT_TRY:
1110 eh_catch c;
1111 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
1113 lab = c->label;
1114 if (lab)
1115 c->label = main_block_label (lab);
1118 break;
1120 case ERT_ALLOWED_EXCEPTIONS:
1121 lab = r->u.allowed.label;
1122 if (lab)
1123 r->u.allowed.label = main_block_label (lab);
1124 break;
1129 /* Cleanup redundant labels. This is a three-step process:
1130 1) Find the leading label for each block.
1131 2) Redirect all references to labels to the leading labels.
1132 3) Cleanup all useless labels. */
1134 void
1135 cleanup_dead_labels (void)
1137 basic_block bb;
1138 label_for_bb = XCNEWVEC (struct label_record, last_basic_block);
1140 /* Find a suitable label for each block. We use the first user-defined
1141 label if there is one, or otherwise just the first label we see. */
1142 FOR_EACH_BB (bb)
1144 gimple_stmt_iterator i;
1146 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1148 tree label;
1149 gimple stmt = gsi_stmt (i);
1151 if (gimple_code (stmt) != GIMPLE_LABEL)
1152 break;
1154 label = gimple_label_label (stmt);
1156 /* If we have not yet seen a label for the current block,
1157 remember this one and see if there are more labels. */
1158 if (!label_for_bb[bb->index].label)
1160 label_for_bb[bb->index].label = label;
1161 continue;
1164 /* If we did see a label for the current block already, but it
1165 is an artificially created label, replace it if the current
1166 label is a user defined label. */
1167 if (!DECL_ARTIFICIAL (label)
1168 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
1170 label_for_bb[bb->index].label = label;
1171 break;
1176 /* Now redirect all jumps/branches to the selected label.
1177 First do so for each block ending in a control statement. */
1178 FOR_EACH_BB (bb)
1180 gimple stmt = last_stmt (bb);
1181 if (!stmt)
1182 continue;
1184 switch (gimple_code (stmt))
1186 case GIMPLE_COND:
1188 tree true_label = gimple_cond_true_label (stmt);
1189 tree false_label = gimple_cond_false_label (stmt);
1191 if (true_label)
1192 gimple_cond_set_true_label (stmt, main_block_label (true_label));
1193 if (false_label)
1194 gimple_cond_set_false_label (stmt, main_block_label (false_label));
1195 break;
1198 case GIMPLE_SWITCH:
1200 size_t i, n = gimple_switch_num_labels (stmt);
1202 /* Replace all destination labels. */
1203 for (i = 0; i < n; ++i)
1205 tree case_label = gimple_switch_label (stmt, i);
1206 tree label = main_block_label (CASE_LABEL (case_label));
1207 CASE_LABEL (case_label) = label;
1209 break;
1212 case GIMPLE_ASM:
1214 int i, n = gimple_asm_nlabels (stmt);
1216 for (i = 0; i < n; ++i)
1218 tree cons = gimple_asm_label_op (stmt, i);
1219 tree label = main_block_label (TREE_VALUE (cons));
1220 TREE_VALUE (cons) = label;
1222 break;
1225 /* We have to handle gotos until they're removed, and we don't
1226 remove them until after we've created the CFG edges. */
1227 case GIMPLE_GOTO:
1228 if (!computed_goto_p (stmt))
1230 tree new_dest = main_block_label (gimple_goto_dest (stmt));
1231 gimple_goto_set_dest (stmt, new_dest);
1233 break;
1235 default:
1236 break;
1240 /* Do the same for the exception region tree labels. */
1241 cleanup_dead_labels_eh ();
1243 /* Finally, purge dead labels. All user-defined labels and labels that
1244 can be the target of non-local gotos and labels which have their
1245 address taken are preserved. */
1246 FOR_EACH_BB (bb)
1248 gimple_stmt_iterator i;
1249 tree label_for_this_bb = label_for_bb[bb->index].label;
1251 if (!label_for_this_bb)
1252 continue;
1254 /* If the main label of the block is unused, we may still remove it. */
1255 if (!label_for_bb[bb->index].used)
1256 label_for_this_bb = NULL;
1258 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
1260 tree label;
1261 gimple stmt = gsi_stmt (i);
1263 if (gimple_code (stmt) != GIMPLE_LABEL)
1264 break;
1266 label = gimple_label_label (stmt);
1268 if (label == label_for_this_bb
1269 || !DECL_ARTIFICIAL (label)
1270 || DECL_NONLOCAL (label)
1271 || FORCED_LABEL (label))
1272 gsi_next (&i);
1273 else
1274 gsi_remove (&i, true);
1278 free (label_for_bb);
1281 /* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1282 and scan the sorted vector of cases. Combine the ones jumping to the
1283 same label.
1284 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1286 void
1287 group_case_labels (void)
1289 basic_block bb;
1291 FOR_EACH_BB (bb)
1293 gimple stmt = last_stmt (bb);
1294 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1296 int old_size = gimple_switch_num_labels (stmt);
1297 int i, j, new_size = old_size;
1298 tree default_case = NULL_TREE;
1299 tree default_label = NULL_TREE;
1300 bool has_default;
1302 /* The default label is always the first case in a switch
1303 statement after gimplification if it was not optimized
1304 away */
1305 if (!CASE_LOW (gimple_switch_default_label (stmt))
1306 && !CASE_HIGH (gimple_switch_default_label (stmt)))
1308 default_case = gimple_switch_default_label (stmt);
1309 default_label = CASE_LABEL (default_case);
1310 has_default = true;
1312 else
1313 has_default = false;
1315 /* Look for possible opportunities to merge cases. */
1316 if (has_default)
1317 i = 1;
1318 else
1319 i = 0;
1320 while (i < old_size)
1322 tree base_case, base_label, base_high;
1323 base_case = gimple_switch_label (stmt, i);
1325 gcc_assert (base_case);
1326 base_label = CASE_LABEL (base_case);
1328 /* Discard cases that have the same destination as the
1329 default case. */
1330 if (base_label == default_label)
1332 gimple_switch_set_label (stmt, i, NULL_TREE);
1333 i++;
1334 new_size--;
1335 continue;
1338 base_high = CASE_HIGH (base_case)
1339 ? CASE_HIGH (base_case)
1340 : CASE_LOW (base_case);
1341 i++;
1343 /* Try to merge case labels. Break out when we reach the end
1344 of the label vector or when we cannot merge the next case
1345 label with the current one. */
1346 while (i < old_size)
1348 tree merge_case = gimple_switch_label (stmt, i);
1349 tree merge_label = CASE_LABEL (merge_case);
1350 tree t = int_const_binop (PLUS_EXPR, base_high,
1351 integer_one_node, 1);
1353 /* Merge the cases if they jump to the same place,
1354 and their ranges are consecutive. */
1355 if (merge_label == base_label
1356 && tree_int_cst_equal (CASE_LOW (merge_case), t))
1358 base_high = CASE_HIGH (merge_case) ?
1359 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1360 CASE_HIGH (base_case) = base_high;
1361 gimple_switch_set_label (stmt, i, NULL_TREE);
1362 new_size--;
1363 i++;
1365 else
1366 break;
1370 /* Compress the case labels in the label vector, and adjust the
1371 length of the vector. */
1372 for (i = 0, j = 0; i < new_size; i++)
1374 while (! gimple_switch_label (stmt, j))
1375 j++;
1376 gimple_switch_set_label (stmt, i,
1377 gimple_switch_label (stmt, j++));
1380 gcc_assert (new_size <= old_size);
1381 gimple_switch_set_num_labels (stmt, new_size);
1386 /* Checks whether we can merge block B into block A. */
1388 static bool
1389 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1391 gimple stmt;
1392 gimple_stmt_iterator gsi;
1393 gimple_seq phis;
1395 if (!single_succ_p (a))
1396 return false;
1398 if (single_succ_edge (a)->flags & (EDGE_ABNORMAL | EDGE_EH))
1399 return false;
1401 if (single_succ (a) != b)
1402 return false;
1404 if (!single_pred_p (b))
1405 return false;
1407 if (b == EXIT_BLOCK_PTR)
1408 return false;
1410 /* If A ends by a statement causing exceptions or something similar, we
1411 cannot merge the blocks. */
1412 stmt = last_stmt (a);
1413 if (stmt && stmt_ends_bb_p (stmt))
1414 return false;
1416 /* Do not allow a block with only a non-local label to be merged. */
1417 if (stmt
1418 && gimple_code (stmt) == GIMPLE_LABEL
1419 && DECL_NONLOCAL (gimple_label_label (stmt)))
1420 return false;
1422 /* Examine the labels at the beginning of B. */
1423 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
1425 tree lab;
1426 stmt = gsi_stmt (gsi);
1427 if (gimple_code (stmt) != GIMPLE_LABEL)
1428 break;
1429 lab = gimple_label_label (stmt);
1431 /* Do not remove user labels. */
1432 if (!DECL_ARTIFICIAL (lab))
1433 return false;
1436 /* Protect the loop latches. */
1437 if (current_loops && b->loop_father->latch == b)
1438 return false;
1440 /* It must be possible to eliminate all phi nodes in B. If ssa form
1441 is not up-to-date, we cannot eliminate any phis; however, if only
1442 some symbols as whole are marked for renaming, this is not a problem,
1443 as phi nodes for those symbols are irrelevant in updating anyway. */
1444 phis = phi_nodes (b);
1445 if (!gimple_seq_empty_p (phis))
1447 gimple_stmt_iterator i;
1449 if (name_mappings_registered_p ())
1450 return false;
1452 for (i = gsi_start (phis); !gsi_end_p (i); gsi_next (&i))
1454 gimple phi = gsi_stmt (i);
1456 if (!is_gimple_reg (gimple_phi_result (phi))
1457 && !may_propagate_copy (gimple_phi_result (phi),
1458 gimple_phi_arg_def (phi, 0)))
1459 return false;
1463 return true;
1466 /* Return true if the var whose chain of uses starts at PTR has no
1467 nondebug uses. */
1468 bool
1469 has_zero_uses_1 (const ssa_use_operand_t *head)
1471 const ssa_use_operand_t *ptr;
1473 for (ptr = head->next; ptr != head; ptr = ptr->next)
1474 if (!is_gimple_debug (USE_STMT (ptr)))
1475 return false;
1477 return true;
1480 /* Return true if the var whose chain of uses starts at PTR has a
1481 single nondebug use. Set USE_P and STMT to that single nondebug
1482 use, if so, or to NULL otherwise. */
1483 bool
1484 single_imm_use_1 (const ssa_use_operand_t *head,
1485 use_operand_p *use_p, gimple *stmt)
1487 ssa_use_operand_t *ptr, *single_use = 0;
1489 for (ptr = head->next; ptr != head; ptr = ptr->next)
1490 if (!is_gimple_debug (USE_STMT (ptr)))
1492 if (single_use)
1494 single_use = NULL;
1495 break;
1497 single_use = ptr;
1500 if (use_p)
1501 *use_p = single_use;
1503 if (stmt)
1504 *stmt = single_use ? single_use->loc.stmt : NULL;
1506 return !!single_use;
1509 /* Replaces all uses of NAME by VAL. */
1511 void
1512 replace_uses_by (tree name, tree val)
1514 imm_use_iterator imm_iter;
1515 use_operand_p use;
1516 gimple stmt;
1517 edge e;
1519 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1521 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1523 replace_exp (use, val);
1525 if (gimple_code (stmt) == GIMPLE_PHI)
1527 e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
1528 if (e->flags & EDGE_ABNORMAL)
1530 /* This can only occur for virtual operands, since
1531 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1532 would prevent replacement. */
1533 gcc_assert (!is_gimple_reg (name));
1534 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1539 if (gimple_code (stmt) != GIMPLE_PHI)
1541 size_t i;
1543 fold_stmt_inplace (stmt);
1544 if (cfgcleanup_altered_bbs)
1545 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1547 /* FIXME. This should go in update_stmt. */
1548 for (i = 0; i < gimple_num_ops (stmt); i++)
1550 tree op = gimple_op (stmt, i);
1551 /* Operands may be empty here. For example, the labels
1552 of a GIMPLE_COND are nulled out following the creation
1553 of the corresponding CFG edges. */
1554 if (op && TREE_CODE (op) == ADDR_EXPR)
1555 recompute_tree_invariant_for_addr_expr (op);
1558 maybe_clean_or_replace_eh_stmt (stmt, stmt);
1559 update_stmt (stmt);
1563 gcc_assert (has_zero_uses (name));
1565 /* Also update the trees stored in loop structures. */
1566 if (current_loops)
1568 struct loop *loop;
1569 loop_iterator li;
1571 FOR_EACH_LOOP (li, loop, 0)
1573 substitute_in_loop_info (loop, name, val);
1578 /* Merge block B into block A. */
1580 static void
1581 gimple_merge_blocks (basic_block a, basic_block b)
1583 gimple_stmt_iterator last, gsi, psi;
1584 gimple_seq phis = phi_nodes (b);
1586 if (dump_file)
1587 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1589 /* Remove all single-valued PHI nodes from block B of the form
1590 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1591 gsi = gsi_last_bb (a);
1592 for (psi = gsi_start (phis); !gsi_end_p (psi); )
1594 gimple phi = gsi_stmt (psi);
1595 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1596 gimple copy;
1597 bool may_replace_uses = !is_gimple_reg (def)
1598 || may_propagate_copy (def, use);
1600 /* In case we maintain loop closed ssa form, do not propagate arguments
1601 of loop exit phi nodes. */
1602 if (current_loops
1603 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1604 && is_gimple_reg (def)
1605 && TREE_CODE (use) == SSA_NAME
1606 && a->loop_father != b->loop_father)
1607 may_replace_uses = false;
1609 if (!may_replace_uses)
1611 gcc_assert (is_gimple_reg (def));
1613 /* Note that just emitting the copies is fine -- there is no problem
1614 with ordering of phi nodes. This is because A is the single
1615 predecessor of B, therefore results of the phi nodes cannot
1616 appear as arguments of the phi nodes. */
1617 copy = gimple_build_assign (def, use);
1618 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1619 remove_phi_node (&psi, false);
1621 else
1623 /* If we deal with a PHI for virtual operands, we can simply
1624 propagate these without fussing with folding or updating
1625 the stmt. */
1626 if (!is_gimple_reg (def))
1628 imm_use_iterator iter;
1629 use_operand_p use_p;
1630 gimple stmt;
1632 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1633 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1634 SET_USE (use_p, use);
1636 else
1637 replace_uses_by (def, use);
1639 remove_phi_node (&psi, true);
1643 /* Ensure that B follows A. */
1644 move_block_after (b, a);
1646 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1647 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1649 /* Remove labels from B and set gimple_bb to A for other statements. */
1650 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
1652 gimple stmt = gsi_stmt (gsi);
1653 if (gimple_code (stmt) == GIMPLE_LABEL)
1655 tree label = gimple_label_label (stmt);
1656 int lp_nr;
1658 gsi_remove (&gsi, false);
1660 /* Now that we can thread computed gotos, we might have
1661 a situation where we have a forced label in block B
1662 However, the label at the start of block B might still be
1663 used in other ways (think about the runtime checking for
1664 Fortran assigned gotos). So we can not just delete the
1665 label. Instead we move the label to the start of block A. */
1666 if (FORCED_LABEL (label))
1668 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
1669 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
1672 lp_nr = EH_LANDING_PAD_NR (label);
1673 if (lp_nr)
1675 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
1676 lp->post_landing_pad = NULL;
1679 else
1681 gimple_set_bb (stmt, a);
1682 gsi_next (&gsi);
1686 /* Merge the sequences. */
1687 last = gsi_last_bb (a);
1688 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
1689 set_bb_seq (b, NULL);
1691 if (cfgcleanup_altered_bbs)
1692 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
1696 /* Return the one of two successors of BB that is not reachable by a
1697 complex edge, if there is one. Else, return BB. We use
1698 this in optimizations that use post-dominators for their heuristics,
1699 to catch the cases in C++ where function calls are involved. */
1701 basic_block
1702 single_noncomplex_succ (basic_block bb)
1704 edge e0, e1;
1705 if (EDGE_COUNT (bb->succs) != 2)
1706 return bb;
1708 e0 = EDGE_SUCC (bb, 0);
1709 e1 = EDGE_SUCC (bb, 1);
1710 if (e0->flags & EDGE_COMPLEX)
1711 return e1->dest;
1712 if (e1->flags & EDGE_COMPLEX)
1713 return e0->dest;
1715 return bb;
1718 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1720 void
1721 notice_special_calls (gimple call)
1723 int flags = gimple_call_flags (call);
1725 if (flags & ECF_MAY_BE_ALLOCA)
1726 cfun->calls_alloca = true;
1727 if (flags & ECF_RETURNS_TWICE)
1728 cfun->calls_setjmp = true;
1732 /* Clear flags set by notice_special_calls. Used by dead code removal
1733 to update the flags. */
1735 void
1736 clear_special_calls (void)
1738 cfun->calls_alloca = false;
1739 cfun->calls_setjmp = false;
1742 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1744 static void
1745 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1747 /* Since this block is no longer reachable, we can just delete all
1748 of its PHI nodes. */
1749 remove_phi_nodes (bb);
1751 /* Remove edges to BB's successors. */
1752 while (EDGE_COUNT (bb->succs) > 0)
1753 remove_edge (EDGE_SUCC (bb, 0));
1757 /* Remove statements of basic block BB. */
1759 static void
1760 remove_bb (basic_block bb)
1762 gimple_stmt_iterator i;
1764 if (dump_file)
1766 fprintf (dump_file, "Removing basic block %d\n", bb->index);
1767 if (dump_flags & TDF_DETAILS)
1769 dump_bb (bb, dump_file, 0);
1770 fprintf (dump_file, "\n");
1774 if (current_loops)
1776 struct loop *loop = bb->loop_father;
1778 /* If a loop gets removed, clean up the information associated
1779 with it. */
1780 if (loop->latch == bb
1781 || loop->header == bb)
1782 free_numbers_of_iterations_estimates_loop (loop);
1785 /* Remove all the instructions in the block. */
1786 if (bb_seq (bb) != NULL)
1788 /* Walk backwards so as to get a chance to substitute all
1789 released DEFs into debug stmts. See
1790 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
1791 details. */
1792 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
1794 gimple stmt = gsi_stmt (i);
1795 if (gimple_code (stmt) == GIMPLE_LABEL
1796 && (FORCED_LABEL (gimple_label_label (stmt))
1797 || DECL_NONLOCAL (gimple_label_label (stmt))))
1799 basic_block new_bb;
1800 gimple_stmt_iterator new_gsi;
1802 /* A non-reachable non-local label may still be referenced.
1803 But it no longer needs to carry the extra semantics of
1804 non-locality. */
1805 if (DECL_NONLOCAL (gimple_label_label (stmt)))
1807 DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
1808 FORCED_LABEL (gimple_label_label (stmt)) = 1;
1811 new_bb = bb->prev_bb;
1812 new_gsi = gsi_start_bb (new_bb);
1813 gsi_remove (&i, false);
1814 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
1816 else
1818 /* Release SSA definitions if we are in SSA. Note that we
1819 may be called when not in SSA. For example,
1820 final_cleanup calls this function via
1821 cleanup_tree_cfg. */
1822 if (gimple_in_ssa_p (cfun))
1823 release_defs (stmt);
1825 gsi_remove (&i, true);
1828 if (gsi_end_p (i))
1829 i = gsi_last_bb (bb);
1830 else
1831 gsi_prev (&i);
1835 remove_phi_nodes_and_edges_for_unreachable_block (bb);
1836 bb->il.gimple = NULL;
1840 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
1841 predicate VAL, return the edge that will be taken out of the block.
1842 If VAL does not match a unique edge, NULL is returned. */
1844 edge
1845 find_taken_edge (basic_block bb, tree val)
1847 gimple stmt;
1849 stmt = last_stmt (bb);
1851 gcc_assert (stmt);
1852 gcc_assert (is_ctrl_stmt (stmt));
1854 if (val == NULL)
1855 return NULL;
1857 if (!is_gimple_min_invariant (val))
1858 return NULL;
1860 if (gimple_code (stmt) == GIMPLE_COND)
1861 return find_taken_edge_cond_expr (bb, val);
1863 if (gimple_code (stmt) == GIMPLE_SWITCH)
1864 return find_taken_edge_switch_expr (bb, val);
1866 if (computed_goto_p (stmt))
1868 /* Only optimize if the argument is a label, if the argument is
1869 not a label then we can not construct a proper CFG.
1871 It may be the case that we only need to allow the LABEL_REF to
1872 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
1873 appear inside a LABEL_EXPR just to be safe. */
1874 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
1875 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
1876 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
1877 return NULL;
1880 gcc_unreachable ();
1883 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
1884 statement, determine which of the outgoing edges will be taken out of the
1885 block. Return NULL if either edge may be taken. */
1887 static edge
1888 find_taken_edge_computed_goto (basic_block bb, tree val)
1890 basic_block dest;
1891 edge e = NULL;
1893 dest = label_to_block (val);
1894 if (dest)
1896 e = find_edge (bb, dest);
1897 gcc_assert (e != NULL);
1900 return e;
1903 /* Given a constant value VAL and the entry block BB to a COND_EXPR
1904 statement, determine which of the two edges will be taken out of the
1905 block. Return NULL if either edge may be taken. */
1907 static edge
1908 find_taken_edge_cond_expr (basic_block bb, tree val)
1910 edge true_edge, false_edge;
1912 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1914 gcc_assert (TREE_CODE (val) == INTEGER_CST);
1915 return (integer_zerop (val) ? false_edge : true_edge);
1918 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
1919 statement, determine which edge will be taken out of the block. Return
1920 NULL if any edge may be taken. */
1922 static edge
1923 find_taken_edge_switch_expr (basic_block bb, tree val)
1925 basic_block dest_bb;
1926 edge e;
1927 gimple switch_stmt;
1928 tree taken_case;
1930 switch_stmt = last_stmt (bb);
1931 taken_case = find_case_label_for_value (switch_stmt, val);
1932 dest_bb = label_to_block (CASE_LABEL (taken_case));
1934 e = find_edge (bb, dest_bb);
1935 gcc_assert (e);
1936 return e;
1940 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
1941 We can make optimal use here of the fact that the case labels are
1942 sorted: We can do a binary search for a case matching VAL. */
1944 static tree
1945 find_case_label_for_value (gimple switch_stmt, tree val)
1947 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
1948 tree default_case = gimple_switch_default_label (switch_stmt);
1950 for (low = 0, high = n; high - low > 1; )
1952 size_t i = (high + low) / 2;
1953 tree t = gimple_switch_label (switch_stmt, i);
1954 int cmp;
1956 /* Cache the result of comparing CASE_LOW and val. */
1957 cmp = tree_int_cst_compare (CASE_LOW (t), val);
1959 if (cmp > 0)
1960 high = i;
1961 else
1962 low = i;
1964 if (CASE_HIGH (t) == NULL)
1966 /* A singe-valued case label. */
1967 if (cmp == 0)
1968 return t;
1970 else
1972 /* A case range. We can only handle integer ranges. */
1973 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
1974 return t;
1978 return default_case;
1982 /* Dump a basic block on stderr. */
1984 void
1985 gimple_debug_bb (basic_block bb)
1987 gimple_dump_bb (bb, stderr, 0, TDF_VOPS|TDF_MEMSYMS);
1991 /* Dump basic block with index N on stderr. */
1993 basic_block
1994 gimple_debug_bb_n (int n)
1996 gimple_debug_bb (BASIC_BLOCK (n));
1997 return BASIC_BLOCK (n);
/* Dump the CFG on stderr.

   FLAGS are the same used by the tree dumping functions
   (see TDF_* in tree-pass.h).  */

void
gimple_debug_cfg (int flags)
{
  gimple_dump_cfg (stderr, flags);
}
2013 /* Dump the program showing basic block boundaries on the given FILE.
2015 FLAGS are the same used by the tree dumping functions (see TDF_* in
2016 tree.h). */
2018 void
2019 gimple_dump_cfg (FILE *file, int flags)
2021 if (flags & TDF_DETAILS)
2023 const char *funcname
2024 = lang_hooks.decl_printable_name (current_function_decl, 2);
2026 fputc ('\n', file);
2027 fprintf (file, ";; Function %s\n\n", funcname);
2028 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2029 n_basic_blocks, n_edges, last_basic_block);
2031 brief_dump_cfg (file);
2032 fprintf (file, "\n");
2035 if (flags & TDF_STATS)
2036 dump_cfg_stats (file);
2038 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2042 /* Dump CFG statistics on FILE. */
2044 void
2045 dump_cfg_stats (FILE *file)
2047 static long max_num_merged_labels = 0;
2048 unsigned long size, total = 0;
2049 long num_edges;
2050 basic_block bb;
2051 const char * const fmt_str = "%-30s%-13s%12s\n";
2052 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2053 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2054 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2055 const char *funcname
2056 = lang_hooks.decl_printable_name (current_function_decl, 2);
2059 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2061 fprintf (file, "---------------------------------------------------------\n");
2062 fprintf (file, fmt_str, "", " Number of ", "Memory");
2063 fprintf (file, fmt_str, "", " instances ", "used ");
2064 fprintf (file, "---------------------------------------------------------\n");
2066 size = n_basic_blocks * sizeof (struct basic_block_def);
2067 total += size;
2068 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2069 SCALE (size), LABEL (size));
2071 num_edges = 0;
2072 FOR_EACH_BB (bb)
2073 num_edges += EDGE_COUNT (bb->succs);
2074 size = num_edges * sizeof (struct edge_def);
2075 total += size;
2076 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2078 fprintf (file, "---------------------------------------------------------\n");
2079 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2080 LABEL (total));
2081 fprintf (file, "---------------------------------------------------------\n");
2082 fprintf (file, "\n");
2084 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2085 max_num_merged_labels = cfg_stats.num_merged_labels;
2087 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2088 cfg_stats.num_merged_labels, max_num_merged_labels);
2090 fprintf (file, "\n");
/* Dump CFG statistics on stderr.  Keep extern so that it's always
   linked in the final executable.  */

void
debug_cfg_stats (void)
{
  dump_cfg_stats (stderr);
}
2104 /* Dump the flowgraph to a .vcg FILE. */
2106 static void
2107 gimple_cfg2vcg (FILE *file)
2109 edge e;
2110 edge_iterator ei;
2111 basic_block bb;
2112 const char *funcname
2113 = lang_hooks.decl_printable_name (current_function_decl, 2);
2115 /* Write the file header. */
2116 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2117 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2118 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2120 /* Write blocks and edges. */
2121 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2123 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2124 e->dest->index);
2126 if (e->flags & EDGE_FAKE)
2127 fprintf (file, " linestyle: dotted priority: 10");
2128 else
2129 fprintf (file, " linestyle: solid priority: 100");
2131 fprintf (file, " }\n");
2133 fputc ('\n', file);
2135 FOR_EACH_BB (bb)
2137 enum gimple_code head_code, end_code;
2138 const char *head_name, *end_name;
2139 int head_line = 0;
2140 int end_line = 0;
2141 gimple first = first_stmt (bb);
2142 gimple last = last_stmt (bb);
2144 if (first)
2146 head_code = gimple_code (first);
2147 head_name = gimple_code_name[head_code];
2148 head_line = get_lineno (first);
2150 else
2151 head_name = "no-statement";
2153 if (last)
2155 end_code = gimple_code (last);
2156 end_name = gimple_code_name[end_code];
2157 end_line = get_lineno (last);
2159 else
2160 end_name = "no-statement";
2162 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2163 bb->index, bb->index, head_name, head_line, end_name,
2164 end_line);
2166 FOR_EACH_EDGE (e, ei, bb->succs)
2168 if (e->dest == EXIT_BLOCK_PTR)
2169 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2170 else
2171 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2173 if (e->flags & EDGE_FAKE)
2174 fprintf (file, " priority: 10 linestyle: dotted");
2175 else
2176 fprintf (file, " priority: 100 linestyle: solid");
2178 fprintf (file, " }\n");
2181 if (bb->next_bb != EXIT_BLOCK_PTR)
2182 fputc ('\n', file);
2185 fputs ("}\n\n", file);
2190 /*---------------------------------------------------------------------------
2191 Miscellaneous helpers
2192 ---------------------------------------------------------------------------*/
2194 /* Return true if T represents a stmt that always transfers control. */
2196 bool
2197 is_ctrl_stmt (gimple t)
2199 switch (gimple_code (t))
2201 case GIMPLE_COND:
2202 case GIMPLE_SWITCH:
2203 case GIMPLE_GOTO:
2204 case GIMPLE_RETURN:
2205 case GIMPLE_RESX:
2206 return true;
2207 default:
2208 return false;
2213 /* Return true if T is a statement that may alter the flow of control
2214 (e.g., a call to a non-returning function). */
2216 bool
2217 is_ctrl_altering_stmt (gimple t)
2219 gcc_assert (t);
2221 switch (gimple_code (t))
2223 case GIMPLE_CALL:
2225 int flags = gimple_call_flags (t);
2227 /* A non-pure/const call alters flow control if the current
2228 function has nonlocal labels. */
2229 if (!(flags & (ECF_CONST | ECF_PURE)) && cfun->has_nonlocal_label)
2230 return true;
2232 /* A call also alters control flow if it does not return. */
2233 if (flags & ECF_NORETURN)
2234 return true;
2236 break;
2238 case GIMPLE_EH_DISPATCH:
2239 /* EH_DISPATCH branches to the individual catch handlers at
2240 this level of a try or allowed-exceptions region. It can
2241 fallthru to the next statement as well. */
2242 return true;
2244 case GIMPLE_ASM:
2245 if (gimple_asm_nlabels (t) > 0)
2246 return true;
2247 break;
2249 CASE_GIMPLE_OMP:
2250 /* OpenMP directives alter control flow. */
2251 return true;
2253 default:
2254 break;
2257 /* If a statement can throw, it alters control flow. */
2258 return stmt_can_throw_internal (t);
2262 /* Return true if T is a simple local goto. */
2264 bool
2265 simple_goto_p (gimple t)
2267 return (gimple_code (t) == GIMPLE_GOTO
2268 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2272 /* Return true if T can make an abnormal transfer of control flow.
2273 Transfers of control flow associated with EH are excluded. */
2275 bool
2276 stmt_can_make_abnormal_goto (gimple t)
2278 if (computed_goto_p (t))
2279 return true;
2280 if (is_gimple_call (t))
2281 return gimple_has_side_effects (t) && cfun->has_nonlocal_label;
2282 return false;
2286 /* Return true if STMT should start a new basic block. PREV_STMT is
2287 the statement preceding STMT. It is used when STMT is a label or a
2288 case label. Labels should only start a new basic block if their
2289 previous statement wasn't a label. Otherwise, sequence of labels
2290 would generate unnecessary basic blocks that only contain a single
2291 label. */
2293 static inline bool
2294 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2296 if (stmt == NULL)
2297 return false;
2299 /* Labels start a new basic block only if the preceding statement
2300 wasn't a label of the same type. This prevents the creation of
2301 consecutive blocks that have nothing but a single label. */
2302 if (gimple_code (stmt) == GIMPLE_LABEL)
2304 /* Nonlocal and computed GOTO targets always start a new block. */
2305 if (DECL_NONLOCAL (gimple_label_label (stmt))
2306 || FORCED_LABEL (gimple_label_label (stmt)))
2307 return true;
2309 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2311 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
2312 return true;
2314 cfg_stats.num_merged_labels++;
2315 return false;
2317 else
2318 return true;
2321 return false;
2325 /* Return true if T should end a basic block. */
2327 bool
2328 stmt_ends_bb_p (gimple t)
2330 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2333 /* Remove block annotations and other data structures. */
2335 void
2336 delete_tree_cfg_annotations (void)
2338 label_to_block_map = NULL;
2342 /* Return the first statement in basic block BB. */
2344 gimple
2345 first_stmt (basic_block bb)
2347 gimple_stmt_iterator i = gsi_start_bb (bb);
2348 gimple stmt = NULL;
2350 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2352 gsi_next (&i);
2353 stmt = NULL;
2355 return stmt;
2358 /* Return the first non-label statement in basic block BB. */
2360 static gimple
2361 first_non_label_stmt (basic_block bb)
2363 gimple_stmt_iterator i = gsi_start_bb (bb);
2364 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2365 gsi_next (&i);
2366 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2369 /* Return the last statement in basic block BB. */
2371 gimple
2372 last_stmt (basic_block bb)
2374 gimple_stmt_iterator i = gsi_last_bb (bb);
2375 gimple stmt = NULL;
2377 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2379 gsi_prev (&i);
2380 stmt = NULL;
2382 return stmt;
2385 /* Return the last statement of an otherwise empty block. Return NULL
2386 if the block is totally empty, or if it contains more than one
2387 statement. */
2389 gimple
2390 last_and_only_stmt (basic_block bb)
2392 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2393 gimple last, prev;
2395 if (gsi_end_p (i))
2396 return NULL;
2398 last = gsi_stmt (i);
2399 gsi_prev_nondebug (&i);
2400 if (gsi_end_p (i))
2401 return last;
2403 /* Empty statements should no longer appear in the instruction stream.
2404 Everything that might have appeared before should be deleted by
2405 remove_useless_stmts, and the optimizers should just gsi_remove
2406 instead of smashing with build_empty_stmt.
2408 Thus the only thing that should appear here in a block containing
2409 one executable statement is a label. */
2410 prev = gsi_stmt (i);
2411 if (gimple_code (prev) == GIMPLE_LABEL)
2412 return last;
2413 else
2414 return NULL;
2417 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2419 static void
2420 reinstall_phi_args (edge new_edge, edge old_edge)
2422 edge_var_map_vector v;
2423 edge_var_map *vm;
2424 int i;
2425 gimple_stmt_iterator phis;
2427 v = redirect_edge_var_map_vector (old_edge);
2428 if (!v)
2429 return;
2431 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2432 VEC_iterate (edge_var_map, v, i, vm) && !gsi_end_p (phis);
2433 i++, gsi_next (&phis))
2435 gimple phi = gsi_stmt (phis);
2436 tree result = redirect_edge_var_map_result (vm);
2437 tree arg = redirect_edge_var_map_def (vm);
2439 gcc_assert (result == gimple_phi_result (phi));
2441 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2444 redirect_edge_var_map_clear (old_edge);
2447 /* Returns the basic block after which the new basic block created
2448 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2449 near its "logical" location. This is of most help to humans looking
2450 at debugging dumps. */
2452 static basic_block
2453 split_edge_bb_loc (edge edge_in)
2455 basic_block dest = edge_in->dest;
2456 basic_block dest_prev = dest->prev_bb;
2458 if (dest_prev)
2460 edge e = find_edge (dest_prev, dest);
2461 if (e && !(e->flags & EDGE_COMPLEX))
2462 return edge_in->src;
2464 return dest_prev;
2467 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2468 Abort on abnormal edges. */
2470 static basic_block
2471 gimple_split_edge (edge edge_in)
2473 basic_block new_bb, after_bb, dest;
2474 edge new_edge, e;
2476 /* Abnormal edges cannot be split. */
2477 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2479 dest = edge_in->dest;
2481 after_bb = split_edge_bb_loc (edge_in);
2483 new_bb = create_empty_bb (after_bb);
2484 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2485 new_bb->count = edge_in->count;
2486 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2487 new_edge->probability = REG_BR_PROB_BASE;
2488 new_edge->count = edge_in->count;
2490 e = redirect_edge_and_branch (edge_in, new_bb);
2491 gcc_assert (e == edge_in);
2492 reinstall_phi_args (new_edge, e);
2494 return new_bb;
2497 /* Callback for walk_tree, check that all elements with address taken are
2498 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2499 inside a PHI node. */
2501 static tree
2502 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2504 tree t = *tp, x;
2506 if (TYPE_P (t))
2507 *walk_subtrees = 0;
2509 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2510 #define CHECK_OP(N, MSG) \
2511 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2512 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2514 switch (TREE_CODE (t))
2516 case SSA_NAME:
2517 if (SSA_NAME_IN_FREE_LIST (t))
2519 error ("SSA name in freelist but still referenced");
2520 return *tp;
2522 break;
2524 case INDIRECT_REF:
2525 x = TREE_OPERAND (t, 0);
2526 if (!is_gimple_reg (x) && !is_gimple_min_invariant (x))
2528 error ("Indirect reference's operand is not a register or a constant.");
2529 return x;
2531 break;
2533 case ASSERT_EXPR:
2534 x = fold (ASSERT_EXPR_COND (t));
2535 if (x == boolean_false_node)
2537 error ("ASSERT_EXPR with an always-false condition");
2538 return *tp;
2540 break;
2542 case MODIFY_EXPR:
2543 error ("MODIFY_EXPR not expected while having tuples.");
2544 return *tp;
2546 case ADDR_EXPR:
2548 bool old_constant;
2549 bool old_side_effects;
2550 bool new_constant;
2551 bool new_side_effects;
2553 gcc_assert (is_gimple_address (t));
2555 old_constant = TREE_CONSTANT (t);
2556 old_side_effects = TREE_SIDE_EFFECTS (t);
2558 recompute_tree_invariant_for_addr_expr (t);
2559 new_side_effects = TREE_SIDE_EFFECTS (t);
2560 new_constant = TREE_CONSTANT (t);
2562 if (old_constant != new_constant)
2564 error ("constant not recomputed when ADDR_EXPR changed");
2565 return t;
2567 if (old_side_effects != new_side_effects)
2569 error ("side effects not recomputed when ADDR_EXPR changed");
2570 return t;
2573 /* Skip any references (they will be checked when we recurse down the
2574 tree) and ensure that any variable used as a prefix is marked
2575 addressable. */
2576 for (x = TREE_OPERAND (t, 0);
2577 handled_component_p (x);
2578 x = TREE_OPERAND (x, 0))
2581 if (!(TREE_CODE (x) == VAR_DECL
2582 || TREE_CODE (x) == PARM_DECL
2583 || TREE_CODE (x) == RESULT_DECL))
2584 return NULL;
2585 if (!TREE_ADDRESSABLE (x))
2587 error ("address taken, but ADDRESSABLE bit not set");
2588 return x;
2590 if (DECL_GIMPLE_REG_P (x))
2592 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2593 return x;
2596 break;
2599 case COND_EXPR:
2600 x = COND_EXPR_COND (t);
2601 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2603 error ("non-integral used in condition");
2604 return x;
2606 if (!is_gimple_condexpr (x))
2608 error ("invalid conditional operand");
2609 return x;
2611 break;
2613 case NON_LVALUE_EXPR:
2614 gcc_unreachable ();
2616 CASE_CONVERT:
2617 case FIX_TRUNC_EXPR:
2618 case FLOAT_EXPR:
2619 case NEGATE_EXPR:
2620 case ABS_EXPR:
2621 case BIT_NOT_EXPR:
2622 case TRUTH_NOT_EXPR:
2623 CHECK_OP (0, "invalid operand to unary operator");
2624 break;
2626 case REALPART_EXPR:
2627 case IMAGPART_EXPR:
2628 case COMPONENT_REF:
2629 case ARRAY_REF:
2630 case ARRAY_RANGE_REF:
2631 case BIT_FIELD_REF:
2632 case VIEW_CONVERT_EXPR:
2633 /* We have a nest of references. Verify that each of the operands
2634 that determine where to reference is either a constant or a variable,
2635 verify that the base is valid, and then show we've already checked
2636 the subtrees. */
2637 while (handled_component_p (t))
2639 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2640 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2641 else if (TREE_CODE (t) == ARRAY_REF
2642 || TREE_CODE (t) == ARRAY_RANGE_REF)
2644 CHECK_OP (1, "invalid array index");
2645 if (TREE_OPERAND (t, 2))
2646 CHECK_OP (2, "invalid array lower bound");
2647 if (TREE_OPERAND (t, 3))
2648 CHECK_OP (3, "invalid array stride");
2650 else if (TREE_CODE (t) == BIT_FIELD_REF)
2652 if (!host_integerp (TREE_OPERAND (t, 1), 1)
2653 || !host_integerp (TREE_OPERAND (t, 2), 1))
2655 error ("invalid position or size operand to BIT_FIELD_REF");
2656 return t;
2658 else if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2659 && (TYPE_PRECISION (TREE_TYPE (t))
2660 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2662 error ("integral result type precision does not match "
2663 "field size of BIT_FIELD_REF");
2664 return t;
2666 if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2667 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2668 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2670 error ("mode precision of non-integral result does not "
2671 "match field size of BIT_FIELD_REF");
2672 return t;
2676 t = TREE_OPERAND (t, 0);
2679 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2681 error ("invalid reference prefix");
2682 return t;
2684 *walk_subtrees = 0;
2685 break;
2686 case PLUS_EXPR:
2687 case MINUS_EXPR:
2688 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
2689 POINTER_PLUS_EXPR. */
2690 if (POINTER_TYPE_P (TREE_TYPE (t)))
2692 error ("invalid operand to plus/minus, type is a pointer");
2693 return t;
2695 CHECK_OP (0, "invalid operand to binary operator");
2696 CHECK_OP (1, "invalid operand to binary operator");
2697 break;
2699 case POINTER_PLUS_EXPR:
2700 /* Check to make sure the first operand is a pointer or reference type. */
2701 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
2703 error ("invalid operand to pointer plus, first operand is not a pointer");
2704 return t;
2706 /* Check to make sure the second operand is an integer with type of
2707 sizetype. */
2708 if (!useless_type_conversion_p (sizetype,
2709 TREE_TYPE (TREE_OPERAND (t, 1))))
2711 error ("invalid operand to pointer plus, second operand is not an "
2712 "integer with type of sizetype.");
2713 return t;
2715 /* FALLTHROUGH */
2716 case LT_EXPR:
2717 case LE_EXPR:
2718 case GT_EXPR:
2719 case GE_EXPR:
2720 case EQ_EXPR:
2721 case NE_EXPR:
2722 case UNORDERED_EXPR:
2723 case ORDERED_EXPR:
2724 case UNLT_EXPR:
2725 case UNLE_EXPR:
2726 case UNGT_EXPR:
2727 case UNGE_EXPR:
2728 case UNEQ_EXPR:
2729 case LTGT_EXPR:
2730 case MULT_EXPR:
2731 case TRUNC_DIV_EXPR:
2732 case CEIL_DIV_EXPR:
2733 case FLOOR_DIV_EXPR:
2734 case ROUND_DIV_EXPR:
2735 case TRUNC_MOD_EXPR:
2736 case CEIL_MOD_EXPR:
2737 case FLOOR_MOD_EXPR:
2738 case ROUND_MOD_EXPR:
2739 case RDIV_EXPR:
2740 case EXACT_DIV_EXPR:
2741 case MIN_EXPR:
2742 case MAX_EXPR:
2743 case LSHIFT_EXPR:
2744 case RSHIFT_EXPR:
2745 case LROTATE_EXPR:
2746 case RROTATE_EXPR:
2747 case BIT_IOR_EXPR:
2748 case BIT_XOR_EXPR:
2749 case BIT_AND_EXPR:
2750 CHECK_OP (0, "invalid operand to binary operator");
2751 CHECK_OP (1, "invalid operand to binary operator");
2752 break;
2754 case CONSTRUCTOR:
2755 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2756 *walk_subtrees = 0;
2757 break;
2759 default:
2760 break;
2762 return NULL;
2764 #undef CHECK_OP
2768 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2769 Returns true if there is an error, otherwise false. */
2771 static bool
2772 verify_types_in_gimple_min_lval (tree expr)
2774 tree op;
2776 if (is_gimple_id (expr))
2777 return false;
2779 if (!INDIRECT_REF_P (expr)
2780 && TREE_CODE (expr) != TARGET_MEM_REF)
2782 error ("invalid expression for min lvalue");
2783 return true;
2786 /* TARGET_MEM_REFs are strange beasts. */
2787 if (TREE_CODE (expr) == TARGET_MEM_REF)
2788 return false;
2790 op = TREE_OPERAND (expr, 0);
2791 if (!is_gimple_val (op))
2793 error ("invalid operand in indirect reference");
2794 debug_generic_stmt (op);
2795 return true;
2797 if (!useless_type_conversion_p (TREE_TYPE (expr),
2798 TREE_TYPE (TREE_TYPE (op))))
2800 error ("type mismatch in indirect reference");
2801 debug_generic_stmt (TREE_TYPE (expr));
2802 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2803 return true;
2806 return false;
2809 /* Verify if EXPR is a valid GIMPLE reference expression. If
2810 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
2811 if there is an error, otherwise false. */
2813 static bool
2814 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
2816 while (handled_component_p (expr))
2818 tree op = TREE_OPERAND (expr, 0);
2820 if (TREE_CODE (expr) == ARRAY_REF
2821 || TREE_CODE (expr) == ARRAY_RANGE_REF)
2823 if (!is_gimple_val (TREE_OPERAND (expr, 1))
2824 || (TREE_OPERAND (expr, 2)
2825 && !is_gimple_val (TREE_OPERAND (expr, 2)))
2826 || (TREE_OPERAND (expr, 3)
2827 && !is_gimple_val (TREE_OPERAND (expr, 3))))
2829 error ("invalid operands to array reference");
2830 debug_generic_stmt (expr);
2831 return true;
2835 /* Verify if the reference array element types are compatible. */
2836 if (TREE_CODE (expr) == ARRAY_REF
2837 && !useless_type_conversion_p (TREE_TYPE (expr),
2838 TREE_TYPE (TREE_TYPE (op))))
2840 error ("type mismatch in array reference");
2841 debug_generic_stmt (TREE_TYPE (expr));
2842 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2843 return true;
2845 if (TREE_CODE (expr) == ARRAY_RANGE_REF
2846 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
2847 TREE_TYPE (TREE_TYPE (op))))
2849 error ("type mismatch in array range reference");
2850 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
2851 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2852 return true;
2855 if ((TREE_CODE (expr) == REALPART_EXPR
2856 || TREE_CODE (expr) == IMAGPART_EXPR)
2857 && !useless_type_conversion_p (TREE_TYPE (expr),
2858 TREE_TYPE (TREE_TYPE (op))))
2860 error ("type mismatch in real/imagpart reference");
2861 debug_generic_stmt (TREE_TYPE (expr));
2862 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2863 return true;
2866 if (TREE_CODE (expr) == COMPONENT_REF
2867 && !useless_type_conversion_p (TREE_TYPE (expr),
2868 TREE_TYPE (TREE_OPERAND (expr, 1))))
2870 error ("type mismatch in component reference");
2871 debug_generic_stmt (TREE_TYPE (expr));
2872 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
2873 return true;
2876 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2878 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
2879 that their operand is not an SSA name or an invariant when
2880 requiring an lvalue (this usually means there is a SRA or IPA-SRA
2881 bug). Otherwise there is nothing to verify, gross mismatches at
2882 most invoke undefined behavior. */
2883 if (require_lvalue
2884 && (TREE_CODE (op) == SSA_NAME
2885 || is_gimple_min_invariant (op)))
2887 error ("Conversion of an SSA_NAME on the left hand side.");
2888 debug_generic_stmt (expr);
2889 return true;
2891 else if (!handled_component_p (op))
2892 return false;
2895 expr = op;
2898 return ((require_lvalue || !is_gimple_min_invariant (expr))
2899 && verify_types_in_gimple_min_lval (expr));
2902 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
2903 list of pointer-to types that is trivially convertible to DEST. */
2905 static bool
2906 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
2908 tree src;
2910 if (!TYPE_POINTER_TO (src_obj))
2911 return true;
2913 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
2914 if (useless_type_conversion_p (dest, src))
2915 return true;
2917 return false;
2920 /* Return true if TYPE1 is a fixed-point type and if conversions to and
2921 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
2923 static bool
2924 valid_fixed_convert_types_p (tree type1, tree type2)
2926 return (FIXED_POINT_TYPE_P (type1)
2927 && (INTEGRAL_TYPE_P (type2)
2928 || SCALAR_FLOAT_TYPE_P (type2)
2929 || FIXED_POINT_TYPE_P (type2)));
2932 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
2933 is a problem, otherwise false. */
2935 static bool
2936 verify_gimple_call (gimple stmt)
2938 tree fn = gimple_call_fn (stmt);
2939 tree fntype;
2940 unsigned i;
2942 if (TREE_CODE (fn) != OBJ_TYPE_REF
2943 && !is_gimple_val (fn))
2945 error ("invalid function in gimple call");
2946 debug_generic_stmt (fn);
2947 return true;
2950 if (!POINTER_TYPE_P (TREE_TYPE (fn))
2951 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
2952 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE))
2954 error ("non-function in gimple call");
2955 return true;
2958 if (gimple_call_lhs (stmt)
2959 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
2960 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
2962 error ("invalid LHS in gimple call");
2963 return true;
2966 if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
2968 error ("LHS in noreturn call");
2969 return true;
2972 fntype = TREE_TYPE (TREE_TYPE (fn));
2973 if (gimple_call_lhs (stmt)
2974 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
2975 TREE_TYPE (fntype))
2976 /* ??? At least C++ misses conversions at assignments from
2977 void * call results.
2978 ??? Java is completely off. Especially with functions
2979 returning java.lang.Object.
2980 For now simply allow arbitrary pointer type conversions. */
2981 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
2982 && POINTER_TYPE_P (TREE_TYPE (fntype))))
2984 error ("invalid conversion in gimple call");
2985 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
2986 debug_generic_stmt (TREE_TYPE (fntype));
2987 return true;
2990 if (gimple_call_chain (stmt)
2991 && !is_gimple_val (gimple_call_chain (stmt)))
2993 error ("invalid static chain in gimple call");
2994 debug_generic_stmt (gimple_call_chain (stmt));
2995 return true;
2998 /* If there is a static chain argument, this should not be an indirect
2999 call, and the decl should have DECL_STATIC_CHAIN set. */
3000 if (gimple_call_chain (stmt))
3002 if (TREE_CODE (fn) != ADDR_EXPR
3003 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
3005 error ("static chain in indirect gimple call");
3006 return true;
3008 fn = TREE_OPERAND (fn, 0);
3010 if (!DECL_STATIC_CHAIN (fn))
3012 error ("static chain with function that doesn't use one");
3013 return true;
3017 /* ??? The C frontend passes unpromoted arguments in case it
3018 didn't see a function declaration before the call. So for now
3019 leave the call arguments mostly unverified. Once we gimplify
3020 unit-at-a-time we have a chance to fix this. */
3022 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3024 tree arg = gimple_call_arg (stmt, i);
3025 if (!is_gimple_operand (arg))
3027 error ("invalid argument to gimple call");
3028 debug_generic_expr (arg);
3032 return false;
3035 /* Verifies the gimple comparison with the result type TYPE and
3036 the operands OP0 and OP1. */
3038 static bool
3039 verify_gimple_comparison (tree type, tree op0, tree op1)
3041 tree op0_type = TREE_TYPE (op0);
3042 tree op1_type = TREE_TYPE (op1);
3044 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3046 error ("invalid operands in gimple comparison");
3047 return true;
3050 /* For comparisons we do not have the operations type as the
3051 effective type the comparison is carried out in. Instead
3052 we require that either the first operand is trivially
3053 convertible into the second, or the other way around.
3054 The resulting type of a comparison may be any integral type.
3055 Because we special-case pointers to void we allow
3056 comparisons of pointers with the same mode as well. */
3057 if ((!useless_type_conversion_p (op0_type, op1_type)
3058 && !useless_type_conversion_p (op1_type, op0_type)
3059 && (!POINTER_TYPE_P (op0_type)
3060 || !POINTER_TYPE_P (op1_type)
3061 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3062 || !INTEGRAL_TYPE_P (type))
3064 error ("type mismatch in comparison expression");
3065 debug_generic_expr (type);
3066 debug_generic_expr (op0_type);
3067 debug_generic_expr (op1_type);
3068 return true;
3071 return false;
3074 /* Verify a gimple assignment statement STMT with an unary rhs.
3075 Returns true if anything is wrong. */
3077 static bool
3078 verify_gimple_assign_unary (gimple stmt)
3080 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3081 tree lhs = gimple_assign_lhs (stmt);
3082 tree lhs_type = TREE_TYPE (lhs);
3083 tree rhs1 = gimple_assign_rhs1 (stmt);
3084 tree rhs1_type = TREE_TYPE (rhs1);
3086 if (!is_gimple_reg (lhs)
3087 && !(optimize == 0
3088 && TREE_CODE (lhs_type) == COMPLEX_TYPE))
3090 error ("non-register as LHS of unary operation");
3091 return true;
3094 if (!is_gimple_val (rhs1))
3096 error ("invalid operand in unary operation");
3097 return true;
3100 /* First handle conversions. */
3101 switch (rhs_code)
3103 CASE_CONVERT:
3105 /* Allow conversions between integral types and pointers only if
3106 there is no sign or zero extension involved.
3107 For targets were the precision of sizetype doesn't match that
3108 of pointers we need to allow arbitrary conversions from and
3109 to sizetype. */
3110 if ((POINTER_TYPE_P (lhs_type)
3111 && INTEGRAL_TYPE_P (rhs1_type)
3112 && (TYPE_PRECISION (lhs_type) >= TYPE_PRECISION (rhs1_type)
3113 || rhs1_type == sizetype))
3114 || (POINTER_TYPE_P (rhs1_type)
3115 && INTEGRAL_TYPE_P (lhs_type)
3116 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3117 || lhs_type == sizetype)))
3118 return false;
3120 /* Allow conversion from integer to offset type and vice versa. */
3121 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3122 && TREE_CODE (rhs1_type) == INTEGER_TYPE)
3123 || (TREE_CODE (lhs_type) == INTEGER_TYPE
3124 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3125 return false;
3127 /* Otherwise assert we are converting between types of the
3128 same kind. */
3129 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3131 error ("invalid types in nop conversion");
3132 debug_generic_expr (lhs_type);
3133 debug_generic_expr (rhs1_type);
3134 return true;
3137 return false;
3140 case ADDR_SPACE_CONVERT_EXPR:
3142 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3143 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3144 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3146 error ("invalid types in address space conversion");
3147 debug_generic_expr (lhs_type);
3148 debug_generic_expr (rhs1_type);
3149 return true;
3152 return false;
3155 case FIXED_CONVERT_EXPR:
3157 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3158 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3160 error ("invalid types in fixed-point conversion");
3161 debug_generic_expr (lhs_type);
3162 debug_generic_expr (rhs1_type);
3163 return true;
3166 return false;
3169 case FLOAT_EXPR:
3171 if (!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3173 error ("invalid types in conversion to floating point");
3174 debug_generic_expr (lhs_type);
3175 debug_generic_expr (rhs1_type);
3176 return true;
3179 return false;
3182 case FIX_TRUNC_EXPR:
3184 if (!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3186 error ("invalid types in conversion to integer");
3187 debug_generic_expr (lhs_type);
3188 debug_generic_expr (rhs1_type);
3189 return true;
3192 return false;
3195 case VEC_UNPACK_HI_EXPR:
3196 case VEC_UNPACK_LO_EXPR:
3197 case REDUC_MAX_EXPR:
3198 case REDUC_MIN_EXPR:
3199 case REDUC_PLUS_EXPR:
3200 case VEC_UNPACK_FLOAT_HI_EXPR:
3201 case VEC_UNPACK_FLOAT_LO_EXPR:
3202 /* FIXME. */
3203 return false;
3205 case TRUTH_NOT_EXPR:
3206 case NEGATE_EXPR:
3207 case ABS_EXPR:
3208 case BIT_NOT_EXPR:
3209 case PAREN_EXPR:
3210 case NON_LVALUE_EXPR:
3211 case CONJ_EXPR:
3212 break;
3214 default:
3215 gcc_unreachable ();
3218 /* For the remaining codes assert there is no conversion involved. */
3219 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3221 error ("non-trivial conversion in unary operation");
3222 debug_generic_expr (lhs_type);
3223 debug_generic_expr (rhs1_type);
3224 return true;
3227 return false;
3230 /* Verify a gimple assignment statement STMT with a binary rhs.
3231 Returns true if anything is wrong. */
3233 static bool
3234 verify_gimple_assign_binary (gimple stmt)
3236 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3237 tree lhs = gimple_assign_lhs (stmt);
3238 tree lhs_type = TREE_TYPE (lhs);
3239 tree rhs1 = gimple_assign_rhs1 (stmt);
3240 tree rhs1_type = TREE_TYPE (rhs1);
3241 tree rhs2 = gimple_assign_rhs2 (stmt);
3242 tree rhs2_type = TREE_TYPE (rhs2);
3244 if (!is_gimple_reg (lhs)
3245 && !(optimize == 0
3246 && TREE_CODE (lhs_type) == COMPLEX_TYPE))
3248 error ("non-register as LHS of binary operation");
3249 return true;
3252 if (!is_gimple_val (rhs1)
3253 || !is_gimple_val (rhs2))
3255 error ("invalid operands in binary operation");
3256 return true;
3259 /* First handle operations that involve different types. */
3260 switch (rhs_code)
3262 case COMPLEX_EXPR:
3264 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3265 || !(INTEGRAL_TYPE_P (rhs1_type)
3266 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3267 || !(INTEGRAL_TYPE_P (rhs2_type)
3268 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3270 error ("type mismatch in complex expression");
3271 debug_generic_expr (lhs_type);
3272 debug_generic_expr (rhs1_type);
3273 debug_generic_expr (rhs2_type);
3274 return true;
3277 return false;
3280 case LSHIFT_EXPR:
3281 case RSHIFT_EXPR:
3282 case LROTATE_EXPR:
3283 case RROTATE_EXPR:
3285 /* Shifts and rotates are ok on integral types, fixed point
3286 types and integer vector types. */
3287 if ((!INTEGRAL_TYPE_P (rhs1_type)
3288 && !FIXED_POINT_TYPE_P (rhs1_type)
3289 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3290 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3291 || (!INTEGRAL_TYPE_P (rhs2_type)
3292 /* Vector shifts of vectors are also ok. */
3293 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3294 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3295 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3296 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3297 || !useless_type_conversion_p (lhs_type, rhs1_type))
3299 error ("type mismatch in shift expression");
3300 debug_generic_expr (lhs_type);
3301 debug_generic_expr (rhs1_type);
3302 debug_generic_expr (rhs2_type);
3303 return true;
3306 return false;
3309 case VEC_LSHIFT_EXPR:
3310 case VEC_RSHIFT_EXPR:
3312 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3313 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3314 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3315 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3316 || (!INTEGRAL_TYPE_P (rhs2_type)
3317 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3318 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3319 || !useless_type_conversion_p (lhs_type, rhs1_type))
3321 error ("type mismatch in vector shift expression");
3322 debug_generic_expr (lhs_type);
3323 debug_generic_expr (rhs1_type);
3324 debug_generic_expr (rhs2_type);
3325 return true;
3327 /* For shifting a vector of floating point components we
3328 only allow shifting by a constant multiple of the element size. */
3329 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
3330 && (TREE_CODE (rhs2) != INTEGER_CST
3331 || !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
3332 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3334 error ("non-element sized vector shift of floating point vector");
3335 return true;
3338 return false;
3341 case PLUS_EXPR:
3343 /* We use regular PLUS_EXPR for vectors.
3344 ??? This just makes the checker happy and may not be what is
3345 intended. */
3346 if (TREE_CODE (lhs_type) == VECTOR_TYPE
3347 && POINTER_TYPE_P (TREE_TYPE (lhs_type)))
3349 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3350 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3352 error ("invalid non-vector operands to vector valued plus");
3353 return true;
3355 lhs_type = TREE_TYPE (lhs_type);
3356 rhs1_type = TREE_TYPE (rhs1_type);
3357 rhs2_type = TREE_TYPE (rhs2_type);
3358 /* PLUS_EXPR is commutative, so we might end up canonicalizing
3359 the pointer to 2nd place. */
3360 if (POINTER_TYPE_P (rhs2_type))
3362 tree tem = rhs1_type;
3363 rhs1_type = rhs2_type;
3364 rhs2_type = tem;
3366 goto do_pointer_plus_expr_check;
3369 /* Fallthru. */
3370 case MINUS_EXPR:
3372 if (POINTER_TYPE_P (lhs_type)
3373 || POINTER_TYPE_P (rhs1_type)
3374 || POINTER_TYPE_P (rhs2_type))
3376 error ("invalid (pointer) operands to plus/minus");
3377 return true;
3380 /* Continue with generic binary expression handling. */
3381 break;
3384 case POINTER_PLUS_EXPR:
3386 do_pointer_plus_expr_check:
3387 if (!POINTER_TYPE_P (rhs1_type)
3388 || !useless_type_conversion_p (lhs_type, rhs1_type)
3389 || !useless_type_conversion_p (sizetype, rhs2_type))
3391 error ("type mismatch in pointer plus expression");
3392 debug_generic_stmt (lhs_type);
3393 debug_generic_stmt (rhs1_type);
3394 debug_generic_stmt (rhs2_type);
3395 return true;
3398 return false;
3401 case TRUTH_ANDIF_EXPR:
3402 case TRUTH_ORIF_EXPR:
3403 gcc_unreachable ();
3405 case TRUTH_AND_EXPR:
3406 case TRUTH_OR_EXPR:
3407 case TRUTH_XOR_EXPR:
3409 /* We allow any kind of integral typed argument and result. */
3410 if (!INTEGRAL_TYPE_P (rhs1_type)
3411 || !INTEGRAL_TYPE_P (rhs2_type)
3412 || !INTEGRAL_TYPE_P (lhs_type))
3414 error ("type mismatch in binary truth expression");
3415 debug_generic_expr (lhs_type);
3416 debug_generic_expr (rhs1_type);
3417 debug_generic_expr (rhs2_type);
3418 return true;
3421 return false;
3424 case LT_EXPR:
3425 case LE_EXPR:
3426 case GT_EXPR:
3427 case GE_EXPR:
3428 case EQ_EXPR:
3429 case NE_EXPR:
3430 case UNORDERED_EXPR:
3431 case ORDERED_EXPR:
3432 case UNLT_EXPR:
3433 case UNLE_EXPR:
3434 case UNGT_EXPR:
3435 case UNGE_EXPR:
3436 case UNEQ_EXPR:
3437 case LTGT_EXPR:
3438 /* Comparisons are also binary, but the result type is not
3439 connected to the operand types. */
3440 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3442 case WIDEN_SUM_EXPR:
3443 case WIDEN_MULT_EXPR:
3444 case VEC_WIDEN_MULT_HI_EXPR:
3445 case VEC_WIDEN_MULT_LO_EXPR:
3446 case VEC_PACK_TRUNC_EXPR:
3447 case VEC_PACK_SAT_EXPR:
3448 case VEC_PACK_FIX_TRUNC_EXPR:
3449 case VEC_EXTRACT_EVEN_EXPR:
3450 case VEC_EXTRACT_ODD_EXPR:
3451 case VEC_INTERLEAVE_HIGH_EXPR:
3452 case VEC_INTERLEAVE_LOW_EXPR:
3453 /* FIXME. */
3454 return false;
3456 case MULT_EXPR:
3457 case TRUNC_DIV_EXPR:
3458 case CEIL_DIV_EXPR:
3459 case FLOOR_DIV_EXPR:
3460 case ROUND_DIV_EXPR:
3461 case TRUNC_MOD_EXPR:
3462 case CEIL_MOD_EXPR:
3463 case FLOOR_MOD_EXPR:
3464 case ROUND_MOD_EXPR:
3465 case RDIV_EXPR:
3466 case EXACT_DIV_EXPR:
3467 case MIN_EXPR:
3468 case MAX_EXPR:
3469 case BIT_IOR_EXPR:
3470 case BIT_XOR_EXPR:
3471 case BIT_AND_EXPR:
3472 /* Continue with generic binary expression handling. */
3473 break;
3475 default:
3476 gcc_unreachable ();
3479 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3480 || !useless_type_conversion_p (lhs_type, rhs2_type))
3482 error ("type mismatch in binary expression");
3483 debug_generic_stmt (lhs_type);
3484 debug_generic_stmt (rhs1_type);
3485 debug_generic_stmt (rhs2_type);
3486 return true;
3489 return false;
3492 /* Verify a gimple assignment statement STMT with a single rhs.
3493 Returns true if anything is wrong. */
3495 static bool
3496 verify_gimple_assign_single (gimple stmt)
3498 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3499 tree lhs = gimple_assign_lhs (stmt);
3500 tree lhs_type = TREE_TYPE (lhs);
3501 tree rhs1 = gimple_assign_rhs1 (stmt);
3502 tree rhs1_type = TREE_TYPE (rhs1);
3503 bool res = false;
3505 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3507 error ("non-trivial conversion at assignment");
3508 debug_generic_expr (lhs_type);
3509 debug_generic_expr (rhs1_type);
3510 return true;
3513 if (handled_component_p (lhs))
3514 res |= verify_types_in_gimple_reference (lhs, true);
3516 /* Special codes we cannot handle via their class. */
3517 switch (rhs_code)
3519 case ADDR_EXPR:
3521 tree op = TREE_OPERAND (rhs1, 0);
3522 if (!is_gimple_addressable (op))
3524 error ("invalid operand in unary expression");
3525 return true;
3528 if (!types_compatible_p (TREE_TYPE (op), TREE_TYPE (TREE_TYPE (rhs1)))
3529 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
3530 TREE_TYPE (op)))
3532 error ("type mismatch in address expression");
3533 debug_generic_stmt (TREE_TYPE (rhs1));
3534 debug_generic_stmt (TREE_TYPE (op));
3535 return true;
3538 return verify_types_in_gimple_reference (op, true);
3541 /* tcc_reference */
3542 case COMPONENT_REF:
3543 case BIT_FIELD_REF:
3544 case INDIRECT_REF:
3545 case ALIGN_INDIRECT_REF:
3546 case MISALIGNED_INDIRECT_REF:
3547 case ARRAY_REF:
3548 case ARRAY_RANGE_REF:
3549 case VIEW_CONVERT_EXPR:
3550 case REALPART_EXPR:
3551 case IMAGPART_EXPR:
3552 case TARGET_MEM_REF:
3553 if (!is_gimple_reg (lhs)
3554 && is_gimple_reg_type (TREE_TYPE (lhs)))
3556 error ("invalid rhs for gimple memory store");
3557 debug_generic_stmt (lhs);
3558 debug_generic_stmt (rhs1);
3559 return true;
3561 return res || verify_types_in_gimple_reference (rhs1, false);
3563 /* tcc_constant */
3564 case SSA_NAME:
3565 case INTEGER_CST:
3566 case REAL_CST:
3567 case FIXED_CST:
3568 case COMPLEX_CST:
3569 case VECTOR_CST:
3570 case STRING_CST:
3571 return res;
3573 /* tcc_declaration */
3574 case CONST_DECL:
3575 return res;
3576 case VAR_DECL:
3577 case PARM_DECL:
3578 if (!is_gimple_reg (lhs)
3579 && !is_gimple_reg (rhs1)
3580 && is_gimple_reg_type (TREE_TYPE (lhs)))
3582 error ("invalid rhs for gimple memory store");
3583 debug_generic_stmt (lhs);
3584 debug_generic_stmt (rhs1);
3585 return true;
3587 return res;
3589 case COND_EXPR:
3590 case CONSTRUCTOR:
3591 case OBJ_TYPE_REF:
3592 case ASSERT_EXPR:
3593 case WITH_SIZE_EXPR:
3594 case POLYNOMIAL_CHREC:
3595 case DOT_PROD_EXPR:
3596 case VEC_COND_EXPR:
3597 case REALIGN_LOAD_EXPR:
3598 /* FIXME. */
3599 return res;
3601 default:;
3604 return res;
3607 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
3608 is a problem, otherwise false. */
3610 static bool
3611 verify_gimple_assign (gimple stmt)
3613 switch (gimple_assign_rhs_class (stmt))
3615 case GIMPLE_SINGLE_RHS:
3616 return verify_gimple_assign_single (stmt);
3618 case GIMPLE_UNARY_RHS:
3619 return verify_gimple_assign_unary (stmt);
3621 case GIMPLE_BINARY_RHS:
3622 return verify_gimple_assign_binary (stmt);
3624 default:
3625 gcc_unreachable ();
3629 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
3630 is a problem, otherwise false. */
3632 static bool
3633 verify_gimple_return (gimple stmt)
3635 tree op = gimple_return_retval (stmt);
3636 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
3638 /* We cannot test for present return values as we do not fix up missing
3639 return values from the original source. */
3640 if (op == NULL)
3641 return false;
3643 if (!is_gimple_val (op)
3644 && TREE_CODE (op) != RESULT_DECL)
3646 error ("invalid operand in return statement");
3647 debug_generic_stmt (op);
3648 return true;
3651 if (!useless_type_conversion_p (restype, TREE_TYPE (op))
3652 /* ??? With C++ we can have the situation that the result
3653 decl is a reference type while the return type is an aggregate. */
3654 && !(TREE_CODE (op) == RESULT_DECL
3655 && TREE_CODE (TREE_TYPE (op)) == REFERENCE_TYPE
3656 && useless_type_conversion_p (restype, TREE_TYPE (TREE_TYPE (op)))))
3658 error ("invalid conversion in return statement");
3659 debug_generic_stmt (restype);
3660 debug_generic_stmt (TREE_TYPE (op));
3661 return true;
3664 return false;
3668 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
3669 is a problem, otherwise false. */
3671 static bool
3672 verify_gimple_goto (gimple stmt)
3674 tree dest = gimple_goto_dest (stmt);
3676 /* ??? We have two canonical forms of direct goto destinations, a
3677 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
3678 if (TREE_CODE (dest) != LABEL_DECL
3679 && (!is_gimple_val (dest)
3680 || !POINTER_TYPE_P (TREE_TYPE (dest))))
3682 error ("goto destination is neither a label nor a pointer");
3683 return true;
3686 return false;
3689 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
3690 is a problem, otherwise false. */
3692 static bool
3693 verify_gimple_switch (gimple stmt)
3695 if (!is_gimple_val (gimple_switch_index (stmt)))
3697 error ("invalid operand to switch statement");
3698 debug_generic_stmt (gimple_switch_index (stmt));
3699 return true;
3702 return false;
3706 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
3707 and false otherwise. */
3709 static bool
3710 verify_gimple_phi (gimple stmt)
3712 tree type = TREE_TYPE (gimple_phi_result (stmt));
3713 unsigned i;
3715 if (TREE_CODE (gimple_phi_result (stmt)) != SSA_NAME)
3717 error ("Invalid PHI result");
3718 return true;
3721 for (i = 0; i < gimple_phi_num_args (stmt); i++)
3723 tree arg = gimple_phi_arg_def (stmt, i);
3724 if ((is_gimple_reg (gimple_phi_result (stmt))
3725 && !is_gimple_val (arg))
3726 || (!is_gimple_reg (gimple_phi_result (stmt))
3727 && !is_gimple_addressable (arg)))
3729 error ("Invalid PHI argument");
3730 debug_generic_stmt (arg);
3731 return true;
3733 if (!useless_type_conversion_p (type, TREE_TYPE (arg)))
3735 error ("Incompatible types in PHI argument %u", i);
3736 debug_generic_stmt (type);
3737 debug_generic_stmt (TREE_TYPE (arg));
3738 return true;
3742 return false;
3746 /* Verify a gimple debug statement STMT.
3747 Returns true if anything is wrong. */
3749 static bool
3750 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
3752 /* There isn't much that could be wrong in a gimple debug stmt. A
3753 gimple debug bind stmt, for example, maps a tree, that's usually
3754 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
3755 component or member of an aggregate type, to another tree, that
3756 can be an arbitrary expression. These stmts expand into debug
3757 insns, and are converted to debug notes by var-tracking.c. */
3758 return false;
3762 /* Verify the GIMPLE statement STMT. Returns true if there is an
3763 error, otherwise false. */
3765 static bool
3766 verify_types_in_gimple_stmt (gimple stmt)
3768 switch (gimple_code (stmt))
3770 case GIMPLE_ASSIGN:
3771 return verify_gimple_assign (stmt);
3773 case GIMPLE_LABEL:
3774 return TREE_CODE (gimple_label_label (stmt)) != LABEL_DECL;
3776 case GIMPLE_CALL:
3777 return verify_gimple_call (stmt);
3779 case GIMPLE_COND:
3780 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
3782 error ("invalid comparison code in gimple cond");
3783 return true;
3785 if (!(!gimple_cond_true_label (stmt)
3786 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
3787 || !(!gimple_cond_false_label (stmt)
3788 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
3790 error ("invalid labels in gimple cond");
3791 return true;
3794 return verify_gimple_comparison (boolean_type_node,
3795 gimple_cond_lhs (stmt),
3796 gimple_cond_rhs (stmt));
3798 case GIMPLE_GOTO:
3799 return verify_gimple_goto (stmt);
3801 case GIMPLE_SWITCH:
3802 return verify_gimple_switch (stmt);
3804 case GIMPLE_RETURN:
3805 return verify_gimple_return (stmt);
3807 case GIMPLE_ASM:
3808 return false;
3810 case GIMPLE_PHI:
3811 return verify_gimple_phi (stmt);
3813 /* Tuples that do not have tree operands. */
3814 case GIMPLE_NOP:
3815 case GIMPLE_PREDICT:
3816 case GIMPLE_RESX:
3817 case GIMPLE_EH_DISPATCH:
3818 case GIMPLE_EH_MUST_NOT_THROW:
3819 return false;
3821 CASE_GIMPLE_OMP:
3822 /* OpenMP directives are validated by the FE and never operated
3823 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
3824 non-gimple expressions when the main index variable has had
3825 its address taken. This does not affect the loop itself
3826 because the header of an GIMPLE_OMP_FOR is merely used to determine
3827 how to setup the parallel iteration. */
3828 return false;
3830 case GIMPLE_DEBUG:
3831 return verify_gimple_debug (stmt);
3833 default:
3834 gcc_unreachable ();
3838 /* Verify the GIMPLE statements inside the sequence STMTS. */
3840 static bool
3841 verify_types_in_gimple_seq_2 (gimple_seq stmts)
3843 gimple_stmt_iterator ittr;
3844 bool err = false;
3846 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
3848 gimple stmt = gsi_stmt (ittr);
3850 switch (gimple_code (stmt))
3852 case GIMPLE_BIND:
3853 err |= verify_types_in_gimple_seq_2 (gimple_bind_body (stmt));
3854 break;
3856 case GIMPLE_TRY:
3857 err |= verify_types_in_gimple_seq_2 (gimple_try_eval (stmt));
3858 err |= verify_types_in_gimple_seq_2 (gimple_try_cleanup (stmt));
3859 break;
3861 case GIMPLE_EH_FILTER:
3862 err |= verify_types_in_gimple_seq_2 (gimple_eh_filter_failure (stmt));
3863 break;
3865 case GIMPLE_CATCH:
3866 err |= verify_types_in_gimple_seq_2 (gimple_catch_handler (stmt));
3867 break;
3869 default:
3871 bool err2 = verify_types_in_gimple_stmt (stmt);
3872 if (err2)
3873 debug_gimple_stmt (stmt);
3874 err |= err2;
3879 return err;
3883 /* Verify the GIMPLE statements inside the statement list STMTS. */
3885 void
3886 verify_types_in_gimple_seq (gimple_seq stmts)
3888 if (verify_types_in_gimple_seq_2 (stmts))
3889 internal_error ("verify_gimple failed");
3893 /* Verify STMT, return true if STMT is not in GIMPLE form.
3894 TODO: Implement type checking. */
3896 static bool
3897 verify_stmt (gimple_stmt_iterator *gsi)
3899 tree addr;
3900 struct walk_stmt_info wi;
3901 bool last_in_block = gsi_one_before_end_p (*gsi);
3902 gimple stmt = gsi_stmt (*gsi);
3903 int lp_nr;
3905 if (is_gimple_omp (stmt))
3907 /* OpenMP directives are validated by the FE and never operated
3908 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
3909 non-gimple expressions when the main index variable has had
3910 its address taken. This does not affect the loop itself
3911 because the header of an GIMPLE_OMP_FOR is merely used to determine
3912 how to setup the parallel iteration. */
3913 return false;
3916 /* FIXME. The C frontend passes unpromoted arguments in case it
3917 didn't see a function declaration before the call. */
3918 if (is_gimple_call (stmt))
3920 tree decl;
3922 if (!is_gimple_call_addr (gimple_call_fn (stmt)))
3924 error ("invalid function in call statement");
3925 return true;
3928 decl = gimple_call_fndecl (stmt);
3929 if (decl
3930 && TREE_CODE (decl) == FUNCTION_DECL
3931 && DECL_LOOPING_CONST_OR_PURE_P (decl)
3932 && (!DECL_PURE_P (decl))
3933 && (!TREE_READONLY (decl)))
3935 error ("invalid pure const state for function");
3936 return true;
3940 if (is_gimple_debug (stmt))
3941 return false;
3943 memset (&wi, 0, sizeof (wi));
3944 addr = walk_gimple_op (gsi_stmt (*gsi), verify_expr, &wi);
3945 if (addr)
3947 debug_generic_expr (addr);
3948 inform (gimple_location (gsi_stmt (*gsi)), "in statement");
3949 debug_gimple_stmt (stmt);
3950 return true;
3953 /* If the statement is marked as part of an EH region, then it is
3954 expected that the statement could throw. Verify that when we
3955 have optimizations that simplify statements such that we prove
3956 that they cannot throw, that we update other data structures
3957 to match. */
3958 lp_nr = lookup_stmt_eh_lp (stmt);
3959 if (lp_nr != 0)
3961 if (!stmt_could_throw_p (stmt))
3963 /* During IPA passes, ipa-pure-const sets nothrow flags on calls
3964 and they are updated on statements only after fixup_cfg
3965 is executed at beggining of expansion stage. */
3966 if (cgraph_state != CGRAPH_STATE_IPA_SSA)
3968 error ("statement marked for throw, but doesn%'t");
3969 goto fail;
3972 else if (lp_nr > 0 && !last_in_block && stmt_can_throw_internal (stmt))
3974 error ("statement marked for throw in middle of block");
3975 goto fail;
3979 return false;
3981 fail:
3982 debug_gimple_stmt (stmt);
3983 return true;
3987 /* Return true when the T can be shared. */
3989 bool
3990 tree_node_can_be_shared (tree t)
3992 if (IS_TYPE_OR_DECL_P (t)
3993 || is_gimple_min_invariant (t)
3994 || TREE_CODE (t) == SSA_NAME
3995 || t == error_mark_node
3996 || TREE_CODE (t) == IDENTIFIER_NODE)
3997 return true;
3999 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4000 return true;
4002 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4003 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
4004 || TREE_CODE (t) == COMPONENT_REF
4005 || TREE_CODE (t) == REALPART_EXPR
4006 || TREE_CODE (t) == IMAGPART_EXPR)
4007 t = TREE_OPERAND (t, 0);
4009 if (DECL_P (t))
4010 return true;
4012 return false;
4016 /* Called via walk_gimple_stmt. Verify tree sharing. */
4018 static tree
4019 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4021 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4022 struct pointer_set_t *visited = (struct pointer_set_t *) wi->info;
4024 if (tree_node_can_be_shared (*tp))
4026 *walk_subtrees = false;
4027 return NULL;
4030 if (pointer_set_insert (visited, *tp))
4031 return *tp;
4033 return NULL;
4037 static bool eh_error_found;
4038 static int
4039 verify_eh_throw_stmt_node (void **slot, void *data)
4041 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4042 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4044 if (!pointer_set_contains (visited, node->stmt))
4046 error ("Dead STMT in EH table");
4047 debug_gimple_stmt (node->stmt);
4048 eh_error_found = true;
4050 return 1;
4054 /* Verify the GIMPLE statements in every basic block. */
4056 void
4057 verify_stmts (void)
4059 basic_block bb;
4060 gimple_stmt_iterator gsi;
4061 bool err = false;
4062 struct pointer_set_t *visited, *visited_stmts;
4063 tree addr;
4064 struct walk_stmt_info wi;
4066 timevar_push (TV_TREE_STMT_VERIFY);
4067 visited = pointer_set_create ();
4068 visited_stmts = pointer_set_create ();
4070 memset (&wi, 0, sizeof (wi));
4071 wi.info = (void *) visited;
4073 FOR_EACH_BB (bb)
4075 gimple phi;
4076 size_t i;
4078 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4080 phi = gsi_stmt (gsi);
4081 pointer_set_insert (visited_stmts, phi);
4082 if (gimple_bb (phi) != bb)
4084 error ("gimple_bb (phi) is set to a wrong basic block");
4085 err |= true;
4088 for (i = 0; i < gimple_phi_num_args (phi); i++)
4090 tree t = gimple_phi_arg_def (phi, i);
4091 tree addr;
4093 if (!t)
4095 error ("missing PHI def");
4096 debug_gimple_stmt (phi);
4097 err |= true;
4098 continue;
4100 /* Addressable variables do have SSA_NAMEs but they
4101 are not considered gimple values. */
4102 else if (TREE_CODE (t) != SSA_NAME
4103 && TREE_CODE (t) != FUNCTION_DECL
4104 && !is_gimple_min_invariant (t))
4106 error ("PHI argument is not a GIMPLE value");
4107 debug_gimple_stmt (phi);
4108 debug_generic_expr (t);
4109 err |= true;
4112 addr = walk_tree (&t, verify_node_sharing, visited, NULL);
4113 if (addr)
4115 error ("incorrect sharing of tree nodes");
4116 debug_gimple_stmt (phi);
4117 debug_generic_expr (addr);
4118 err |= true;
4122 #ifdef ENABLE_TYPES_CHECKING
4123 if (verify_gimple_phi (phi))
4125 debug_gimple_stmt (phi);
4126 err |= true;
4128 #endif
4131 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
4133 gimple stmt = gsi_stmt (gsi);
4135 if (gimple_code (stmt) == GIMPLE_WITH_CLEANUP_EXPR
4136 || gimple_code (stmt) == GIMPLE_BIND)
4138 error ("invalid GIMPLE statement");
4139 debug_gimple_stmt (stmt);
4140 err |= true;
4143 pointer_set_insert (visited_stmts, stmt);
4145 if (gimple_bb (stmt) != bb)
4147 error ("gimple_bb (stmt) is set to a wrong basic block");
4148 debug_gimple_stmt (stmt);
4149 err |= true;
4152 if (gimple_code (stmt) == GIMPLE_LABEL)
4154 tree decl = gimple_label_label (stmt);
4155 int uid = LABEL_DECL_UID (decl);
4157 if (uid == -1
4158 || VEC_index (basic_block, label_to_block_map, uid) != bb)
4160 error ("incorrect entry in label_to_block_map");
4161 err |= true;
4164 uid = EH_LANDING_PAD_NR (decl);
4165 if (uid)
4167 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4168 if (decl != lp->post_landing_pad)
4170 error ("incorrect setting of landing pad number");
4171 err |= true;
4176 err |= verify_stmt (&gsi);
4178 #ifdef ENABLE_TYPES_CHECKING
4179 if (verify_types_in_gimple_stmt (gsi_stmt (gsi)))
4181 debug_gimple_stmt (stmt);
4182 err |= true;
4184 #endif
4185 addr = walk_gimple_op (gsi_stmt (gsi), verify_node_sharing, &wi);
4186 if (addr)
4188 error ("incorrect sharing of tree nodes");
4189 debug_gimple_stmt (stmt);
4190 debug_generic_expr (addr);
4191 err |= true;
4193 gsi_next (&gsi);
4197 eh_error_found = false;
4198 if (get_eh_throw_stmt_table (cfun))
4199 htab_traverse (get_eh_throw_stmt_table (cfun),
4200 verify_eh_throw_stmt_node,
4201 visited_stmts);
4203 if (err | eh_error_found)
4204 internal_error ("verify_stmts failed");
4206 pointer_set_destroy (visited);
4207 pointer_set_destroy (visited_stmts);
4208 verify_histograms ();
4209 timevar_pop (TV_TREE_STMT_VERIFY);
4213 /* Verifies that the flow information is OK. */
4215 static int
4216 gimple_verify_flow_info (void)
4218 int err = 0;
4219 basic_block bb;
4220 gimple_stmt_iterator gsi;
4221 gimple stmt;
4222 edge e;
4223 edge_iterator ei;
4225 if (ENTRY_BLOCK_PTR->il.gimple)
4227 error ("ENTRY_BLOCK has IL associated with it");
4228 err = 1;
4231 if (EXIT_BLOCK_PTR->il.gimple)
4233 error ("EXIT_BLOCK has IL associated with it");
4234 err = 1;
4237 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4238 if (e->flags & EDGE_FALLTHRU)
4240 error ("fallthru to exit from bb %d", e->src->index);
4241 err = 1;
4244 FOR_EACH_BB (bb)
4246 bool found_ctrl_stmt = false;
4248 stmt = NULL;
4250 /* Skip labels on the start of basic block. */
4251 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4253 tree label;
4254 gimple prev_stmt = stmt;
4256 stmt = gsi_stmt (gsi);
4258 if (gimple_code (stmt) != GIMPLE_LABEL)
4259 break;
4261 label = gimple_label_label (stmt);
4262 if (prev_stmt && DECL_NONLOCAL (label))
4264 error ("nonlocal label ");
4265 print_generic_expr (stderr, label, 0);
4266 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4267 bb->index);
4268 err = 1;
4271 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
4273 error ("EH landing pad label ");
4274 print_generic_expr (stderr, label, 0);
4275 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4276 bb->index);
4277 err = 1;
4280 if (label_to_block (label) != bb)
4282 error ("label ");
4283 print_generic_expr (stderr, label, 0);
4284 fprintf (stderr, " to block does not match in bb %d",
4285 bb->index);
4286 err = 1;
4289 if (decl_function_context (label) != current_function_decl)
4291 error ("label ");
4292 print_generic_expr (stderr, label, 0);
4293 fprintf (stderr, " has incorrect context in bb %d",
4294 bb->index);
4295 err = 1;
4299 /* Verify that body of basic block BB is free of control flow. */
4300 for (; !gsi_end_p (gsi); gsi_next (&gsi))
4302 gimple stmt = gsi_stmt (gsi);
4304 if (found_ctrl_stmt)
4306 error ("control flow in the middle of basic block %d",
4307 bb->index);
4308 err = 1;
4311 if (stmt_ends_bb_p (stmt))
4312 found_ctrl_stmt = true;
4314 if (gimple_code (stmt) == GIMPLE_LABEL)
4316 error ("label ");
4317 print_generic_expr (stderr, gimple_label_label (stmt), 0);
4318 fprintf (stderr, " in the middle of basic block %d", bb->index);
4319 err = 1;
4323 gsi = gsi_last_bb (bb);
4324 if (gsi_end_p (gsi))
4325 continue;
4327 stmt = gsi_stmt (gsi);
4329 if (gimple_code (stmt) == GIMPLE_LABEL)
4330 continue;
4332 err |= verify_eh_edges (stmt);
4334 if (is_ctrl_stmt (stmt))
4336 FOR_EACH_EDGE (e, ei, bb->succs)
4337 if (e->flags & EDGE_FALLTHRU)
4339 error ("fallthru edge after a control statement in bb %d",
4340 bb->index);
4341 err = 1;
4345 if (gimple_code (stmt) != GIMPLE_COND)
4347 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
4348 after anything else but if statement. */
4349 FOR_EACH_EDGE (e, ei, bb->succs)
4350 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4352 error ("true/false edge after a non-GIMPLE_COND in bb %d",
4353 bb->index);
4354 err = 1;
4358 switch (gimple_code (stmt))
4360 case GIMPLE_COND:
4362 edge true_edge;
4363 edge false_edge;
4365 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4367 if (!true_edge
4368 || !false_edge
4369 || !(true_edge->flags & EDGE_TRUE_VALUE)
4370 || !(false_edge->flags & EDGE_FALSE_VALUE)
4371 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4372 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4373 || EDGE_COUNT (bb->succs) >= 3)
4375 error ("wrong outgoing edge flags at end of bb %d",
4376 bb->index);
4377 err = 1;
4380 break;
4382 case GIMPLE_GOTO:
4383 if (simple_goto_p (stmt))
4385 error ("explicit goto at end of bb %d", bb->index);
4386 err = 1;
4388 else
4390 /* FIXME. We should double check that the labels in the
4391 destination blocks have their address taken. */
4392 FOR_EACH_EDGE (e, ei, bb->succs)
4393 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
4394 | EDGE_FALSE_VALUE))
4395 || !(e->flags & EDGE_ABNORMAL))
4397 error ("wrong outgoing edge flags at end of bb %d",
4398 bb->index);
4399 err = 1;
4402 break;
4404 case GIMPLE_RETURN:
4405 if (!single_succ_p (bb)
4406 || (single_succ_edge (bb)->flags
4407 & (EDGE_FALLTHRU | EDGE_ABNORMAL
4408 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4410 error ("wrong outgoing edge flags at end of bb %d", bb->index);
4411 err = 1;
4413 if (single_succ (bb) != EXIT_BLOCK_PTR)
4415 error ("return edge does not point to exit in bb %d",
4416 bb->index);
4417 err = 1;
4419 break;
4421 case GIMPLE_SWITCH:
4423 tree prev;
4424 edge e;
4425 size_t i, n;
4427 n = gimple_switch_num_labels (stmt);
4429 /* Mark all the destination basic blocks. */
4430 for (i = 0; i < n; ++i)
4432 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4433 basic_block label_bb = label_to_block (lab);
4434 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
4435 label_bb->aux = (void *)1;
4438 /* Verify that the case labels are sorted. */
4439 prev = gimple_switch_label (stmt, 0);
4440 for (i = 1; i < n; ++i)
4442 tree c = gimple_switch_label (stmt, i);
4443 if (!CASE_LOW (c))
4445 error ("found default case not at the start of "
4446 "case vector");
4447 err = 1;
4448 continue;
4450 if (CASE_LOW (prev)
4451 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
4453 error ("case labels not sorted: ");
4454 print_generic_expr (stderr, prev, 0);
4455 fprintf (stderr," is greater than ");
4456 print_generic_expr (stderr, c, 0);
4457 fprintf (stderr," but comes before it.\n");
4458 err = 1;
4460 prev = c;
4462 /* VRP will remove the default case if it can prove it will
4463 never be executed. So do not verify there always exists
4464 a default case here. */
4466 FOR_EACH_EDGE (e, ei, bb->succs)
4468 if (!e->dest->aux)
4470 error ("extra outgoing edge %d->%d",
4471 bb->index, e->dest->index);
4472 err = 1;
4475 e->dest->aux = (void *)2;
4476 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
4477 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4479 error ("wrong outgoing edge flags at end of bb %d",
4480 bb->index);
4481 err = 1;
4485 /* Check that we have all of them. */
4486 for (i = 0; i < n; ++i)
4488 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4489 basic_block label_bb = label_to_block (lab);
4491 if (label_bb->aux != (void *)2)
4493 error ("missing edge %i->%i", bb->index, label_bb->index);
4494 err = 1;
4498 FOR_EACH_EDGE (e, ei, bb->succs)
4499 e->dest->aux = (void *)0;
4501 break;
4503 case GIMPLE_EH_DISPATCH:
4504 err |= verify_eh_dispatch_edge (stmt);
4505 break;
4507 default:
4508 break;
4512 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
4513 verify_dominators (CDI_DOMINATORS);
4515 return err;
4519 /* Updates phi nodes after creating a forwarder block joined
4520 by edge FALLTHRU. */
4522 static void
4523 gimple_make_forwarder_block (edge fallthru)
4525 edge e;
4526 edge_iterator ei;
4527 basic_block dummy, bb;
4528 tree var;
4529 gimple_stmt_iterator gsi;
4531 dummy = fallthru->src;
4532 bb = fallthru->dest;
4534 if (single_pred_p (bb))
4535 return;
4537 /* If we redirected a branch we must create new PHI nodes at the
4538 start of BB. */
4539 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
4541 gimple phi, new_phi;
4543 phi = gsi_stmt (gsi);
4544 var = gimple_phi_result (phi);
4545 new_phi = create_phi_node (var, bb);
4546 SSA_NAME_DEF_STMT (var) = new_phi;
4547 gimple_phi_set_result (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
4548 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
4549 UNKNOWN_LOCATION);
4552 /* Add the arguments we have stored on edges. */
4553 FOR_EACH_EDGE (e, ei, bb->preds)
4555 if (e == fallthru)
4556 continue;
4558 flush_pending_stmts (e);
4563 /* Return a non-special label in the head of basic block BLOCK.
4564 Create one if it doesn't exist. */
4566 tree
4567 gimple_block_label (basic_block bb)
4569 gimple_stmt_iterator i, s = gsi_start_bb (bb);
4570 bool first = true;
4571 tree label;
4572 gimple stmt;
4574 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
4576 stmt = gsi_stmt (i);
4577 if (gimple_code (stmt) != GIMPLE_LABEL)
4578 break;
4579 label = gimple_label_label (stmt);
4580 if (!DECL_NONLOCAL (label))
4582 if (!first)
4583 gsi_move_before (&i, &s);
4584 return label;
4588 label = create_artificial_label (UNKNOWN_LOCATION);
4589 stmt = gimple_build_label (label);
4590 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
4591 return label;
4595 /* Attempt to perform edge redirection by replacing a possibly complex
4596 jump instruction by a goto or by removing the jump completely.
4597 This can apply only if all edges now point to the same block. The
4598 parameters and return values are equivalent to
4599 redirect_edge_and_branch. */
4601 static edge
4602 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
4604 basic_block src = e->src;
4605 gimple_stmt_iterator i;
4606 gimple stmt;
4608 /* We can replace or remove a complex jump only when we have exactly
4609 two edges. */
4610 if (EDGE_COUNT (src->succs) != 2
4611 /* Verify that all targets will be TARGET. Specifically, the
4612 edge that is not E must also go to TARGET. */
4613 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4614 return NULL;
4616 i = gsi_last_bb (src);
4617 if (gsi_end_p (i))
4618 return NULL;
4620 stmt = gsi_stmt (i);
4622 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
4624 gsi_remove (&i, true);
4625 e = ssa_redirect_edge (e, target);
4626 e->flags = EDGE_FALLTHRU;
4627 return e;
4630 return NULL;
4634 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4635 edge representing the redirected branch. */
4637 static edge
4638 gimple_redirect_edge_and_branch (edge e, basic_block dest)
4640 basic_block bb = e->src;
4641 gimple_stmt_iterator gsi;
4642 edge ret;
4643 gimple stmt;
4645 if (e->flags & EDGE_ABNORMAL)
4646 return NULL;
4648 if (e->dest == dest)
4649 return NULL;
4651 if (e->flags & EDGE_EH)
4652 return redirect_eh_edge (e, dest);
4654 if (e->src != ENTRY_BLOCK_PTR)
4656 ret = gimple_try_redirect_by_replacing_jump (e, dest);
4657 if (ret)
4658 return ret;
4661 gsi = gsi_last_bb (bb);
4662 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
4664 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
4666 case GIMPLE_COND:
4667 /* For COND_EXPR, we only need to redirect the edge. */
4668 break;
4670 case GIMPLE_GOTO:
4671 /* No non-abnormal edges should lead from a non-simple goto, and
4672 simple ones should be represented implicitly. */
4673 gcc_unreachable ();
4675 case GIMPLE_SWITCH:
4677 tree label = gimple_block_label (dest);
4678 tree cases = get_cases_for_edge (e, stmt);
4680 /* If we have a list of cases associated with E, then use it
4681 as it's a lot faster than walking the entire case vector. */
4682 if (cases)
4684 edge e2 = find_edge (e->src, dest);
4685 tree last, first;
4687 first = cases;
4688 while (cases)
4690 last = cases;
4691 CASE_LABEL (cases) = label;
4692 cases = TREE_CHAIN (cases);
4695 /* If there was already an edge in the CFG, then we need
4696 to move all the cases associated with E to E2. */
4697 if (e2)
4699 tree cases2 = get_cases_for_edge (e2, stmt);
4701 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4702 TREE_CHAIN (cases2) = first;
4705 else
4707 size_t i, n = gimple_switch_num_labels (stmt);
4709 for (i = 0; i < n; i++)
4711 tree elt = gimple_switch_label (stmt, i);
4712 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4713 CASE_LABEL (elt) = label;
4717 break;
4719 case GIMPLE_ASM:
4721 int i, n = gimple_asm_nlabels (stmt);
4722 tree label = NULL;
4724 for (i = 0; i < n; ++i)
4726 tree cons = gimple_asm_label_op (stmt, i);
4727 if (label_to_block (TREE_VALUE (cons)) == e->dest)
4729 if (!label)
4730 label = gimple_block_label (dest);
4731 TREE_VALUE (cons) = label;
4735 /* If we didn't find any label matching the former edge in the
4736 asm labels, we must be redirecting the fallthrough
4737 edge. */
4738 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
4740 break;
4742 case GIMPLE_RETURN:
4743 gsi_remove (&gsi, true);
4744 e->flags |= EDGE_FALLTHRU;
4745 break;
4747 case GIMPLE_OMP_RETURN:
4748 case GIMPLE_OMP_CONTINUE:
4749 case GIMPLE_OMP_SECTIONS_SWITCH:
4750 case GIMPLE_OMP_FOR:
4751 /* The edges from OMP constructs can be simply redirected. */
4752 break;
4754 case GIMPLE_EH_DISPATCH:
4755 if (!(e->flags & EDGE_FALLTHRU))
4756 redirect_eh_dispatch_edge (stmt, e, dest);
4757 break;
4759 default:
4760 /* Otherwise it must be a fallthru edge, and we don't need to
4761 do anything besides redirecting it. */
4762 gcc_assert (e->flags & EDGE_FALLTHRU);
4763 break;
4766 /* Update/insert PHI nodes as necessary. */
4768 /* Now update the edges in the CFG. */
4769 e = ssa_redirect_edge (e, dest);
4771 return e;
4774 /* Returns true if it is possible to remove edge E by redirecting
4775 it to the destination of the other edge from E->src. */
4777 static bool
4778 gimple_can_remove_branch_p (const_edge e)
4780 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
4781 return false;
4783 return true;
4786 /* Simple wrapper, as we can always redirect fallthru edges. */
4788 static basic_block
4789 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
4791 e = gimple_redirect_edge_and_branch (e, dest);
4792 gcc_assert (e);
4794 return NULL;
4798 /* Splits basic block BB after statement STMT (but at least after the
4799 labels). If STMT is NULL, BB is split just after the labels. */
4801 static basic_block
4802 gimple_split_block (basic_block bb, void *stmt)
4804 gimple_stmt_iterator gsi;
4805 gimple_stmt_iterator gsi_tgt;
4806 gimple act;
4807 gimple_seq list;
4808 basic_block new_bb;
4809 edge e;
4810 edge_iterator ei;
4812 new_bb = create_empty_bb (bb);
4814 /* Redirect the outgoing edges. */
4815 new_bb->succs = bb->succs;
4816 bb->succs = NULL;
4817 FOR_EACH_EDGE (e, ei, new_bb->succs)
4818 e->src = new_bb;
4820 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
4821 stmt = NULL;
4823 /* Move everything from GSI to the new basic block. */
4824 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4826 act = gsi_stmt (gsi);
4827 if (gimple_code (act) == GIMPLE_LABEL)
4828 continue;
4830 if (!stmt)
4831 break;
4833 if (stmt == act)
4835 gsi_next (&gsi);
4836 break;
4840 if (gsi_end_p (gsi))
4841 return new_bb;
4843 /* Split the statement list - avoid re-creating new containers as this
4844 brings ugly quadratic memory consumption in the inliner.
4845 (We are still quadratic since we need to update stmt BB pointers,
4846 sadly.) */
4847 list = gsi_split_seq_before (&gsi);
4848 set_bb_seq (new_bb, list);
4849 for (gsi_tgt = gsi_start (list);
4850 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
4851 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
4853 return new_bb;
4857 /* Moves basic block BB after block AFTER. */
4859 static bool
4860 gimple_move_block_after (basic_block bb, basic_block after)
4862 if (bb->prev_bb == after)
4863 return true;
4865 unlink_block (bb);
4866 link_block (bb, after);
4868 return true;
4872 /* Return true if basic_block can be duplicated. */
4874 static bool
4875 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
4877 return true;
4880 /* Create a duplicate of the basic block BB. NOTE: This does not
4881 preserve SSA form. */
4883 static basic_block
4884 gimple_duplicate_bb (basic_block bb)
4886 basic_block new_bb;
4887 gimple_stmt_iterator gsi, gsi_tgt;
4888 gimple_seq phis = phi_nodes (bb);
4889 gimple phi, stmt, copy;
4891 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
4893 /* Copy the PHI nodes. We ignore PHI node arguments here because
4894 the incoming edges have not been setup yet. */
4895 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
4897 phi = gsi_stmt (gsi);
4898 copy = create_phi_node (gimple_phi_result (phi), new_bb);
4899 create_new_def_for (gimple_phi_result (copy), copy,
4900 gimple_phi_result_ptr (copy));
4903 gsi_tgt = gsi_start_bb (new_bb);
4904 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4906 def_operand_p def_p;
4907 ssa_op_iter op_iter;
4909 stmt = gsi_stmt (gsi);
4910 if (gimple_code (stmt) == GIMPLE_LABEL)
4911 continue;
4913 /* Create a new copy of STMT and duplicate STMT's virtual
4914 operands. */
4915 copy = gimple_copy (stmt);
4916 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
4918 maybe_duplicate_eh_stmt (copy, stmt);
4919 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
4921 /* Create new names for all the definitions created by COPY and
4922 add replacement mappings for each new name. */
4923 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
4924 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
4927 return new_bb;
4930 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
4932 static void
4933 add_phi_args_after_copy_edge (edge e_copy)
4935 basic_block bb, bb_copy = e_copy->src, dest;
4936 edge e;
4937 edge_iterator ei;
4938 gimple phi, phi_copy;
4939 tree def;
4940 gimple_stmt_iterator psi, psi_copy;
4942 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
4943 return;
4945 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
4947 if (e_copy->dest->flags & BB_DUPLICATED)
4948 dest = get_bb_original (e_copy->dest);
4949 else
4950 dest = e_copy->dest;
4952 e = find_edge (bb, dest);
4953 if (!e)
4955 /* During loop unrolling the target of the latch edge is copied.
4956 In this case we are not looking for edge to dest, but to
4957 duplicated block whose original was dest. */
4958 FOR_EACH_EDGE (e, ei, bb->succs)
4960 if ((e->dest->flags & BB_DUPLICATED)
4961 && get_bb_original (e->dest) == dest)
4962 break;
4965 gcc_assert (e != NULL);
4968 for (psi = gsi_start_phis (e->dest),
4969 psi_copy = gsi_start_phis (e_copy->dest);
4970 !gsi_end_p (psi);
4971 gsi_next (&psi), gsi_next (&psi_copy))
4973 phi = gsi_stmt (psi);
4974 phi_copy = gsi_stmt (psi_copy);
4975 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4976 add_phi_arg (phi_copy, def, e_copy,
4977 gimple_phi_arg_location_from_edge (phi, e));
4982 /* Basic block BB_COPY was created by code duplication. Add phi node
4983 arguments for edges going out of BB_COPY. The blocks that were
4984 duplicated have BB_DUPLICATED set. */
4986 void
4987 add_phi_args_after_copy_bb (basic_block bb_copy)
4989 edge e_copy;
4990 edge_iterator ei;
4992 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
4994 add_phi_args_after_copy_edge (e_copy);
4998 /* Blocks in REGION_COPY array of length N_REGION were created by
4999 duplication of basic blocks. Add phi node arguments for edges
5000 going from these blocks. If E_COPY is not NULL, also add
5001 phi node arguments for its destination.*/
5003 void
5004 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5005 edge e_copy)
5007 unsigned i;
5009 for (i = 0; i < n_region; i++)
5010 region_copy[i]->flags |= BB_DUPLICATED;
5012 for (i = 0; i < n_region; i++)
5013 add_phi_args_after_copy_bb (region_copy[i]);
5014 if (e_copy)
5015 add_phi_args_after_copy_edge (e_copy);
5017 for (i = 0; i < n_region; i++)
5018 region_copy[i]->flags &= ~BB_DUPLICATED;
5021 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5022 important exit edge EXIT. By important we mean that no SSA name defined
5023 inside region is live over the other exit edges of the region. All entry
5024 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5025 to the duplicate of the region. SSA form, dominance and loop information
5026 is updated. The new basic blocks are stored to REGION_COPY in the same
5027 order as they had in REGION, provided that REGION_COPY is not NULL.
5028 The function returns false if it is unable to copy the region,
5029 true otherwise. */
5031 bool
5032 gimple_duplicate_sese_region (edge entry, edge exit,
5033 basic_block *region, unsigned n_region,
5034 basic_block *region_copy)
5036 unsigned i;
5037 bool free_region_copy = false, copying_header = false;
5038 struct loop *loop = entry->dest->loop_father;
5039 edge exit_copy;
5040 VEC (basic_block, heap) *doms;
5041 edge redirected;
5042 int total_freq = 0, entry_freq = 0;
5043 gcov_type total_count = 0, entry_count = 0;
5045 if (!can_copy_bbs_p (region, n_region))
5046 return false;
5048 /* Some sanity checking. Note that we do not check for all possible
5049 missuses of the functions. I.e. if you ask to copy something weird,
5050 it will work, but the state of structures probably will not be
5051 correct. */
5052 for (i = 0; i < n_region; i++)
5054 /* We do not handle subloops, i.e. all the blocks must belong to the
5055 same loop. */
5056 if (region[i]->loop_father != loop)
5057 return false;
5059 if (region[i] != entry->dest
5060 && region[i] == loop->header)
5061 return false;
5064 set_loop_copy (loop, loop);
5066 /* In case the function is used for loop header copying (which is the primary
5067 use), ensure that EXIT and its copy will be new latch and entry edges. */
5068 if (loop->header == entry->dest)
5070 copying_header = true;
5071 set_loop_copy (loop, loop_outer (loop));
5073 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5074 return false;
5076 for (i = 0; i < n_region; i++)
5077 if (region[i] != exit->src
5078 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5079 return false;
5082 if (!region_copy)
5084 region_copy = XNEWVEC (basic_block, n_region);
5085 free_region_copy = true;
5088 gcc_assert (!need_ssa_update_p (cfun));
5090 /* Record blocks outside the region that are dominated by something
5091 inside. */
5092 doms = NULL;
5093 initialize_original_copy_tables ();
5095 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5097 if (entry->dest->count)
5099 total_count = entry->dest->count;
5100 entry_count = entry->count;
5101 /* Fix up corner cases, to avoid division by zero or creation of negative
5102 frequencies. */
5103 if (entry_count > total_count)
5104 entry_count = total_count;
5106 else
5108 total_freq = entry->dest->frequency;
5109 entry_freq = EDGE_FREQUENCY (entry);
5110 /* Fix up corner cases, to avoid division by zero or creation of negative
5111 frequencies. */
5112 if (total_freq == 0)
5113 total_freq = 1;
5114 else if (entry_freq > total_freq)
5115 entry_freq = total_freq;
5118 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5119 split_edge_bb_loc (entry));
5120 if (total_count)
5122 scale_bbs_frequencies_gcov_type (region, n_region,
5123 total_count - entry_count,
5124 total_count);
5125 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
5126 total_count);
5128 else
5130 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5131 total_freq);
5132 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
5135 if (copying_header)
5137 loop->header = exit->dest;
5138 loop->latch = exit->src;
5141 /* Redirect the entry and add the phi node arguments. */
5142 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
5143 gcc_assert (redirected != NULL);
5144 flush_pending_stmts (entry);
5146 /* Concerning updating of dominators: We must recount dominators
5147 for entry block and its copy. Anything that is outside of the
5148 region, but was dominated by something inside needs recounting as
5149 well. */
5150 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5151 VEC_safe_push (basic_block, heap, doms, get_bb_original (entry->dest));
5152 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5153 VEC_free (basic_block, heap, doms);
5155 /* Add the other PHI node arguments. */
5156 add_phi_args_after_copy (region_copy, n_region, NULL);
5158 /* Update the SSA web. */
5159 update_ssa (TODO_update_ssa);
5161 if (free_region_copy)
5162 free (region_copy);
5164 free_original_copy_tables ();
5165 return true;
5168 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5169 are stored to REGION_COPY in the same order in that they appear
5170 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5171 the region, EXIT an exit from it. The condition guarding EXIT
5172 is moved to ENTRY. Returns true if duplication succeeds, false
5173 otherwise.
5175 For example,
5177 some_code;
5178 if (cond)
5180 else
5183 is transformed to
5185 if (cond)
5187 some_code;
5190 else
5192 some_code;
5197 bool
5198 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
5199 basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
5200 basic_block *region_copy ATTRIBUTE_UNUSED)
5202 unsigned i;
5203 bool free_region_copy = false;
5204 struct loop *loop = exit->dest->loop_father;
5205 struct loop *orig_loop = entry->dest->loop_father;
5206 basic_block switch_bb, entry_bb, nentry_bb;
5207 VEC (basic_block, heap) *doms;
5208 int total_freq = 0, exit_freq = 0;
5209 gcov_type total_count = 0, exit_count = 0;
5210 edge exits[2], nexits[2], e;
5211 gimple_stmt_iterator gsi,gsi1;
5212 gimple cond_stmt;
5213 edge sorig, snew;
5214 basic_block exit_bb;
5215 basic_block iters_bb;
5216 tree new_rhs;
5217 gimple_stmt_iterator psi;
5218 gimple phi;
5219 tree def;
5221 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5222 exits[0] = exit;
5223 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5225 if (!can_copy_bbs_p (region, n_region))
5226 return false;
5228 initialize_original_copy_tables ();
5229 set_loop_copy (orig_loop, loop);
5230 duplicate_subloops (orig_loop, loop);
5232 if (!region_copy)
5234 region_copy = XNEWVEC (basic_block, n_region);
5235 free_region_copy = true;
5238 gcc_assert (!need_ssa_update_p (cfun));
5240 /* Record blocks outside the region that are dominated by something
5241 inside. */
5242 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5244 if (exit->src->count)
5246 total_count = exit->src->count;
5247 exit_count = exit->count;
5248 /* Fix up corner cases, to avoid division by zero or creation of negative
5249 frequencies. */
5250 if (exit_count > total_count)
5251 exit_count = total_count;
5253 else
5255 total_freq = exit->src->frequency;
5256 exit_freq = EDGE_FREQUENCY (exit);
5257 /* Fix up corner cases, to avoid division by zero or creation of negative
5258 frequencies. */
5259 if (total_freq == 0)
5260 total_freq = 1;
5261 if (exit_freq > total_freq)
5262 exit_freq = total_freq;
5265 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5266 split_edge_bb_loc (exit));
5267 if (total_count)
5269 scale_bbs_frequencies_gcov_type (region, n_region,
5270 total_count - exit_count,
5271 total_count);
5272 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
5273 total_count);
5275 else
5277 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
5278 total_freq);
5279 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
5282 /* Create the switch block, and put the exit condition to it. */
5283 entry_bb = entry->dest;
5284 nentry_bb = get_bb_copy (entry_bb);
5285 if (!last_stmt (entry->src)
5286 || !stmt_ends_bb_p (last_stmt (entry->src)))
5287 switch_bb = entry->src;
5288 else
5289 switch_bb = split_edge (entry);
5290 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
5292 gsi = gsi_last_bb (switch_bb);
5293 cond_stmt = last_stmt (exit->src);
5294 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
5295 cond_stmt = gimple_copy (cond_stmt);
5297 /* If the block consisting of the exit condition has the latch as
5298 successor, then the body of the loop is executed before
5299 the exit condition is tested. In such case, moving the
5300 condition to the entry, causes that the loop will iterate
5301 one less iteration (which is the wanted outcome, since we
5302 peel out the last iteration). If the body is executed after
5303 the condition, moving the condition to the entry requires
5304 decrementing one iteration. */
5305 if (exits[1]->dest == orig_loop->latch)
5306 new_rhs = gimple_cond_rhs (cond_stmt);
5307 else
5309 new_rhs = fold_build2 (MINUS_EXPR, TREE_TYPE (gimple_cond_rhs (cond_stmt)),
5310 gimple_cond_rhs (cond_stmt),
5311 build_int_cst (TREE_TYPE (gimple_cond_rhs (cond_stmt)), 1));
5313 if (TREE_CODE (gimple_cond_rhs (cond_stmt)) == SSA_NAME)
5315 iters_bb = gimple_bb (SSA_NAME_DEF_STMT (gimple_cond_rhs (cond_stmt)));
5316 for (gsi1 = gsi_start_bb (iters_bb); !gsi_end_p (gsi1); gsi_next (&gsi1))
5317 if (gsi_stmt (gsi1) == SSA_NAME_DEF_STMT (gimple_cond_rhs (cond_stmt)))
5318 break;
5320 new_rhs = force_gimple_operand_gsi (&gsi1, new_rhs, true,
5321 NULL_TREE,false,GSI_CONTINUE_LINKING);
5324 gimple_cond_set_rhs (cond_stmt, unshare_expr (new_rhs));
5325 gimple_cond_set_lhs (cond_stmt, unshare_expr (gimple_cond_lhs (cond_stmt)));
5326 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
5328 sorig = single_succ_edge (switch_bb);
5329 sorig->flags = exits[1]->flags;
5330 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
5332 /* Register the new edge from SWITCH_BB in loop exit lists. */
5333 rescan_loop_exit (snew, true, false);
5335 /* Add the PHI node arguments. */
5336 add_phi_args_after_copy (region_copy, n_region, snew);
5338 /* Get rid of now superfluous conditions and associated edges (and phi node
5339 arguments). */
5340 exit_bb = exit->dest;
5342 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
5343 PENDING_STMT (e) = NULL;
5345 /* The latch of ORIG_LOOP was copied, and so was the backedge
5346 to the original header. We redirect this backedge to EXIT_BB. */
5347 for (i = 0; i < n_region; i++)
5348 if (get_bb_original (region_copy[i]) == orig_loop->latch)
5350 gcc_assert (single_succ_edge (region_copy[i]));
5351 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
5352 PENDING_STMT (e) = NULL;
5353 for (psi = gsi_start_phis (exit_bb);
5354 !gsi_end_p (psi);
5355 gsi_next (&psi))
5357 phi = gsi_stmt (psi);
5358 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
5359 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
5362 e = redirect_edge_and_branch (nexits[0], nexits[1]->dest);
5363 PENDING_STMT (e) = NULL;
5365 /* Anything that is outside of the region, but was dominated by something
5366 inside needs to update dominance info. */
5367 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5368 VEC_free (basic_block, heap, doms);
5369 /* Update the SSA web. */
5370 update_ssa (TODO_update_ssa);
5372 if (free_region_copy)
5373 free (region_copy);
5375 free_original_copy_tables ();
5376 return true;
5379 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
5380 adding blocks when the dominator traversal reaches EXIT. This
5381 function silently assumes that ENTRY strictly dominates EXIT. */
5383 void
5384 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
5385 VEC(basic_block,heap) **bbs_p)
5387 basic_block son;
5389 for (son = first_dom_son (CDI_DOMINATORS, entry);
5390 son;
5391 son = next_dom_son (CDI_DOMINATORS, son))
5393 VEC_safe_push (basic_block, heap, *bbs_p, son);
5394 if (son != exit)
5395 gather_blocks_in_sese_region (son, exit, bbs_p);
5399 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
5400 The duplicates are recorded in VARS_MAP. */
5402 static void
5403 replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
5404 tree to_context)
5406 tree t = *tp, new_t;
5407 struct function *f = DECL_STRUCT_FUNCTION (to_context);
5408 void **loc;
5410 if (DECL_CONTEXT (t) == to_context)
5411 return;
5413 loc = pointer_map_contains (vars_map, t);
5415 if (!loc)
5417 loc = pointer_map_insert (vars_map, t);
5419 if (SSA_VAR_P (t))
5421 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
5422 f->local_decls = tree_cons (NULL_TREE, new_t, f->local_decls);
5424 else
5426 gcc_assert (TREE_CODE (t) == CONST_DECL);
5427 new_t = copy_node (t);
5429 DECL_CONTEXT (new_t) = to_context;
5431 *loc = new_t;
5433 else
5434 new_t = (tree) *loc;
5436 *tp = new_t;
5440 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
5441 VARS_MAP maps old ssa names and var_decls to the new ones. */
5443 static tree
5444 replace_ssa_name (tree name, struct pointer_map_t *vars_map,
5445 tree to_context)
5447 void **loc;
5448 tree new_name, decl = SSA_NAME_VAR (name);
5450 gcc_assert (is_gimple_reg (name));
5452 loc = pointer_map_contains (vars_map, name);
5454 if (!loc)
5456 replace_by_duplicate_decl (&decl, vars_map, to_context);
5458 push_cfun (DECL_STRUCT_FUNCTION (to_context));
5459 if (gimple_in_ssa_p (cfun))
5460 add_referenced_var (decl);
5462 new_name = make_ssa_name (decl, SSA_NAME_DEF_STMT (name));
5463 if (SSA_NAME_IS_DEFAULT_DEF (name))
5464 set_default_def (decl, new_name);
5465 pop_cfun ();
5467 loc = pointer_map_insert (vars_map, name);
5468 *loc = new_name;
5470 else
5471 new_name = (tree) *loc;
5473 return new_name;
5476 struct move_stmt_d
5478 tree orig_block;
5479 tree new_block;
5480 tree from_context;
5481 tree to_context;
5482 struct pointer_map_t *vars_map;
5483 htab_t new_label_map;
5484 struct pointer_map_t *eh_map;
5485 bool remap_decls_p;
5488 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
5489 contained in *TP if it has been ORIG_BLOCK previously and change the
5490 DECL_CONTEXT of every local variable referenced in *TP. */
5492 static tree
5493 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
5495 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5496 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
5497 tree t = *tp;
5499 if (EXPR_P (t))
5500 /* We should never have TREE_BLOCK set on non-statements. */
5501 gcc_assert (!TREE_BLOCK (t));
5503 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
5505 if (TREE_CODE (t) == SSA_NAME)
5506 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
5507 else if (TREE_CODE (t) == LABEL_DECL)
5509 if (p->new_label_map)
5511 struct tree_map in, *out;
5512 in.base.from = t;
5513 out = (struct tree_map *)
5514 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
5515 if (out)
5516 *tp = t = out->to;
5519 DECL_CONTEXT (t) = p->to_context;
5521 else if (p->remap_decls_p)
5523 /* Replace T with its duplicate. T should no longer appear in the
5524 parent function, so this looks wasteful; however, it may appear
5525 in referenced_vars, and more importantly, as virtual operands of
5526 statements, and in alias lists of other variables. It would be
5527 quite difficult to expunge it from all those places. ??? It might
5528 suffice to do this for addressable variables. */
5529 if ((TREE_CODE (t) == VAR_DECL
5530 && !is_global_var (t))
5531 || TREE_CODE (t) == CONST_DECL)
5532 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
5534 if (SSA_VAR_P (t)
5535 && gimple_in_ssa_p (cfun))
5537 push_cfun (DECL_STRUCT_FUNCTION (p->to_context));
5538 add_referenced_var (*tp);
5539 pop_cfun ();
5542 *walk_subtrees = 0;
5544 else if (TYPE_P (t))
5545 *walk_subtrees = 0;
5547 return NULL_TREE;
5550 /* Helper for move_stmt_r. Given an EH region number for the source
5551 function, map that to the duplicate EH regio number in the dest. */
5553 static int
5554 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
5556 eh_region old_r, new_r;
5557 void **slot;
5559 old_r = get_eh_region_from_number (old_nr);
5560 slot = pointer_map_contains (p->eh_map, old_r);
5561 new_r = (eh_region) *slot;
5563 return new_r->index;
5566 /* Similar, but operate on INTEGER_CSTs. */
5568 static tree
5569 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
5571 int old_nr, new_nr;
5573 old_nr = tree_low_cst (old_t_nr, 0);
5574 new_nr = move_stmt_eh_region_nr (old_nr, p);
5576 return build_int_cst (NULL, new_nr);
5579 /* Like move_stmt_op, but for gimple statements.
5581 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
5582 contained in the current statement in *GSI_P and change the
5583 DECL_CONTEXT of every local variable referenced in the current
5584 statement. */
5586 static tree
5587 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
5588 struct walk_stmt_info *wi)
5590 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
5591 gimple stmt = gsi_stmt (*gsi_p);
5592 tree block = gimple_block (stmt);
5594 if (p->orig_block == NULL_TREE
5595 || block == p->orig_block
5596 || block == NULL_TREE)
5597 gimple_set_block (stmt, p->new_block);
5598 #ifdef ENABLE_CHECKING
5599 else if (block != p->new_block)
5601 while (block && block != p->orig_block)
5602 block = BLOCK_SUPERCONTEXT (block);
5603 gcc_assert (block);
5605 #endif
5607 switch (gimple_code (stmt))
5609 case GIMPLE_CALL:
5610 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
5612 tree r, fndecl = gimple_call_fndecl (stmt);
5613 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
5614 switch (DECL_FUNCTION_CODE (fndecl))
5616 case BUILT_IN_EH_COPY_VALUES:
5617 r = gimple_call_arg (stmt, 1);
5618 r = move_stmt_eh_region_tree_nr (r, p);
5619 gimple_call_set_arg (stmt, 1, r);
5620 /* FALLTHRU */
5622 case BUILT_IN_EH_POINTER:
5623 case BUILT_IN_EH_FILTER:
5624 r = gimple_call_arg (stmt, 0);
5625 r = move_stmt_eh_region_tree_nr (r, p);
5626 gimple_call_set_arg (stmt, 0, r);
5627 break;
5629 default:
5630 break;
5633 break;
5635 case GIMPLE_RESX:
5637 int r = gimple_resx_region (stmt);
5638 r = move_stmt_eh_region_nr (r, p);
5639 gimple_resx_set_region (stmt, r);
5641 break;
5643 case GIMPLE_EH_DISPATCH:
5645 int r = gimple_eh_dispatch_region (stmt);
5646 r = move_stmt_eh_region_nr (r, p);
5647 gimple_eh_dispatch_set_region (stmt, r);
5649 break;
5651 case GIMPLE_OMP_RETURN:
5652 case GIMPLE_OMP_CONTINUE:
5653 break;
5654 default:
5655 if (is_gimple_omp (stmt))
5657 /* Do not remap variables inside OMP directives. Variables
5658 referenced in clauses and directive header belong to the
5659 parent function and should not be moved into the child
5660 function. */
5661 bool save_remap_decls_p = p->remap_decls_p;
5662 p->remap_decls_p = false;
5663 *handled_ops_p = true;
5665 walk_gimple_seq (gimple_omp_body (stmt), move_stmt_r,
5666 move_stmt_op, wi);
5668 p->remap_decls_p = save_remap_decls_p;
5670 break;
5673 return NULL_TREE;
5676 /* Marks virtual operands of all statements in basic blocks BBS for
5677 renaming. */
5679 void
5680 mark_virtual_ops_in_bb (basic_block bb)
5682 gimple_stmt_iterator gsi;
5684 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5685 mark_virtual_ops_for_renaming (gsi_stmt (gsi));
5687 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5688 mark_virtual_ops_for_renaming (gsi_stmt (gsi));
5691 /* Move basic block BB from function CFUN to function DEST_FN. The
5692 block is moved out of the original linked list and placed after
5693 block AFTER in the new list. Also, the block is removed from the
5694 original array of blocks and placed in DEST_FN's array of blocks.
5695 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
5696 updated to reflect the moved edges.
5698 The local variables are remapped to new instances, VARS_MAP is used
5699 to record the mapping. */
5701 static void
5702 move_block_to_fn (struct function *dest_cfun, basic_block bb,
5703 basic_block after, bool update_edge_count_p,
5704 struct move_stmt_d *d)
5706 struct control_flow_graph *cfg;
5707 edge_iterator ei;
5708 edge e;
5709 gimple_stmt_iterator si;
5710 unsigned old_len, new_len;
5712 /* Remove BB from dominance structures. */
5713 delete_from_dominance_info (CDI_DOMINATORS, bb);
5714 if (current_loops)
5715 remove_bb_from_loops (bb);
5717 /* Link BB to the new linked list. */
5718 move_block_after (bb, after);
5720 /* Update the edge count in the corresponding flowgraphs. */
5721 if (update_edge_count_p)
5722 FOR_EACH_EDGE (e, ei, bb->succs)
5724 cfun->cfg->x_n_edges--;
5725 dest_cfun->cfg->x_n_edges++;
5728 /* Remove BB from the original basic block array. */
5729 VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
5730 cfun->cfg->x_n_basic_blocks--;
5732 /* Grow DEST_CFUN's basic block array if needed. */
5733 cfg = dest_cfun->cfg;
5734 cfg->x_n_basic_blocks++;
5735 if (bb->index >= cfg->x_last_basic_block)
5736 cfg->x_last_basic_block = bb->index + 1;
5738 old_len = VEC_length (basic_block, cfg->x_basic_block_info);
5739 if ((unsigned) cfg->x_last_basic_block >= old_len)
5741 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
5742 VEC_safe_grow_cleared (basic_block, gc, cfg->x_basic_block_info,
5743 new_len);
5746 VEC_replace (basic_block, cfg->x_basic_block_info,
5747 bb->index, bb);
5749 /* Remap the variables in phi nodes. */
5750 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
5752 gimple phi = gsi_stmt (si);
5753 use_operand_p use;
5754 tree op = PHI_RESULT (phi);
5755 ssa_op_iter oi;
5757 if (!is_gimple_reg (op))
5759 /* Remove the phi nodes for virtual operands (alias analysis will be
5760 run for the new function, anyway). */
5761 remove_phi_node (&si, true);
5762 continue;
5765 SET_PHI_RESULT (phi,
5766 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
5767 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
5769 op = USE_FROM_PTR (use);
5770 if (TREE_CODE (op) == SSA_NAME)
5771 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
5774 gsi_next (&si);
5777 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5779 gimple stmt = gsi_stmt (si);
5780 struct walk_stmt_info wi;
5782 memset (&wi, 0, sizeof (wi));
5783 wi.info = d;
5784 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
5786 if (gimple_code (stmt) == GIMPLE_LABEL)
5788 tree label = gimple_label_label (stmt);
5789 int uid = LABEL_DECL_UID (label);
5791 gcc_assert (uid > -1);
5793 old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
5794 if (old_len <= (unsigned) uid)
5796 new_len = 3 * uid / 2 + 1;
5797 VEC_safe_grow_cleared (basic_block, gc,
5798 cfg->x_label_to_block_map, new_len);
5801 VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
5802 VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
5804 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
5806 if (uid >= dest_cfun->cfg->last_label_uid)
5807 dest_cfun->cfg->last_label_uid = uid + 1;
5810 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
5811 remove_stmt_from_eh_lp_fn (cfun, stmt);
5813 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
5814 gimple_remove_stmt_histograms (cfun, stmt);
5816 /* We cannot leave any operands allocated from the operand caches of
5817 the current function. */
5818 free_stmt_operands (stmt);
5819 push_cfun (dest_cfun);
5820 update_stmt (stmt);
5821 pop_cfun ();
5824 FOR_EACH_EDGE (e, ei, bb->succs)
5825 if (e->goto_locus)
5827 tree block = e->goto_block;
5828 if (d->orig_block == NULL_TREE
5829 || block == d->orig_block)
5830 e->goto_block = d->new_block;
5831 #ifdef ENABLE_CHECKING
5832 else if (block != d->new_block)
5834 while (block && block != d->orig_block)
5835 block = BLOCK_SUPERCONTEXT (block);
5836 gcc_assert (block);
5838 #endif
5842 /* Examine the statements in BB (which is in SRC_CFUN); find and return
5843 the outermost EH region. Use REGION as the incoming base EH region. */
5845 static eh_region
5846 find_outermost_region_in_block (struct function *src_cfun,
5847 basic_block bb, eh_region region)
5849 gimple_stmt_iterator si;
5851 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5853 gimple stmt = gsi_stmt (si);
5854 eh_region stmt_region;
5855 int lp_nr;
5857 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
5858 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
5859 if (stmt_region)
5861 if (region == NULL)
5862 region = stmt_region;
5863 else if (stmt_region != region)
5865 region = eh_region_outermost (src_cfun, stmt_region, region);
5866 gcc_assert (region != NULL);
5871 return region;
5874 static tree
5875 new_label_mapper (tree decl, void *data)
5877 htab_t hash = (htab_t) data;
5878 struct tree_map *m;
5879 void **slot;
5881 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
5883 m = XNEW (struct tree_map);
5884 m->hash = DECL_UID (decl);
5885 m->base.from = decl;
5886 m->to = create_artificial_label (UNKNOWN_LOCATION);
5887 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
5888 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
5889 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
5891 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
5892 gcc_assert (*slot == NULL);
5894 *slot = m;
5896 return m->to;
5899 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
5900 subblocks. */
5902 static void
5903 replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
5904 tree to_context)
5906 tree *tp, t;
5908 for (tp = &BLOCK_VARS (block); *tp; tp = &TREE_CHAIN (*tp))
5910 t = *tp;
5911 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
5912 continue;
5913 replace_by_duplicate_decl (&t, vars_map, to_context);
5914 if (t != *tp)
5916 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
5918 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
5919 DECL_HAS_VALUE_EXPR_P (t) = 1;
5921 TREE_CHAIN (t) = TREE_CHAIN (*tp);
5922 *tp = t;
5926 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
5927 replace_block_vars_by_duplicates (block, vars_map, to_context);
5930 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
5931 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
5932 single basic block in the original CFG and the new basic block is
5933 returned. DEST_CFUN must not have a CFG yet.
5935 Note that the region need not be a pure SESE region. Blocks inside
5936 the region may contain calls to abort/exit. The only restriction
5937 is that ENTRY_BB should be the only entry point and it must
5938 dominate EXIT_BB.
5940 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
5941 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
5942 to the new function.
5944 All local variables referenced in the region are assumed to be in
5945 the corresponding BLOCK_VARS and unexpanded variable lists
5946 associated with DEST_CFUN. */
5948 basic_block
5949 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
5950 basic_block exit_bb, tree orig_block)
5952 VEC(basic_block,heap) *bbs, *dom_bbs;
5953 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
5954 basic_block after, bb, *entry_pred, *exit_succ, abb;
5955 struct function *saved_cfun = cfun;
5956 int *entry_flag, *exit_flag;
5957 unsigned *entry_prob, *exit_prob;
5958 unsigned i, num_entry_edges, num_exit_edges;
5959 edge e;
5960 edge_iterator ei;
5961 htab_t new_label_map;
5962 struct pointer_map_t *vars_map, *eh_map;
5963 struct loop *loop = entry_bb->loop_father;
5964 struct move_stmt_d d;
5966 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
5967 region. */
5968 gcc_assert (entry_bb != exit_bb
5969 && (!exit_bb
5970 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
5972 /* Collect all the blocks in the region. Manually add ENTRY_BB
5973 because it won't be added by dfs_enumerate_from. */
5974 bbs = NULL;
5975 VEC_safe_push (basic_block, heap, bbs, entry_bb);
5976 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
5978 /* The blocks that used to be dominated by something in BBS will now be
5979 dominated by the new block. */
5980 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
5981 VEC_address (basic_block, bbs),
5982 VEC_length (basic_block, bbs));
5984 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
5985 the predecessor edges to ENTRY_BB and the successor edges to
5986 EXIT_BB so that we can re-attach them to the new basic block that
5987 will replace the region. */
5988 num_entry_edges = EDGE_COUNT (entry_bb->preds);
5989 entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
5990 entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
5991 entry_prob = XNEWVEC (unsigned, num_entry_edges);
5992 i = 0;
5993 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
5995 entry_prob[i] = e->probability;
5996 entry_flag[i] = e->flags;
5997 entry_pred[i++] = e->src;
5998 remove_edge (e);
6001 if (exit_bb)
6003 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6004 exit_succ = (basic_block *) xcalloc (num_exit_edges,
6005 sizeof (basic_block));
6006 exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
6007 exit_prob = XNEWVEC (unsigned, num_exit_edges);
6008 i = 0;
6009 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6011 exit_prob[i] = e->probability;
6012 exit_flag[i] = e->flags;
6013 exit_succ[i++] = e->dest;
6014 remove_edge (e);
6017 else
6019 num_exit_edges = 0;
6020 exit_succ = NULL;
6021 exit_flag = NULL;
6022 exit_prob = NULL;
6025 /* Switch context to the child function to initialize DEST_FN's CFG. */
6026 gcc_assert (dest_cfun->cfg == NULL);
6027 push_cfun (dest_cfun);
6029 init_empty_tree_cfg ();
6031 /* Initialize EH information for the new function. */
6032 eh_map = NULL;
6033 new_label_map = NULL;
6034 if (saved_cfun->eh)
6036 eh_region region = NULL;
6038 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
6039 region = find_outermost_region_in_block (saved_cfun, bb, region);
6041 init_eh_for_function ();
6042 if (region != NULL)
6044 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6045 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
6046 new_label_mapper, new_label_map);
6050 pop_cfun ();
6052 /* Move blocks from BBS into DEST_CFUN. */
6053 gcc_assert (VEC_length (basic_block, bbs) >= 2);
6054 after = dest_cfun->cfg->x_entry_block_ptr;
6055 vars_map = pointer_map_create ();
6057 memset (&d, 0, sizeof (d));
6058 d.orig_block = orig_block;
6059 d.new_block = DECL_INITIAL (dest_cfun->decl);
6060 d.from_context = cfun->decl;
6061 d.to_context = dest_cfun->decl;
6062 d.vars_map = vars_map;
6063 d.new_label_map = new_label_map;
6064 d.eh_map = eh_map;
6065 d.remap_decls_p = true;
6067 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
6069 /* No need to update edge counts on the last block. It has
6070 already been updated earlier when we detached the region from
6071 the original CFG. */
6072 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
6073 after = bb;
6076 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6077 if (orig_block)
6079 tree block;
6080 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6081 == NULL_TREE);
6082 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6083 = BLOCK_SUBBLOCKS (orig_block);
6084 for (block = BLOCK_SUBBLOCKS (orig_block);
6085 block; block = BLOCK_CHAIN (block))
6086 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
6087 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
6090 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
6091 vars_map, dest_cfun->decl);
6093 if (new_label_map)
6094 htab_delete (new_label_map);
6095 if (eh_map)
6096 pointer_map_destroy (eh_map);
6097 pointer_map_destroy (vars_map);
6099 /* Rewire the entry and exit blocks. The successor to the entry
6100 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6101 the child function. Similarly, the predecessor of DEST_FN's
6102 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6103 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6104 various CFG manipulation function get to the right CFG.
6106 FIXME, this is silly. The CFG ought to become a parameter to
6107 these helpers. */
6108 push_cfun (dest_cfun);
6109 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
6110 if (exit_bb)
6111 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
6112 pop_cfun ();
6114 /* Back in the original function, the SESE region has disappeared,
6115 create a new basic block in its place. */
6116 bb = create_empty_bb (entry_pred[0]);
6117 if (current_loops)
6118 add_bb_to_loop (bb, loop);
6119 for (i = 0; i < num_entry_edges; i++)
6121 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6122 e->probability = entry_prob[i];
6125 for (i = 0; i < num_exit_edges; i++)
6127 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6128 e->probability = exit_prob[i];
6131 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6132 for (i = 0; VEC_iterate (basic_block, dom_bbs, i, abb); i++)
6133 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6134 VEC_free (basic_block, heap, dom_bbs);
6136 if (exit_bb)
6138 free (exit_prob);
6139 free (exit_flag);
6140 free (exit_succ);
6142 free (entry_prob);
6143 free (entry_flag);
6144 free (entry_pred);
6145 VEC_free (basic_block, heap, bbs);
6147 return bb;
/* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree-pass.h).
   Depending on how far FN has been lowered, this emits a CFG-based dump,
   a flat GIMPLE sequence dump, or a GENERIC tree dump.  */

void
dump_function_to_file (tree fn, FILE *file, int flags)
{
  tree arg, vars, var;
  struct function *dsf;
  bool ignore_topmost_bind = false, any_var = false;
  basic_block bb;
  tree chain;

  /* Print the signature: printable name followed by the argument list.  */
  fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));

  arg = DECL_ARGUMENTS (fn);
  while (arg)
    {
      print_generic_expr (file, TREE_TYPE (arg), dump_flags);
      fprintf (file, " ");
      print_generic_expr (file, arg, dump_flags);
      if (flags & TDF_VERBOSE)
	print_node (file, "", arg, 4);
      if (TREE_CHAIN (arg))
	fprintf (file, ", ");
      arg = TREE_CHAIN (arg);
    }
  fprintf (file, ")\n");

  if (flags & TDF_VERBOSE)
    print_node (file, "", fn, 2);

  /* Optionally dump the EH region tree of FN.  */
  dsf = DECL_STRUCT_FUNCTION (fn);
  if (dsf && (flags & TDF_EH))
    dump_eh_tree (file, dsf);

  /* With TDF_RAW and no GIMPLE body, fall back to a raw node dump.  */
  if (flags & TDF_RAW && !gimple_has_body_p (fn))
    {
      dump_node (fn, TDF_SLIM | flags, file);
      return;
    }

  /* Switch CFUN to point to FN.  */
  push_cfun (DECL_STRUCT_FUNCTION (fn));

  /* When GIMPLE is lowered, the variables are no longer available in
     BIND_EXPRs, so display them separately.  */
  if (cfun && cfun->decl == fn && cfun->local_decls)
    {
      ignore_topmost_bind = true;

      fprintf (file, "{\n");
      for (vars = cfun->local_decls; vars; vars = TREE_CHAIN (vars))
	{
	  var = TREE_VALUE (vars);

	  print_generic_decl (file, var, flags);
	  if (flags & TDF_VERBOSE)
	    print_node (file, "", var, 4);
	  fprintf (file, "\n");

	  /* Remember that we printed at least one local so the body dump
	     below can add a separating blank line.  */
	  any_var = true;
	}
    }

  if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
    {
      /* If the CFG has been built, emit a CFG-based dump.  */
      check_bb_profile (ENTRY_BLOCK_PTR, file);
      if (!ignore_topmost_bind)
	fprintf (file, "{\n");

      if (any_var && n_basic_blocks)
	fprintf (file, "\n");

      FOR_EACH_BB (bb)
	gimple_dump_bb (bb, file, 2, flags);

      fprintf (file, "}\n");
      check_bb_profile (EXIT_BLOCK_PTR, file);
    }
  else if (DECL_SAVED_TREE (fn) == NULL)
    {
      /* The function is now in GIMPLE form but the CFG has not been
	 built yet.  Emit the single sequence of GIMPLE statements
	 that make up its body.  */
      gimple_seq body = gimple_body (fn);

      /* A body that is exactly one GIMPLE_BIND prints without an extra
	 brace wrapper; anything else gets wrapped and indented.  */
      if (gimple_seq_first_stmt (body)
	  && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
	  && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
	print_gimple_seq (file, body, 0, flags);
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");

	  if (any_var)
	    fprintf (file, "\n");

	  print_gimple_seq (file, body, 2, flags);
	  fprintf (file, "}\n");
	}
    }
  else
    {
      int indent;

      /* Make a tree based dump.  */
      chain = DECL_SAVED_TREE (fn);

      if (chain && TREE_CODE (chain) == BIND_EXPR)
	{
	  if (ignore_topmost_bind)
	    {
	      /* Locals were already printed above; skip the topmost bind
		 and print only its body, indented.  */
	      chain = BIND_EXPR_BODY (chain);
	      indent = 2;
	    }
	  else
	    indent = 0;
	}
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");
	  indent = 2;
	}

      if (any_var)
	fprintf (file, "\n");

      print_generic_stmt_indented (file, chain, flags, indent);
      if (ignore_topmost_bind)
	fprintf (file, "}\n");
    }

  fprintf (file, "\n\n");

  /* Restore CFUN.  */
  pop_cfun ();
}
/* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h).
   Convenience wrapper around dump_function_to_file for use from a
   debugger.  */

void
debug_function (tree fn, int flags)
{
  dump_function_to_file (fn, stderr, flags);
}
6302 /* Print on FILE the indexes for the predecessors of basic_block BB. */
6304 static void
6305 print_pred_bbs (FILE *file, basic_block bb)
6307 edge e;
6308 edge_iterator ei;
6310 FOR_EACH_EDGE (e, ei, bb->preds)
6311 fprintf (file, "bb_%d ", e->src->index);
6315 /* Print on FILE the indexes for the successors of basic_block BB. */
6317 static void
6318 print_succ_bbs (FILE *file, basic_block bb)
6320 edge e;
6321 edge_iterator ei;
6323 FOR_EACH_EDGE (e, ei, bb->succs)
6324 fprintf (file, "bb_%d ", e->dest->index);
6327 /* Print to FILE the basic block BB following the VERBOSITY level. */
6329 void
6330 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
6332 char *s_indent = (char *) alloca ((size_t) indent + 1);
6333 memset ((void *) s_indent, ' ', (size_t) indent);
6334 s_indent[indent] = '\0';
6336 /* Print basic_block's header. */
6337 if (verbosity >= 2)
6339 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
6340 print_pred_bbs (file, bb);
6341 fprintf (file, "}, succs = {");
6342 print_succ_bbs (file, bb);
6343 fprintf (file, "})\n");
6346 /* Print basic_block's body. */
6347 if (verbosity >= 3)
6349 fprintf (file, "%s {\n", s_indent);
6350 gimple_dump_bb (bb, file, indent + 4, TDF_VOPS|TDF_MEMSYMS);
6351 fprintf (file, "%s }\n", s_indent);
6355 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
6357 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
6358 VERBOSITY level this outputs the contents of the loop, or just its
6359 structure. */
6361 static void
6362 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
6364 char *s_indent;
6365 basic_block bb;
6367 if (loop == NULL)
6368 return;
6370 s_indent = (char *) alloca ((size_t) indent + 1);
6371 memset ((void *) s_indent, ' ', (size_t) indent);
6372 s_indent[indent] = '\0';
6374 /* Print loop's header. */
6375 fprintf (file, "%sloop_%d (header = %d, latch = %d", s_indent,
6376 loop->num, loop->header->index, loop->latch->index);
6377 fprintf (file, ", niter = ");
6378 print_generic_expr (file, loop->nb_iterations, 0);
6380 if (loop->any_upper_bound)
6382 fprintf (file, ", upper_bound = ");
6383 dump_double_int (file, loop->nb_iterations_upper_bound, true);
6386 if (loop->any_estimate)
6388 fprintf (file, ", estimate = ");
6389 dump_double_int (file, loop->nb_iterations_estimate, true);
6391 fprintf (file, ")\n");
6393 /* Print loop's body. */
6394 if (verbosity >= 1)
6396 fprintf (file, "%s{\n", s_indent);
6397 FOR_EACH_BB (bb)
6398 if (bb->loop_father == loop)
6399 print_loops_bb (file, bb, indent, verbosity);
6401 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
6402 fprintf (file, "%s}\n", s_indent);
6406 /* Print the LOOP and its sibling loops on FILE, indented INDENT
6407 spaces. Following VERBOSITY level this outputs the contents of the
6408 loop, or just its structure. */
6410 static void
6411 print_loop_and_siblings (FILE *file, struct loop *loop, int indent, int verbosity)
6413 if (loop == NULL)
6414 return;
6416 print_loop (file, loop, indent, verbosity);
6417 print_loop_and_siblings (file, loop->next, indent, verbosity);
6420 /* Follow a CFG edge from the entry point of the program, and on entry
6421 of a loop, pretty print the loop structure on FILE. */
6423 void
6424 print_loops (FILE *file, int verbosity)
6426 basic_block bb;
6428 bb = ENTRY_BLOCK_PTR;
6429 if (bb && bb->loop_father)
6430 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
/* Debugging loops structure at tree level, at some VERBOSITY level.
   Convenience wrapper for use from a debugger; dumps to stderr.  */

void
debug_loops (int verbosity)
{
  print_loops (stderr, verbosity);
}
/* Print on stderr the code of LOOP, at some VERBOSITY level.
   A NULL LOOP is a no-op (print_loop returns early).  */

void
debug_loop (struct loop *loop, int verbosity)
{
  print_loop (stderr, loop, 0, verbosity);
}
/* Print on stderr the code of loop number NUM, at some VERBOSITY
   level.  */

void
debug_loop_num (unsigned num, int verbosity)
{
  /* NOTE(review): get_loop may return NULL for an out-of-range NUM —
     debug_loop tolerates a NULL loop, so that is safe.  */
  debug_loop (get_loop (num), verbosity);
}
6459 /* Return true if BB ends with a call, possibly followed by some
6460 instructions that must stay with the call. Return false,
6461 otherwise. */
6463 static bool
6464 gimple_block_ends_with_call_p (basic_block bb)
6466 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6467 return is_gimple_call (gsi_stmt (gsi));
6471 /* Return true if BB ends with a conditional branch. Return false,
6472 otherwise. */
6474 static bool
6475 gimple_block_ends_with_condjump_p (const_basic_block bb)
6477 gimple stmt = last_stmt (CONST_CAST_BB (bb));
6478 return (stmt && gimple_code (stmt) == GIMPLE_COND);
/* Return true if we need to add fake edge to exit at statement T.
   Helper function for gimple_flow_call_edges_add.  */

static bool
need_fake_edge_p (gimple t)
{
  tree fndecl = NULL_TREE;
  int call_flags = 0;

  /* NORETURN and LONGJMP calls already have an edge to exit.
     CONST and PURE calls do not need one.
     We don't currently check for CONST and PURE here, although
     it would be a good idea, because those attributes are
     figured out from the RTL in mark_constant_function, and
     the counter incrementation code from -fprofile-arcs
     leads to different results from -fbranch-probabilities.  */
  if (is_gimple_call (t))
    {
      fndecl = gimple_call_fndecl (t);
      call_flags = gimple_call_flags (t);
    }

  /* Nothrow built-ins that cannot return twice are known to fall
     through; they never need a fake edge.  */
  if (is_gimple_call (t)
      && fndecl
      && DECL_BUILT_IN (fndecl)
      && (call_flags & ECF_NOTHROW)
      && !(call_flags & ECF_RETURNS_TWICE)
      /* fork() doesn't really return twice, but the effect of
	 wrapping it in __gcov_fork() which calls __gcov_flush()
	 and clears the counters before forking has the same
	 effect as returning twice.  Force a fake edge.  */
      && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
    return false;

  /* Any other call that is not known noreturn may fail to return.  */
  if (is_gimple_call (t)
      && !(call_flags & ECF_NORETURN))
    return true;

  /* Volatile asms and basic (no-operand) asms are conservatively
     treated as possibly not falling through.  */
  if (gimple_code (t) == GIMPLE_ASM
      && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
    return true;

  return false;
}
/* Add fake edges to the function exit for any non constant and non
   noreturn calls, volatile inline assembly in the bitmap of blocks
   specified by BLOCKS or to the whole CFG if BLOCKS is zero.  Return
   the number of blocks that were split.

   The goal is to expose cases in which entering a basic block does
   not imply that all subsequent instructions must be executed.  */

static int
gimple_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block;
  bool check_last_block = false;

  /* Nothing to do in a CFG containing only entry and exit.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR->prev_bb;
      gimple_stmt_iterator gsi = gsi_last_bb (bb);
      gimple t = NULL;

      if (!gsi_end_p (gsi))
	t = gsi_stmt (gsi);

      if (t && need_fake_edge_p (t))
	{
	  edge e;

	  e = find_edge (bb, EXIT_BLOCK_PTR);
	  if (e)
	    {
	      /* Insert a nop on the fallthru edge, pushing the call out
		 of the last block so the fake edge cannot be confused
		 with the fallthru edge.  */
	      gsi_insert_on_edge (e, gimple_build_nop ());
	      gsi_commit_edge_inserts ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */
  for (i = 0; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      gimple_stmt_iterator gsi;
      gimple stmt, last_stmt;

      if (!bb)
	continue;

      if (blocks && !TEST_BIT (blocks, i))
	continue;

      /* Walk the statements of BB from last to first; splitting below
	 keeps the part already scanned in the (renumbered) tail block.  */
      gsi = gsi_last_bb (bb);
      if (!gsi_end_p (gsi))
	{
	  last_stmt = gsi_stmt (gsi);
	  do
	    {
	      stmt = gsi_stmt (gsi);
	      if (need_fake_edge_p (stmt))
		{
		  edge e;

		  /* The handling above of the final block before the
		     epilogue should be enough to verify that there is
		     no edge to the exit block in CFG already.
		     Calling make_edge in such case would cause us to
		     mark that edge as fake and remove it later.  */
#ifdef ENABLE_CHECKING
		  if (stmt == last_stmt)
		    {
		      e = find_edge (bb, EXIT_BLOCK_PTR);
		      gcc_assert (e == NULL);
		    }
#endif

		  /* Note that the following may create a new basic block
		     and renumber the existing basic blocks.  */
		  if (stmt != last_stmt)
		    {
		      e = split_block (bb, stmt);
		      if (e)
			blocks_split++;
		    }
		  make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
		}
	      gsi_prev (&gsi);
	    }
	  while (!gsi_end_p (gsi));
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
/* Purge dead abnormal call edges from basic block BB.  Returns true
   if any edge (EH or abnormal) was removed.  */

bool
gimple_purge_dead_abnormal_call_edges (basic_block bb)
{
  /* EH edges are purged first; abnormal edges only matter when the
     function contains nonlocal labels.  */
  bool changed = gimple_purge_dead_eh_edges (bb);

  if (cfun->has_nonlocal_label)
    {
      gimple stmt = last_stmt (bb);
      edge_iterator ei;
      edge e;

      /* If the block no longer ends in a statement that can perform an
	 abnormal goto, all of its abnormal successor edges are dead.  */
      if (!(stmt && stmt_can_make_abnormal_goto (stmt)))
	for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	  {
	    if (e->flags & EDGE_ABNORMAL)
	      {
		remove_edge (e);
		changed = true;
	      }
	    else
	      /* Only advance the iterator when nothing was removed;
		 remove_edge compacts the edge vector under us.  */
	      ei_next (&ei);
	  }

      /* See gimple_purge_dead_eh_edges below.  */
      if (changed)
	free_dominance_info (CDI_DOMINATORS);
    }

  return changed;
}
/* Removes edge E and all the blocks dominated by it, and updates dominance
   information.  The IL in E->src needs to be updated separately.
   If dominance info is not available, only the edge E is removed.  */

void
remove_edge_and_dominated_blocks (edge e)
{
  VEC (basic_block, heap) *bbs_to_remove = NULL;
  VEC (basic_block, heap) *bbs_to_fix_dom = NULL;
  bitmap df, df_idom;
  edge f;
  edge_iterator ei;
  bool none_removed = false;
  unsigned i;
  basic_block bb, dbb;
  bitmap_iterator bi;

  /* Without dominator info we cannot compute what becomes unreachable;
     just remove the edge.  */
  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      remove_edge (e);
      return;
    }

  /* No updating is needed for edges to exit.  */
  if (e->dest == EXIT_BLOCK_PTR)
    {
      if (cfgcleanup_altered_bbs)
	bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      remove_edge (e);
      return;
    }

  /* First, we find the basic blocks to remove.  If E->dest has a predecessor
     that is not dominated by E->dest, then this set is empty.  Otherwise,
     all the basic blocks dominated by E->dest are removed.

     Also, to DF_IDOM we store the immediate dominators of the blocks in
     the dominance frontier of E (i.e., of the successors of the
     removed blocks, if there are any, and of E->dest otherwise).  */
  FOR_EACH_EDGE (f, ei, e->dest->preds)
    {
      if (f == e)
	continue;

      if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
	{
	  none_removed = true;
	  break;
	}
    }

  df = BITMAP_ALLOC (NULL);
  df_idom = BITMAP_ALLOC (NULL);

  if (none_removed)
    bitmap_set_bit (df_idom,
		    get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
  else
    {
      bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
      /* DF := successors of removed blocks that survive (i.e. that are
	 not themselves removed) — the dominance frontier of E.  */
      for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
	{
	  FOR_EACH_EDGE (f, ei, bb->succs)
	    {
	      if (f->dest != EXIT_BLOCK_PTR)
		bitmap_set_bit (df, f->dest->index);
	    }
	}
      for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
	bitmap_clear_bit (df, bb->index);

      EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
	{
	  bb = BASIC_BLOCK (i);
	  bitmap_set_bit (df_idom,
			  get_immediate_dominator (CDI_DOMINATORS, bb)->index);
	}
    }

  if (cfgcleanup_altered_bbs)
    {
      /* Record the set of the altered basic blocks.  */
      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      bitmap_ior_into (cfgcleanup_altered_bbs, df);
    }

  /* Remove E and the cancelled blocks.  */
  if (none_removed)
    remove_edge (e);
  else
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = VEC_length (basic_block, bbs_to_remove); i-- > 0; )
	delete_basic_block (VEC_index (basic_block, bbs_to_remove, i));
    }

  /* Update the dominance information.  The immediate dominator may change only
     for blocks whose immediate dominator belongs to DF_IDOM:

     Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
     removal.  Let Z the arbitrary block such that idom(Z) = Y and
     Z dominates X after the removal.  Before removal, there exists a path P
     from Y to X that avoids Z.  Let F be the last edge on P that is
     removed, and let W = F->dest.  Before removal, idom(W) = Y (since Y
     dominates W, and because of P, Z does not dominate W), and W belongs to
     the dominance frontier of E.  Therefore, Y belongs to DF_IDOM.  */
  EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
    {
      bb = BASIC_BLOCK (i);
      for (dbb = first_dom_son (CDI_DOMINATORS, bb);
	   dbb;
	   dbb = next_dom_son (CDI_DOMINATORS, dbb))
	VEC_safe_push (basic_block, heap, bbs_to_fix_dom, dbb);
    }

  iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);

  BITMAP_FREE (df);
  BITMAP_FREE (df_idom);
  VEC_free (basic_block, heap, bbs_to_remove);
  VEC_free (basic_block, heap, bbs_to_fix_dom);
}
6807 /* Purge dead EH edges from basic block BB. */
6809 bool
6810 gimple_purge_dead_eh_edges (basic_block bb)
6812 bool changed = false;
6813 edge e;
6814 edge_iterator ei;
6815 gimple stmt = last_stmt (bb);
6817 if (stmt && stmt_can_throw_internal (stmt))
6818 return false;
6820 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6822 if (e->flags & EDGE_EH)
6824 remove_edge_and_dominated_blocks (e);
6825 changed = true;
6827 else
6828 ei_next (&ei);
6831 return changed;
6834 bool
6835 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
6837 bool changed = false;
6838 unsigned i;
6839 bitmap_iterator bi;
6841 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
6843 basic_block bb = BASIC_BLOCK (i);
6845 /* Earlier gimple_purge_dead_eh_edges could have removed
6846 this basic block already. */
6847 gcc_assert (bb || changed);
6848 if (bb != NULL)
6849 changed |= gimple_purge_dead_eh_edges (bb);
6852 return changed;
6855 /* This function is called whenever a new edge is created or
6856 redirected. */
6858 static void
6859 gimple_execute_on_growing_pred (edge e)
6861 basic_block bb = e->dest;
6863 if (!gimple_seq_empty_p (phi_nodes (bb)))
6864 reserve_phi_args_for_new_edge (bb);
6867 /* This function is called immediately before edge E is removed from
6868 the edge vector E->dest->preds. */
6870 static void
6871 gimple_execute_on_shrinking_pred (edge e)
6873 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
6874 remove_phi_args (e);
6877 /*---------------------------------------------------------------------------
6878 Helper functions for Loop versioning
6879 ---------------------------------------------------------------------------*/
/* Adjust phi nodes for 'first' basic block.  'second' basic block is a copy
   of 'first'.  Both of them are dominated by 'new_head' basic block.  When
   'new_head' was created by 'second's incoming edge it received phi arguments
   on the edge by split_edge().  Later, additional edge 'e' was created to
   connect 'new_head' and 'first'.  Now this routine adds phi args on this
   additional edge 'e' that new_head to second edge received as part of edge
   splitting.  */

static void
gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
				  basic_block new_head, edge e)
{
  gimple phi1, phi2;
  gimple_stmt_iterator psi1, psi2;
  tree def;
  edge e2 = find_edge (new_head, second);

  /* Because NEW_HEAD has been created by splitting SECOND's incoming
     edge, we should always have an edge from NEW_HEAD to SECOND.  */
  gcc_assert (e2 != NULL);

  /* Browse all 'second' basic block phi nodes and add phi args to
     edge 'e' for 'first' head.  PHI args are always in correct order.
     The two PHI chains are walked in lockstep: the I-th PHI of FIRST
     corresponds to the I-th PHI of SECOND (SECOND is a copy of FIRST).  */

  for (psi2 = gsi_start_phis (second),
       psi1 = gsi_start_phis (first);
       !gsi_end_p (psi2) && !gsi_end_p (psi1);
       gsi_next (&psi2), gsi_next (&psi1))
    {
      phi1 = gsi_stmt (psi1);
      phi2 = gsi_stmt (psi2);
      /* Reuse the value SECOND's PHI receives on the NEW_HEAD->SECOND
	 edge as FIRST's argument on the new edge E.  */
      def = PHI_ARG_DEF (phi2, e2->dest_idx);
      add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
    }
}
/* Adds a if else statement to COND_BB with condition COND_EXPR.
   SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
   the destination of the ELSE part.  */

static void
gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
			       basic_block second_head ATTRIBUTE_UNUSED,
			       basic_block cond_bb, void *cond_e)
{
  gimple_stmt_iterator gsi;
  gimple new_cond_expr;
  tree cond_expr = (tree) cond_e;
  edge e0;

  /* Build new conditional expr */
  new_cond_expr = gimple_build_cond_from_tree (cond_expr,
					       NULL_TREE, NULL_TREE);

  /* Add new cond in cond_bb.  */
  gsi = gsi_last_bb (cond_bb);
  gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);

  /* Adjust edges appropriately to connect new head with first head
     as well as second head.  The existing single successor edge (which
     goes to FIRST_HEAD, the ELSE destination) stops being a fallthru
     and becomes the false edge of the new condition.  */
  e0 = single_succ_edge (cond_bb);
  e0->flags &= ~EDGE_FALLTHRU;
  e0->flags |= EDGE_FALSE_VALUE;
}
/* CFG hook table for functions in GIMPLE form; see cfghooks.h for the
   contract of each slot.  */

struct cfg_hooks gimple_cfg_hooks = {
  "gimple",
  gimple_verify_flow_info,	/* verify_flow_info */
  gimple_dump_bb,		/* dump_bb */
  create_bb,			/* create_basic_block */
  gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
  gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
  gimple_can_remove_branch_p,	/* can_remove_branch_p */
  remove_bb,			/* delete_basic_block */
  gimple_split_block,		/* split_block */
  gimple_move_block_after,	/* move_block_after */
  gimple_can_merge_blocks_p,	/* can_merge_blocks_p */
  gimple_merge_blocks,		/* merge_blocks */
  gimple_predict_edge,		/* predict_edge */
  gimple_predicted_by_p,	/* predicted_by_p */
  gimple_can_duplicate_bb_p,	/* can_duplicate_block_p */
  gimple_duplicate_bb,		/* duplicate_block */
  gimple_split_edge,		/* split_edge */
  gimple_make_forwarder_block,	/* make_forwarder_block */
  NULL,				/* tidy_fallthru_edge */
  gimple_block_ends_with_call_p,/* block_ends_with_call_p */
  gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
  gimple_flow_call_edges_add,	/* flow_call_edges_add */
  gimple_execute_on_growing_pred,	/* execute_on_growing_pred */
  gimple_execute_on_shrinking_pred,	/* execute_on_shrinking_pred */
  gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
  gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
  extract_true_false_edges_from_block, /* extract_cond_bb_edges */
  flush_pending_stmts		/* flush_pending_stmts */
};
/* Split all critical edges.  Always returns 0 (pass todo flags).  */

static unsigned int
split_critical_edges (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
     expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
     mappings around the calls to split_edge.  */
  start_recording_case_labels ();
  FOR_ALL_BB (bb)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
	    split_edge (e);
	  /* PRE inserts statements to edges and expects that
	     since split_critical_edges was done beforehand, committing edge
	     insertions will not split more edges.  In addition to critical
	     edges we must split edges that have multiple successors and
	     end by control flow statements, such as RESX.
	     Go ahead and split them too.  This matches the logic in
	     gimple_find_edge_insert_loc.  */
	  else if ((!single_pred_p (e->dest)
		    || !gimple_seq_empty_p (phi_nodes (e->dest))
		    || e->dest == EXIT_BLOCK_PTR)
		   && e->src != ENTRY_BLOCK_PTR
		   && !(e->flags & EDGE_ABNORMAL))
	    {
	      gimple_stmt_iterator gsi;

	      gsi = gsi_last_bb (e->src);
	      if (!gsi_end_p (gsi)
		  && stmt_ends_bb_p (gsi_stmt (gsi))
		  && gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN)
		split_edge (e);
	    }
	}
    }
  end_recording_case_labels ();
  return 0;
}
/* Pass descriptor for critical-edge splitting (see tree-pass.h).  */

struct gimple_opt_pass pass_split_crit_edges =
{
 {
  GIMPLE_PASS,
  "crited",				/* name */
  NULL,					/* gate */
  split_critical_edges,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_SPLIT_EDGES,			/* tv_id */
  PROP_cfg,				/* properties_required */
  PROP_no_crit_edges,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_flow	/* todo_flags_finish */
 }
};
7046 /* Build a ternary operation and gimplify it. Emit code before GSI.
7047 Return the gimple_val holding the result. */
7049 tree
7050 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
7051 tree type, tree a, tree b, tree c)
7053 tree ret;
7054 location_t loc = gimple_location (gsi_stmt (*gsi));
7056 ret = fold_build3_loc (loc, code, type, a, b, c);
7057 STRIP_NOPS (ret);
7059 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7060 GSI_SAME_STMT);
7063 /* Build a binary operation and gimplify it. Emit code before GSI.
7064 Return the gimple_val holding the result. */
7066 tree
7067 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
7068 tree type, tree a, tree b)
7070 tree ret;
7072 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
7073 STRIP_NOPS (ret);
7075 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7076 GSI_SAME_STMT);
7079 /* Build a unary operation and gimplify it. Emit code before GSI.
7080 Return the gimple_val holding the result. */
7082 tree
7083 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
7084 tree a)
7086 tree ret;
7088 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
7089 STRIP_NOPS (ret);
7091 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7092 GSI_SAME_STMT);
/* Emit return warnings: warn when a noreturn function actually returns,
   and when control reaches the end of a non-void function without a
   value.  */

static unsigned int
execute_warn_function_return (void)
{
  source_location location;
  gimple last;
  edge e;
  edge_iterator ei;

  /* If we have a path to EXIT, then we do return.  */
  if (TREE_THIS_VOLATILE (cfun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
    {
      location = UNKNOWN_LOCATION;
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  /* Prefer the location of an explicit return statement.
	     NOTE(review): last_stmt can in principle return NULL for an
	     empty predecessor block; this assumes every exit predecessor
	     ends in a statement — confirm.  */
	  last = last_stmt (e->src);
	  if (gimple_code (last) == GIMPLE_RETURN
	      && (location = gimple_location (last)) != UNKNOWN_LOCATION)
	    break;
	}
      if (location == UNKNOWN_LOCATION)
	location = cfun->function_end_locus;
      warning_at (location, 0, "%<noreturn%> function does return");
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type
	   && !TREE_NO_WARNING (cfun->decl)
	   && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
	   && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  gimple last = last_stmt (e->src);
	  if (gimple_code (last) == GIMPLE_RETURN
	      && gimple_return_retval (last) == NULL
	      && !gimple_no_warning_p (last))
	    {
	      location = gimple_location (last);
	      if (location == UNKNOWN_LOCATION)
		location = cfun->function_end_locus;
	      warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
	      /* Warn at most once per function.  */
	      TREE_NO_WARNING (cfun->decl) = 1;
	      break;
	    }
	}
    }
  return 0;
}
7151 /* Given a basic block B which ends with a conditional and has
7152 precisely two successors, determine which of the edges is taken if
7153 the conditional is true and which is taken if the conditional is
7154 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
7156 void
7157 extract_true_false_edges_from_block (basic_block b,
7158 edge *true_edge,
7159 edge *false_edge)
7161 edge e = EDGE_SUCC (b, 0);
7163 if (e->flags & EDGE_TRUE_VALUE)
7165 *true_edge = e;
7166 *false_edge = EDGE_SUCC (b, 1);
7168 else
7170 *false_edge = e;
7171 *true_edge = EDGE_SUCC (b, 1);
/* Pass descriptor for the return-warning pass (see tree-pass.h).  */

struct gimple_opt_pass pass_warn_function_return =
{
 {
  GIMPLE_PASS,
  "*warn_function_return",		/* name */
  NULL,					/* gate */
  execute_warn_function_return,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 }
};
7194 /* Emit noreturn warnings. */
7196 static unsigned int
7197 execute_warn_function_noreturn (void)
7199 if (warn_missing_noreturn
7200 && !TREE_THIS_VOLATILE (cfun->decl)
7201 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
7202 && !lang_hooks.missing_noreturn_ok_p (cfun->decl))
7203 warning_at (DECL_SOURCE_LOCATION (cfun->decl), OPT_Wmissing_noreturn,
7204 "function might be possible candidate "
7205 "for attribute %<noreturn%>");
7206 return 0;
7209 struct gimple_opt_pass pass_warn_function_noreturn =
7212 GIMPLE_PASS,
7213 "*warn_function_noreturn", /* name */
7214 NULL, /* gate */
7215 execute_warn_function_noreturn, /* execute */
7216 NULL, /* sub */
7217 NULL, /* next */
7218 0, /* static_pass_number */
7219 TV_NONE, /* tv_id */
7220 PROP_cfg, /* properties_required */
7221 0, /* properties_provided */
7222 0, /* properties_destroyed */
7223 0, /* todo_flags_start */
7224 0 /* todo_flags_finish */
7229 /* Walk a gimplified function and warn for functions whose return value is
7230 ignored and attribute((warn_unused_result)) is set. This is done before
7231 inlining, so we don't have to worry about that. */
7233 static void
7234 do_warn_unused_result (gimple_seq seq)
7236 tree fdecl, ftype;
7237 gimple_stmt_iterator i;
7239 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
7241 gimple g = gsi_stmt (i);
7243 switch (gimple_code (g))
7245 case GIMPLE_BIND:
7246 do_warn_unused_result (gimple_bind_body (g));
7247 break;
7248 case GIMPLE_TRY:
7249 do_warn_unused_result (gimple_try_eval (g));
7250 do_warn_unused_result (gimple_try_cleanup (g));
7251 break;
7252 case GIMPLE_CATCH:
7253 do_warn_unused_result (gimple_catch_handler (g));
7254 break;
7255 case GIMPLE_EH_FILTER:
7256 do_warn_unused_result (gimple_eh_filter_failure (g));
7257 break;
7259 case GIMPLE_CALL:
7260 if (gimple_call_lhs (g))
7261 break;
7263 /* This is a naked call, as opposed to a GIMPLE_CALL with an
7264 LHS. All calls whose value is ignored should be
7265 represented like this. Look for the attribute. */
7266 fdecl = gimple_call_fndecl (g);
7267 ftype = TREE_TYPE (TREE_TYPE (gimple_call_fn (g)));
7269 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
7271 location_t loc = gimple_location (g);
7273 if (fdecl)
7274 warning_at (loc, OPT_Wunused_result,
7275 "ignoring return value of %qD, "
7276 "declared with attribute warn_unused_result",
7277 fdecl);
7278 else
7279 warning_at (loc, OPT_Wunused_result,
7280 "ignoring return value of function "
7281 "declared with attribute warn_unused_result");
7283 break;
7285 default:
7286 /* Not a container, not a call, or a call whose value is used. */
7287 break;
7292 static unsigned int
7293 run_warn_unused_result (void)
7295 do_warn_unused_result (gimple_body (current_function_decl));
7296 return 0;
7299 static bool
7300 gate_warn_unused_result (void)
7302 return flag_warn_unused_result;
7305 struct gimple_opt_pass pass_warn_unused_result =
7308 GIMPLE_PASS,
7309 "*warn_unused_result", /* name */
7310 gate_warn_unused_result, /* gate */
7311 run_warn_unused_result, /* execute */
7312 NULL, /* sub */
7313 NULL, /* next */
7314 0, /* static_pass_number */
7315 TV_NONE, /* tv_id */
7316 PROP_gimple_any, /* properties_required */
7317 0, /* properties_provided */
7318 0, /* properties_destroyed */
7319 0, /* todo_flags_start */
7320 0, /* todo_flags_finish */