* gcc.target/x86_64/abi/avx/asm-support.S (snapshot_ret): Preserve
[official-gcc/alias-decl.git] / gcc / tree-cfg.c
blob4ba9dd7325bf078ba713fe8b9f6382c6ba9859bc
1 /* Control flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
3 2010 Free Software Foundation, Inc.
4 Contributed by Diego Novillo <dnovillo@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "output.h"
32 #include "flags.h"
33 #include "function.h"
34 #include "expr.h"
35 #include "ggc.h"
36 #include "langhooks.h"
37 #include "diagnostic.h"
38 #include "tree-flow.h"
39 #include "timevar.h"
40 #include "tree-dump.h"
41 #include "tree-pass.h"
42 #include "toplev.h"
43 #include "except.h"
44 #include "cfgloop.h"
45 #include "cfglayout.h"
46 #include "tree-ssa-propagate.h"
47 #include "value-prof.h"
48 #include "pointer-set.h"
49 #include "tree-inline.h"
/* NOTE(review): this listing is a line-numbered dump of gcc/tree-cfg.c; the
   extraction dropped blank lines and brace-only lines (the embedded original
   line numbers skip where a '{' or '}' once stood).  Comments below annotate
   the code without altering any surviving token.  */
51 /* This file contains functions for building the Control Flow Graph (CFG)
52 for a function tree. */
54 /* Local declarations. */
56 /* Initial capacity for the basic block array. */
57 static const int initial_cfg_capacity = 20;
59 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
60 which use a particular edge. The CASE_LABEL_EXPRs are chained together
61 via their TREE_CHAIN field, which we clear after we're done with the
62 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
64 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
65 update the case vector in response to edge redirections.
67 Right now this table is set up and torn down at key points in the
68 compilation process. It would be nice if we could make the table
69 more persistent. The key is getting notification of changes to
70 the CFG (particularly edge removal, creation and redirection). */
/* Created by start_recording_case_labels, destroyed by
   end_recording_case_labels; NULL when no recording is active.  */
72 static struct pointer_map_t *edge_to_cases;
74 /* CFG statistics. */
75 struct cfg_stats_d
77 long num_merged_labels;
80 static struct cfg_stats_d cfg_stats;
82 /* Nonzero if we found a computed goto while building basic blocks. */
83 static bool found_computed_goto;
85 /* Hash table to store last discriminator assigned for each locus. */
/* Entry type for discriminator_per_locus; hashed on LOCUS by
   locus_map_hash / locus_map_eq below.  */
86 struct locus_discrim_map
88 location_t locus;
89 int discriminator;
/* Alive only during build_gimple_cfg (created and deleted there).  */
91 static htab_t discriminator_per_locus;
93 /* Basic blocks and flowgraphs. */
94 static void make_blocks (gimple_seq);
95 static void factor_computed_gotos (void);
97 /* Edges. */
98 static void make_edges (void);
99 static void make_cond_expr_edges (basic_block);
100 static void make_gimple_switch_edges (basic_block);
101 static void make_goto_expr_edges (basic_block);
102 static void make_gimple_asm_edges (basic_block);
103 static unsigned int locus_map_hash (const void *);
104 static int locus_map_eq (const void *, const void *);
105 static void assign_discriminator (location_t, basic_block);
106 static edge gimple_redirect_edge_and_branch (edge, basic_block);
107 static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
108 static unsigned int split_critical_edges (void);
110 /* Various helpers. */
111 static inline bool stmt_starts_bb_p (gimple, gimple);
112 static int gimple_verify_flow_info (void);
113 static void gimple_make_forwarder_block (edge);
114 static void gimple_cfg2vcg (FILE *);
115 static gimple first_non_label_stmt (basic_block);
117 /* Flowgraph optimization and cleanup. */
118 static void gimple_merge_blocks (basic_block, basic_block);
119 static bool gimple_can_merge_blocks_p (basic_block, basic_block);
120 static void remove_bb (basic_block);
121 static edge find_taken_edge_computed_goto (basic_block, tree);
122 static edge find_taken_edge_cond_expr (basic_block, tree);
123 static edge find_taken_edge_switch_expr (basic_block, tree);
124 static tree find_case_label_for_value (gimple, tree);
/* Build an empty CFG skeleton for function FN: allocate the basic-block
   and label-to-block arrays, register the ENTRY and EXIT blocks, and link
   entry directly to exit.  Real blocks are inserted between them later.  */
126 void
127 init_empty_tree_cfg_for_function (struct function *fn)
129 /* Initialize the basic block array. */
130 init_flow (fn);
131 profile_status_for_function (fn) = PROFILE_ABSENT;
132 n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
133 last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
134 basic_block_info_for_function (fn)
135 = VEC_alloc (basic_block, gc, initial_cfg_capacity)
136 VEC_safe_grow_cleared (basic_block, gc,
137 basic_block_info_for_function (fn),
138 initial_cfg_capacity);
140 /* Build a mapping of labels to their associated blocks. */
141 label_to_block_map_for_function (fn)
142 = VEC_alloc (basic_block, gc, initial_cfg_capacity);
143 VEC_safe_grow_cleared (basic_block, gc,
144 label_to_block_map_for_function (fn),
145 initial_cfg_capacity);
/* Install the two fixed blocks at their reserved indices.  */
147 SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
148 ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
149 SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
150 EXIT_BLOCK_PTR_FOR_FUNCTION (fn));
/* With no ordinary blocks yet, entry and exit are adjacent in the
   doubly-linked block chain.  */
152 ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
153 = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
154 EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
155 = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
/* Convenience wrapper: build the empty CFG skeleton for the current
   function (cfun).  */
158 void
159 init_empty_tree_cfg (void)
161 init_empty_tree_cfg_for_function (cfun);
164 /*---------------------------------------------------------------------------
165 Create basic blocks
166 ---------------------------------------------------------------------------*/
168 /* Entry point to the CFG builder for trees. SEQ is the sequence of
169 statements to be added to the flowgraph. */
171 static void
172 build_gimple_cfg (gimple_seq seq)
174 /* Register specific gimple functions. */
175 gimple_register_cfg_hooks ();
177 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
179 init_empty_tree_cfg ();
/* make_blocks sets this flag when it encounters a computed goto.  */
181 found_computed_goto = 0;
182 make_blocks (seq);
184 /* Computed gotos are hell to deal with, especially if there are
185 lots of them with a large number of destinations. So we factor
186 them to a common computed goto location before we build the
187 edge list. After we convert back to normal form, we will un-factor
188 the computed gotos since factoring introduces an unwanted jump. */
189 if (found_computed_goto)
190 factor_computed_gotos ();
192 /* Make sure there is always at least one block, even if it's empty. */
193 if (n_basic_blocks == NUM_FIXED_BLOCKS)
194 create_empty_bb (ENTRY_BLOCK_PTR);
196 /* Adjust the size of the array. */
197 if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
198 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);
200 /* To speed up statement iterator walks, we first purge dead labels. */
201 cleanup_dead_labels ();
203 /* Group case nodes to reduce the number of edges.
204 We do this after cleaning up dead labels because otherwise we miss
205 a lot of obvious case merging opportunities. */
206 group_case_labels ();
208 /* Create the edges of the flowgraph. */
/* The discriminator table only lives for the duration of edge creation;
   make_edges and its helpers populate it via assign_discriminator.  */
209 discriminator_per_locus = htab_create (13, locus_map_hash, locus_map_eq,
210 free);
211 make_edges ();
212 cleanup_dead_labels ();
213 htab_delete (discriminator_per_locus);
215 /* Debugging dumps. */
217 /* Write the flowgraph to a VCG file. */
/* NOTE(review): in the original file these dump locals sit inside a nested
   scope block whose braces were lost in this extraction.  */
219 int local_dump_flags;
220 FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
221 if (vcg_file)
223 gimple_cfg2vcg (vcg_file);
224 dump_end (TDI_vcg, vcg_file);
228 #ifdef ENABLE_CHECKING
229 verify_stmts ();
230 #endif
/* Pass entry point for pass_build_cfg: build the CFG for the current
   function's GIMPLE body, then detach the body (statements now live in
   basic blocks).  Returns 0 (no additional TODO flags).  */
233 static unsigned int
234 execute_build_cfg (void)
236 gimple_seq body = gimple_body (current_function_decl);
238 build_gimple_cfg (body);
/* The statements are now owned by the CFG's blocks; drop the
   function's flat body sequence.  */
239 gimple_set_body (current_function_decl, NULL);
240 if (dump_file && (dump_flags & TDF_DETAILS))
242 fprintf (dump_file, "Scope blocks:\n");
243 dump_scope_blocks (dump_file, dump_flags);
245 return 0;
/* Pass descriptor: requires lowered GIMPLE with EH (PROP_gimple_leh),
   provides PROP_cfg; verifies statements and cleans up the fresh CFG
   afterwards.  */
248 struct gimple_opt_pass pass_build_cfg =
251 GIMPLE_PASS,
252 "cfg", /* name */
253 NULL, /* gate */
254 execute_build_cfg, /* execute */
255 NULL, /* sub */
256 NULL, /* next */
257 0, /* static_pass_number */
258 TV_TREE_CFG, /* tv_id */
259 PROP_gimple_leh, /* properties_required */
260 PROP_cfg, /* properties_provided */
261 0, /* properties_destroyed */
262 0, /* todo_flags_start */
263 TODO_verify_stmts | TODO_cleanup_cfg
264 | TODO_dump_func /* todo_flags_finish */
269 /* Return true if T is a computed goto. */
/* A goto whose destination operand is anything other than a LABEL_DECL
   (i.e. an address-valued expression) is "computed".  */
271 static bool
272 computed_goto_p (gimple t)
274 return (gimple_code (t) == GIMPLE_GOTO
275 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
279 /* Search the CFG for any computed gotos. If found, factor them to a
280 common computed goto site. Also record the location of that site so
281 that we can un-factor the gotos after we have converted back to
282 normal form. */
284 static void
285 factor_computed_gotos (void)
287 basic_block bb;
288 tree factored_label_decl = NULL;
289 tree var = NULL;
290 gimple factored_computed_goto_label = NULL;
291 gimple factored_computed_goto = NULL;
293 /* We know there are one or more computed gotos in this function.
294 Examine the last statement in each basic block to see if the block
295 ends with a computed goto. */
297 FOR_EACH_BB (bb)
299 gimple_stmt_iterator gsi = gsi_last_bb (bb);
300 gimple last;
302 if (gsi_end_p (gsi))
303 continue;
305 last = gsi_stmt (gsi);
307 /* Ignore the computed goto we create when we factor the original
308 computed gotos. */
309 if (last == factored_computed_goto)
310 continue;
312 /* If the last statement is a computed goto, factor it. */
313 if (computed_goto_p (last))
315 gimple assignment;
317 /* The first time we find a computed goto we need to create
318 the factored goto block and the variable each original
319 computed goto will use for their goto destination. */
/* Lazily built on the first hit; subsequent computed gotos reuse
   the same factored block, label and temporary.  */
320 if (!factored_computed_goto)
322 basic_block new_bb = create_empty_bb (bb);
323 gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);
325 /* Create the destination of the factored goto. Each original
326 computed goto will put its desired destination into this
327 variable and jump to the label we create immediately
328 below. */
329 var = create_tmp_var (ptr_type_node, "gotovar");
331 /* Build a label for the new block which will contain the
332 factored computed goto. */
333 factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
334 factored_computed_goto_label
335 = gimple_build_label (factored_label_decl);
336 gsi_insert_after (&new_gsi, factored_computed_goto_label,
337 GSI_NEW_STMT);
339 /* Build our new computed goto. */
340 factored_computed_goto = gimple_build_goto (var);
341 gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
344 /* Copy the original computed goto's destination into VAR. */
345 assignment = gimple_build_assign (var, gimple_goto_dest (last));
346 gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
348 /* And re-vector the computed goto to the new destination. */
/* The original goto becomes a plain goto to the factored label.  */
349 gimple_goto_set_dest (last, factored_label_decl);
355 /* Build a flowgraph for the sequence of stmts SEQ. */
/* Walks SEQ once, starting a new basic block whenever the current
   statement must begin one (stmt_starts_bb_p) or the previous statement
   ended one (stmt_ends_bb_p); also records whether any computed goto was
   seen (found_computed_goto, consumed by build_gimple_cfg).  */
357 static void
358 make_blocks (gimple_seq seq)
360 gimple_stmt_iterator i = gsi_start (seq);
361 gimple stmt = NULL;
362 bool start_new_block = true;
363 bool first_stmt_of_seq = true;
364 basic_block bb = ENTRY_BLOCK_PTR;
366 while (!gsi_end_p (i))
368 gimple prev_stmt;
370 prev_stmt = stmt;
371 stmt = gsi_stmt (i);
373 /* If the statement starts a new basic block or if we have determined
374 in a previous pass that we need to create a new block for STMT, do
375 so now. */
376 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
378 if (!first_stmt_of_seq)
/* Split SEQ so the new block owns the tail starting at STMT.  */
379 seq = gsi_split_seq_before (&i);
380 bb = create_basic_block (seq, NULL, bb);
381 start_new_block = false;
384 /* Now add STMT to BB and create the subgraphs for special statement
385 codes. */
386 gimple_set_bb (stmt, bb);
388 if (computed_goto_p (stmt))
389 found_computed_goto = true;
391 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
392 next iteration. */
393 if (stmt_ends_bb_p (stmt))
395 /* If the stmt can make abnormal goto use a new temporary
396 for the assignment to the LHS. This makes sure the old value
397 of the LHS is available on the abnormal edge. Otherwise
398 we will end up with overlapping life-ranges for abnormal
399 SSA names. */
400 if (gimple_has_lhs (stmt)
401 && stmt_can_make_abnormal_goto (stmt)
402 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
404 tree lhs = gimple_get_lhs (stmt);
405 tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
/* S copies the temporary into the real LHS on the fallthru path;
   STMT itself now writes only the temporary.  */
406 gimple s = gimple_build_assign (lhs, tmp);
407 gimple_set_location (s, gimple_location (stmt));
408 gimple_set_block (s, gimple_block (stmt));
409 gimple_set_lhs (stmt, tmp);
410 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
411 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
412 DECL_GIMPLE_REG_P (tmp) = 1;
413 gsi_insert_after (&i, s, GSI_SAME_STMT);
415 start_new_block = true;
418 gsi_next (&i);
419 first_stmt_of_seq = false;
424 /* Create and return a new empty basic block after bb AFTER. */
/* CFG-hook callback.  H is an optional gimple_seq for the new block's
   statements; E must be NULL (edge argument is unused for GIMPLE).  */
426 static basic_block
427 create_bb (void *h, void *e, basic_block after)
429 basic_block bb;
431 gcc_assert (!e);
433 /* Create and initialize a new basic block. Since alloc_block uses
434 ggc_alloc_cleared to allocate a basic block, we do not have to
435 clear the newly allocated basic block here. */
436 bb = alloc_block ();
438 bb->index = last_basic_block;
439 bb->flags = BB_NEW;
440 bb->il.gimple = GGC_CNEW (struct gimple_bb_info);
441 set_bb_seq (bb, h ? (gimple_seq) h : gimple_seq_alloc ());
443 /* Add the new block to the linked list of blocks. */
444 link_block (bb, after);
446 /* Grow the basic block array if needed. */
447 if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
/* Grow by 25% plus a little, amortizing reallocation cost.  */
449 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
450 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
453 /* Add the newly created block to the array. */
454 SET_BASIC_BLOCK (last_basic_block, bb);
456 n_basic_blocks++;
457 last_basic_block++;
459 return bb;
463 /*---------------------------------------------------------------------------
464 Edge creation
465 ---------------------------------------------------------------------------*/
467 /* Fold COND_EXPR_COND of each COND_EXPR. */
/* For every block ending in a GIMPLE_COND, try constant-folding the
   condition; conditions that fold to 0 or 1 are rewritten to the
   canonical always-false / always-true form so CFG cleanup can remove
   the dead edge.  Overflow warnings are deferred so we only warn when
   the fold actually decided the branch.  */
469 void
470 fold_cond_expr_cond (void)
472 basic_block bb;
474 FOR_EACH_BB (bb)
476 gimple stmt = last_stmt (bb);
478 if (stmt && gimple_code (stmt) == GIMPLE_COND)
480 location_t loc = gimple_location (stmt);
481 tree cond;
482 bool zerop, onep;
484 fold_defer_overflow_warnings ();
485 cond = fold_binary_loc (loc, gimple_cond_code (stmt), boolean_type_node,
486 gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
487 if (cond)
489 zerop = integer_zerop (cond);
490 onep = integer_onep (cond);
492 else
493 zerop = onep = false;
495 fold_undefer_overflow_warnings (zerop || onep,
496 stmt,
497 WARN_STRICT_OVERFLOW_CONDITIONAL);
498 if (zerop)
499 gimple_cond_make_false (stmt);
500 else if (onep)
501 gimple_cond_make_true (stmt);
506 /* Join all the blocks in the flowgraph. */
/* Dispatches on the last statement of each block to create its outgoing
   edges; OMP constructs are additionally tracked via a stack of
   omp_region records (cur_region) so their control edges can be wired up
   when the matching GIMPLE_OMP_RETURN / GIMPLE_OMP_CONTINUE is seen.  */
508 static void
509 make_edges (void)
511 basic_block bb;
512 struct omp_region *cur_region = NULL;
514 /* Create an edge from entry to the first block with executable
515 statements in it. */
516 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);
518 /* Traverse the basic block array placing edges. */
519 FOR_EACH_BB (bb)
521 gimple last = last_stmt (bb);
522 bool fallthru;
524 if (last)
526 enum gimple_code code = gimple_code (last);
527 switch (code)
529 case GIMPLE_GOTO:
530 make_goto_expr_edges (bb);
531 fallthru = false;
532 break;
533 case GIMPLE_RETURN:
534 make_edge (bb, EXIT_BLOCK_PTR, 0);
535 fallthru = false;
536 break;
537 case GIMPLE_COND:
538 make_cond_expr_edges (bb);
539 fallthru = false;
540 break;
541 case GIMPLE_SWITCH:
542 make_gimple_switch_edges (bb);
543 fallthru = false;
544 break;
545 case GIMPLE_RESX:
546 make_eh_edges (last);
547 fallthru = false;
548 break;
549 case GIMPLE_EH_DISPATCH:
550 fallthru = make_eh_dispatch_edges (last);
551 break;
553 case GIMPLE_CALL:
554 /* If this function receives a nonlocal goto, then we need to
555 make edges from this call site to all the nonlocal goto
556 handlers. */
557 if (stmt_can_make_abnormal_goto (last))
558 make_abnormal_goto_edges (bb, true);
560 /* If this statement has reachable exception handlers, then
561 create abnormal edges to them. */
562 make_eh_edges (last);
564 /* Some calls are known not to return. */
565 fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
566 break;
568 case GIMPLE_ASSIGN:
569 /* A GIMPLE_ASSIGN may throw internally and thus be considered
570 control-altering. */
571 if (is_ctrl_altering_stmt (last))
572 make_eh_edges (last);
573 fallthru = true;
574 break;
576 case GIMPLE_ASM:
577 make_gimple_asm_edges (bb);
578 fallthru = true;
579 break;
581 case GIMPLE_OMP_PARALLEL:
582 case GIMPLE_OMP_TASK:
583 case GIMPLE_OMP_FOR:
584 case GIMPLE_OMP_SINGLE:
585 case GIMPLE_OMP_MASTER:
586 case GIMPLE_OMP_ORDERED:
587 case GIMPLE_OMP_CRITICAL:
588 case GIMPLE_OMP_SECTION:
/* Push a new OMP region; edges inside it are created when the
   matching OMP_RETURN/OMP_CONTINUE pops it below.  */
589 cur_region = new_omp_region (bb, code, cur_region);
590 fallthru = true;
591 break;
593 case GIMPLE_OMP_SECTIONS:
594 cur_region = new_omp_region (bb, code, cur_region);
595 fallthru = true;
596 break;
598 case GIMPLE_OMP_SECTIONS_SWITCH:
599 fallthru = false;
600 break;
602 case GIMPLE_OMP_ATOMIC_LOAD:
603 case GIMPLE_OMP_ATOMIC_STORE:
604 fallthru = true;
605 break;
607 case GIMPLE_OMP_RETURN:
608 /* In the case of a GIMPLE_OMP_SECTION, the edge will go
609 somewhere other than the next block. This will be
610 created later. */
611 cur_region->exit = bb;
612 fallthru = cur_region->type != GIMPLE_OMP_SECTION;
/* Pop back to the enclosing OMP region.  */
613 cur_region = cur_region->outer;
614 break;
616 case GIMPLE_OMP_CONTINUE:
617 cur_region->cont = bb;
618 switch (cur_region->type)
620 case GIMPLE_OMP_FOR:
621 /* Mark all GIMPLE_OMP_FOR and GIMPLE_OMP_CONTINUE
622 succs edges as abnormal to prevent splitting
623 them. */
624 single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
625 /* Make the loopback edge. */
626 make_edge (bb, single_succ (cur_region->entry),
627 EDGE_ABNORMAL);
629 /* Create an edge from GIMPLE_OMP_FOR to exit, which
630 corresponds to the case that the body of the loop
631 is not executed at all. */
632 make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
633 make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
634 fallthru = false;
635 break;
637 case GIMPLE_OMP_SECTIONS:
638 /* Wire up the edges into and out of the nested sections. */
640 basic_block switch_bb = single_succ (cur_region->entry);
642 struct omp_region *i;
643 for (i = cur_region->inner; i ; i = i->next)
645 gcc_assert (i->type == GIMPLE_OMP_SECTION);
646 make_edge (switch_bb, i->entry, 0);
647 make_edge (i->exit, bb, EDGE_FALLTHRU);
650 /* Make the loopback edge to the block with
651 GIMPLE_OMP_SECTIONS_SWITCH. */
652 make_edge (bb, switch_bb, 0);
654 /* Make the edge from the switch to exit. */
655 make_edge (switch_bb, bb->next_bb, 0);
656 fallthru = false;
658 break;
660 default:
661 gcc_unreachable ();
663 break;
665 default:
/* Any other statement must not end a block; plain fallthru.  */
666 gcc_assert (!stmt_ends_bb_p (last));
667 fallthru = true;
670 else
671 fallthru = true;
673 if (fallthru)
675 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
676 if (last)
677 assign_discriminator (gimple_location (last), bb->next_bb);
681 if (root_omp_region)
682 free_omp_regions ();
684 /* Fold COND_EXPR_COND of each COND_EXPR. */
685 fold_cond_expr_cond ();
688 /* Trivial hash function for a location_t. ITEM is a pointer to
689 a hash table entry that maps a location_t to a discriminator. */
/* The locus value itself serves as the hash (see the matching
   htab_find_slot_with_hash call, which passes LOCUS as the hash).  */
691 static unsigned int
692 locus_map_hash (const void *item)
694 return ((const struct locus_discrim_map *) item)->locus;
697 /* Equality function for the locus-to-discriminator map. VA and VB
698 point to the two hash table entries to compare. */
/* Entries are equal exactly when their locus fields match.  */
700 static int
701 locus_map_eq (const void *va, const void *vb)
703 const struct locus_discrim_map *a = (const struct locus_discrim_map *) va;
704 const struct locus_discrim_map *b = (const struct locus_discrim_map *) vb;
705 return a->locus == b->locus;
708 /* Find the next available discriminator value for LOCUS. The
709 discriminator distinguishes among several basic blocks that
710 share a common locus, allowing for more accurate sample-based
711 profiling. */
/* Looks up (or inserts) the per-locus counter in
   discriminator_per_locus and returns its post-increment value, so the
   first call for a locus yields 1.  The table owns the XNEW'd entry
   (freed by htab_delete via the `free' callback).  */
713 static int
714 next_discriminator_for_locus (location_t locus)
716 struct locus_discrim_map item;
717 struct locus_discrim_map **slot;
719 item.locus = locus;
720 item.discriminator = 0;
721 slot = (struct locus_discrim_map **)
722 htab_find_slot_with_hash (discriminator_per_locus, (void *) &item,
723 (hashval_t) locus, INSERT);
724 gcc_assert (slot);
725 if (*slot == HTAB_EMPTY_ENTRY)
727 *slot = XNEW (struct locus_discrim_map);
728 gcc_assert (*slot);
729 (*slot)->locus = locus;
730 (*slot)->discriminator = 0;
732 (*slot)->discriminator++;
733 return (*slot)->discriminator;
736 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
/* Fast path: identical locations.  Otherwise expand both and compare
   line numbers, then file identity — first by pointer, then by string
   contents (expanded file names may be distinct pointers to equal
   strings).  */
738 static bool
739 same_line_p (location_t locus1, location_t locus2)
741 expanded_location from, to;
743 if (locus1 == locus2)
744 return true;
746 from = expand_location (locus1);
747 to = expand_location (locus2);
749 if (from.line != to.line)
750 return false;
751 if (from.file == to.file)
752 return true;
753 return (from.file != NULL
754 && to.file != NULL
755 && strcmp (from.file, to.file) == 0);
758 /* Assign a unique discriminator value to block BB if it begins at the same
759 LOCUS as its predecessor block. */
/* No-op for an unknown locus or a block that already has a
   discriminator.  BB only gets one if either its first non-label or its
   last statement is on the same source line as LOCUS.  */
761 static void
762 assign_discriminator (location_t locus, basic_block bb)
764 gimple first_in_to_bb, last_in_to_bb;
766 if (locus == 0 || bb->discriminator != 0)
767 return;
769 first_in_to_bb = first_non_label_stmt (bb);
770 last_in_to_bb = last_stmt (bb);
771 if ((first_in_to_bb && same_line_p (locus, gimple_location (first_in_to_bb)))
772 || (last_in_to_bb && same_line_p (locus, gimple_location (last_in_to_bb))))
773 bb->discriminator = next_discriminator_for_locus (locus);
776 /* Create the edges for a GIMPLE_COND starting at block BB. */
/* BB must end in a GIMPLE_COND whose true/false labels are still set;
   creates the EDGE_TRUE_VALUE and EDGE_FALSE_VALUE successors, records
   goto locus/block on each edge for debug info, and clears the labels
   (edges now carry the destinations).  */
778 static void
779 make_cond_expr_edges (basic_block bb)
781 gimple entry = last_stmt (bb);
782 gimple then_stmt, else_stmt;
783 basic_block then_bb, else_bb;
784 tree then_label, else_label;
785 edge e;
786 location_t entry_locus;
788 gcc_assert (entry);
789 gcc_assert (gimple_code (entry) == GIMPLE_COND);
791 entry_locus = gimple_location (entry);
793 /* Entry basic blocks for each component. */
794 then_label = gimple_cond_true_label (entry);
795 else_label = gimple_cond_false_label (entry);
796 then_bb = label_to_block (then_label);
797 else_bb = label_to_block (else_label);
798 then_stmt = first_stmt (then_bb);
799 else_stmt = first_stmt (else_bb);
801 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
802 assign_discriminator (entry_locus, then_bb);
803 e->goto_locus = gimple_location (then_stmt);
804 if (e->goto_locus)
805 e->goto_block = gimple_block (then_stmt);
/* make_edge can return NULL if the false edge already exists (e.g.
   both labels target the same block), hence the check below.  */
806 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
807 if (e)
809 assign_discriminator (entry_locus, else_bb);
810 e->goto_locus = gimple_location (else_stmt);
811 if (e->goto_locus)
812 e->goto_block = gimple_block (else_stmt);
815 /* We do not need the labels anymore. */
816 gimple_cond_set_true_label (entry, NULL_TREE);
817 gimple_cond_set_false_label (entry, NULL_TREE);
821 /* Called for each element in the hash table (P) as we delete the
822 edge to cases hash table.
824 Clear all the TREE_CHAINs to prevent problems with copying of
825 SWITCH_EXPRs and structure sharing rules, then free the hash table
826 element. */
/* pointer_map_traverse callback; returning false would stop the walk,
   but this always returns false after clearing — NOTE(review): libiberty
   pointer_map_traverse continues while the callback result is true in
   some versions; confirm the intended protocol against pointer-set.c.  */
828 static bool
829 edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
830 void *data ATTRIBUTE_UNUSED)
832 tree t, next;
834 for (t = (tree) *value; t; t = next)
836 next = TREE_CHAIN (t);
837 TREE_CHAIN (t) = NULL;
840 *value = NULL;
841 return false;
844 /* Start recording information mapping edges to case labels. */
/* Must not be called while a recording session is already active.  */
846 void
847 start_recording_case_labels (void)
849 gcc_assert (edge_to_cases == NULL);
850 edge_to_cases = pointer_map_create ();
853 /* Return nonzero if we are recording information for case labels. */
855 static bool
856 recording_case_labels_p (void)
858 return (edge_to_cases != NULL);
861 /* Stop recording information mapping edges to case labels and
862 remove any information we have recorded. */
/* Clears the CASE_LABEL_EXPR chains via edge_to_cases_cleanup before
   destroying the map, then resets the sentinel to NULL.  */
863 void
864 end_recording_case_labels (void)
866 pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
867 pointer_map_destroy (edge_to_cases);
868 edge_to_cases = NULL;
871 /* If we are inside a {start,end}_recording_cases block, then return
872 a chain of CASE_LABEL_EXPRs from T which reference E.
874 Otherwise return NULL. */
/* T is the GIMPLE_SWITCH whose labels are being queried.  On a cache
   miss the whole switch is indexed at once (every label chained onto
   its edge's map slot), then the lookup is retried.  */
876 static tree
877 get_cases_for_edge (edge e, gimple t)
879 void **slot;
880 size_t i, n;
882 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
883 chains available. Return NULL so the caller can detect this case. */
884 if (!recording_case_labels_p ())
885 return NULL;
887 slot = pointer_map_contains (edge_to_cases, e);
888 if (slot)
889 return (tree) *slot;
891 /* If we did not find E in the hash table, then this must be the first
892 time we have been queried for information about E & T. Add all the
893 elements from T to the hash table then perform the query again. */
895 n = gimple_switch_num_labels (t);
896 for (i = 0; i < n; i++)
898 tree elt = gimple_switch_label (t, i);
899 tree lab = CASE_LABEL (elt);
900 basic_block label_bb = label_to_block (lab);
901 edge this_edge = find_edge (e->src, label_bb);
903 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
904 a new chain. */
905 slot = pointer_map_insert (edge_to_cases, this_edge);
906 TREE_CHAIN (elt) = (tree) *slot;
907 *slot = elt;
/* After indexing, E is guaranteed to be present.  */
910 return (tree) *pointer_map_contains (edge_to_cases, e);
913 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
/* One plain (flag-less) edge per case label; duplicate targets are
   handled inside make_edge.  Each target block may also receive a
   discriminator for the switch's locus.  */
915 static void
916 make_gimple_switch_edges (basic_block bb)
918 gimple entry = last_stmt (bb);
919 location_t entry_locus;
920 size_t i, n;
922 entry_locus = gimple_location (entry);
924 n = gimple_switch_num_labels (entry);
926 for (i = 0; i < n; ++i)
928 tree lab = CASE_LABEL (gimple_switch_label (entry, i));
929 basic_block label_bb = label_to_block (lab);
930 make_edge (bb, label_bb, 0);
931 assign_discriminator (entry_locus, label_bb);
936 /* Return the basic block holding label DEST. */
/* IFUN selects which function's label map to consult.  An undefined
   label (negative UID) after errors is patched by emitting DEST into
   the first real block so later passes have something coherent.
   Returns NULL if UID is beyond the map (label not yet registered).  */
938 basic_block
939 label_to_block_fn (struct function *ifun, tree dest)
941 int uid = LABEL_DECL_UID (dest);
943 /* We would die hard when faced by an undefined label. Emit a label to
944 the very first basic block. This will hopefully make even the dataflow
945 and undefined variable warnings quite right. */
946 if ((errorcount || sorrycount) && uid < 0)
948 gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
949 gimple stmt;
951 stmt = gimple_build_label (dest);
952 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
/* gimple_build_label assigned DEST a fresh UID; re-read it.  */
953 uid = LABEL_DECL_UID (dest);
955 if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
956 <= (unsigned int) uid)
957 return NULL;
958 return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
961 /* Create edges for an abnormal goto statement at block BB. If FOR_CALL
962 is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */
/* Scans the leading labels of every block: FORCED_LABEL targets get an
   abnormal edge for computed gotos (!FOR_CALL), DECL_NONLOCAL targets
   for calls that may do a nonlocal goto (FOR_CALL).  At most one edge
   per target block — the inner `break' stops at the first match since
   labels in one block share the block as destination.  */
964 void
965 make_abnormal_goto_edges (basic_block bb, bool for_call)
967 basic_block target_bb;
968 gimple_stmt_iterator gsi;
970 FOR_EACH_BB (target_bb)
971 for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
973 gimple label_stmt = gsi_stmt (gsi);
974 tree target;
/* Labels only appear at the start of a block; stop at the first
   non-label statement.  */
976 if (gimple_code (label_stmt) != GIMPLE_LABEL)
977 break;
979 target = gimple_label_label (label_stmt);
981 /* Make an edge to every label block that has been marked as a
982 potential target for a computed goto or a non-local goto. */
983 if ((FORCED_LABEL (target) && !for_call)
984 || (DECL_NONLOCAL (target) && for_call))
986 make_edge (bb, target_bb, EDGE_ABNORMAL);
987 break;
992 /* Create edges for a goto statement at block BB. */
/* A simple goto (constant label destination) becomes a single fallthru
   edge and the now-redundant GIMPLE_GOTO statement is removed; a
   computed goto instead fans out abnormal edges to every forced
   label.  */
994 static void
995 make_goto_expr_edges (basic_block bb)
997 gimple_stmt_iterator last = gsi_last_bb (bb);
998 gimple goto_t = gsi_stmt (last);
1000 /* A simple GOTO creates normal edges. */
1001 if (simple_goto_p (goto_t))
1003 tree dest = gimple_goto_dest (goto_t);
1004 basic_block label_bb = label_to_block (dest);
1005 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
1006 e->goto_locus = gimple_location (goto_t);
1007 assign_discriminator (e->goto_locus, label_bb);
1008 if (e->goto_locus)
1009 e->goto_block = gimple_block (goto_t);
/* The edge now fully encodes the transfer; drop the statement.  */
1010 gsi_remove (&last, true);
1011 return;
1014 /* A computed GOTO creates abnormal edges. */
1015 make_abnormal_goto_edges (bb, false);
1018 /* Create edges for an asm statement with labels at block BB. */
/* An asm goto lists its possible label targets as operands; create one
   plain edge per listed label, mirroring make_gimple_switch_edges.  */
1020 static void
1021 make_gimple_asm_edges (basic_block bb)
1023 gimple stmt = last_stmt (bb);
1024 location_t stmt_loc = gimple_location (stmt);
1025 int i, n = gimple_asm_nlabels (stmt);
1027 for (i = 0; i < n; ++i)
1029 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1030 basic_block label_bb = label_to_block (label);
1031 make_edge (bb, label_bb, 0);
1032 assign_discriminator (stmt_loc, label_bb);
1036 /*---------------------------------------------------------------------------
1037 Flowgraph analysis
1038 ---------------------------------------------------------------------------*/
1040 /* Cleanup useless labels in basic blocks. This is something we wish
1041 to do early because it allows us to group case labels before creating
1042 the edges for the CFG, and it speeds up block statement iterators in
1043 all passes later on.
1044 We rerun this pass after CFG is created, to get rid of the labels that
1045 are no longer referenced. After then we do not run it any more, since
1046 (almost) no new labels should be created. */
1048 /* A map from basic block index to the leading label of that block. */
/* Allocated per invocation of cleanup_dead_labels (XCNEWVEC there),
   indexed by bb->index.  */
1049 static struct label_record
1051 /* The label. */
1052 tree label;
1054 /* True if the label is referenced from somewhere. */
1055 bool used;
1056 } *label_for_bb;
1058 /* Given LABEL return the first label in the same basic block. */
/* Canonicalizes LABEL to its block's representative label, registering
   LABEL itself as representative if the block had none (label_to_block
   may have just created it), and marks the representative as used.  */
1060 static tree
1061 main_block_label (tree label)
1063 basic_block bb = label_to_block (label);
1064 tree main_label = label_for_bb[bb->index].label;
1066 /* label_to_block possibly inserted undefined label into the chain. */
1067 if (!main_label)
1069 label_for_bb[bb->index].label = label;
1070 main_label = label;
1073 label_for_bb[bb->index].used = true;
1074 return main_label;
1077 /* Clean up redundant labels within the exception tree. */
/* Redirects every label stored in the EH tables (post-landing-pad
   labels, try/catch labels, allowed-exceptions labels) to its block's
   representative label via main_block_label, keeping the
   EH_LANDING_PAD_NR mapping consistent when a pad label changes.  */
1079 static void
1080 cleanup_dead_labels_eh (void)
1082 eh_landing_pad lp;
1083 eh_region r;
1084 tree lab;
1085 int i;
1087 if (cfun->eh == NULL)
1088 return;
/* Index 0 of lp_array is unused; start at 1.  */
1090 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
1091 if (lp && lp->post_landing_pad)
1093 lab = main_block_label (lp->post_landing_pad);
1094 if (lab != lp->post_landing_pad)
/* Move the landing-pad number from the old label to the new one.  */
1096 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1097 EH_LANDING_PAD_NR (lab) = lp->index;
1101 FOR_ALL_EH_REGION (r)
1102 switch (r->type)
1104 case ERT_CLEANUP:
1105 case ERT_MUST_NOT_THROW:
1106 break;
1108 case ERT_TRY:
1110 eh_catch c;
1111 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
1113 lab = c->label;
1114 if (lab)
1115 c->label = main_block_label (lab);
1118 break;
1120 case ERT_ALLOWED_EXCEPTIONS:
1121 lab = r->u.allowed.label;
1122 if (lab)
1123 r->u.allowed.label = main_block_label (lab);
1124 break;
1129 /* Cleanup redundant labels. This is a three-step process:
1130 1) Find the leading label for each block.
1131 2) Redirect all references to labels to the leading labels.
1132 3) Cleanup all useless labels. */
1134 void
1135 cleanup_dead_labels (void)
1137 basic_block bb;
1138 label_for_bb = XCNEWVEC (struct label_record, last_basic_block);
1140 /* Find a suitable label for each block. We use the first user-defined
1141 label if there is one, or otherwise just the first label we see. */
1142 FOR_EACH_BB (bb)
1144 gimple_stmt_iterator i;
1146 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1148 tree label;
1149 gimple stmt = gsi_stmt (i);
1151 if (gimple_code (stmt) != GIMPLE_LABEL)
1152 break;
1154 label = gimple_label_label (stmt);
1156 /* If we have not yet seen a label for the current block,
1157 remember this one and see if there are more labels. */
1158 if (!label_for_bb[bb->index].label)
1160 label_for_bb[bb->index].label = label;
1161 continue;
1164 /* If we did see a label for the current block already, but it
1165 is an artificially created label, replace it if the current
1166 label is a user defined label. */
1167 if (!DECL_ARTIFICIAL (label)
1168 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
1170 label_for_bb[bb->index].label = label;
1171 break;
/* Step 2: every call to main_block_label below both canonicalizes a
   reference and marks the block's leading label as used, which step 3
   relies on to decide removability.  */
1176 /* Now redirect all jumps/branches to the selected label.
1177 First do so for each block ending in a control statement. */
1178 FOR_EACH_BB (bb)
1180 gimple stmt = last_stmt (bb);
1181 if (!stmt)
1182 continue;
1184 switch (gimple_code (stmt))
1186 case GIMPLE_COND:
1188 tree true_label = gimple_cond_true_label (stmt);
1189 tree false_label = gimple_cond_false_label (stmt);
1191 if (true_label)
1192 gimple_cond_set_true_label (stmt, main_block_label (true_label));
1193 if (false_label)
1194 gimple_cond_set_false_label (stmt, main_block_label (false_label));
1195 break;
1198 case GIMPLE_SWITCH:
1200 size_t i, n = gimple_switch_num_labels (stmt);
1202 /* Replace all destination labels. */
1203 for (i = 0; i < n; ++i)
1205 tree case_label = gimple_switch_label (stmt, i);
1206 tree label = main_block_label (CASE_LABEL (case_label));
1207 CASE_LABEL (case_label) = label;
1209 break;
1212 case GIMPLE_ASM:
1214 int i, n = gimple_asm_nlabels (stmt);
1216 for (i = 0; i < n; ++i)
1218 tree cons = gimple_asm_label_op (stmt, i);
1219 tree label = main_block_label (TREE_VALUE (cons));
1220 TREE_VALUE (cons) = label;
1222 break;
1225 /* We have to handle gotos until they're removed, and we don't
1226 remove them until after we've created the CFG edges. */
1227 case GIMPLE_GOTO:
1228 if (!computed_goto_p (stmt))
1230 tree new_dest = main_block_label (gimple_goto_dest (stmt));
1231 gimple_goto_set_dest (stmt, new_dest);
1233 break;
1235 default:
1236 break;
1240 /* Do the same for the exception region tree labels. */
1241 cleanup_dead_labels_eh ();
1243 /* Finally, purge dead labels. All user-defined labels and labels that
1244 can be the target of non-local gotos and labels which have their
1245 address taken are preserved. */
1246 FOR_EACH_BB (bb)
1248 gimple_stmt_iterator i;
1249 tree label_for_this_bb = label_for_bb[bb->index].label;
1251 if (!label_for_this_bb)
1252 continue;
1254 /* If the main label of the block is unused, we may still remove it. */
1255 if (!label_for_bb[bb->index].used)
1256 label_for_this_bb = NULL;
1258 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
1260 tree label;
1261 gimple stmt = gsi_stmt (i);
1263 if (gimple_code (stmt) != GIMPLE_LABEL)
1264 break;
1266 label = gimple_label_label (stmt);
1268 if (label == label_for_this_bb
1269 || !DECL_ARTIFICIAL (label)
1270 || DECL_NONLOCAL (label)
1271 || FORCED_LABEL (label))
1272 gsi_next (&i);
1273 else
1274 gsi_remove (&i, true);
/* Release the per-block map allocated at the top of this function.  */
1278 free (label_for_bb);
1281 /* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1282 and scan the sorted vector of cases. Combine the ones jumping to the
1283 same label.
1284 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1286 void
1287 group_case_labels (void)
1289 basic_block bb;
1291 FOR_EACH_BB (bb)
1293 gimple stmt = last_stmt (bb);
1294 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1296 int old_size = gimple_switch_num_labels (stmt);
1297 int i, j, new_size = old_size;
1298 tree default_case = NULL_TREE;
1299 tree default_label = NULL_TREE;
1300 bool has_default;
1302 /* The default label is always the first case in a switch
1303 statement after gimplification if it was not optimized
1304 away */
1305 if (!CASE_LOW (gimple_switch_default_label (stmt))
1306 && !CASE_HIGH (gimple_switch_default_label (stmt)))
1308 default_case = gimple_switch_default_label (stmt);
1309 default_label = CASE_LABEL (default_case);
1310 has_default = true;
1312 else
1313 has_default = false;
1315 /* Look for possible opportunities to merge cases. */
1316 if (has_default)
1317 i = 1;
1318 else
1319 i = 0;
1320 while (i < old_size)
1322 tree base_case, base_label, base_high;
1323 base_case = gimple_switch_label (stmt, i);
1325 gcc_assert (base_case);
1326 base_label = CASE_LABEL (base_case);
/* Dropped/merged entries are first NULLed out in place; the label
   vector is compacted in a separate pass below.  */
1328 /* Discard cases that have the same destination as the
1329 default case. */
1330 if (base_label == default_label)
1332 gimple_switch_set_label (stmt, i, NULL_TREE);
1333 i++;
1334 new_size--;
1335 continue;
1338 base_high = CASE_HIGH (base_case)
1339 ? CASE_HIGH (base_case)
1340 : CASE_LOW (base_case);
1341 i++;
1343 /* Try to merge case labels. Break out when we reach the end
1344 of the label vector or when we cannot merge the next case
1345 label with the current one. */
1346 while (i < old_size)
1348 tree merge_case = gimple_switch_label (stmt, i);
1349 tree merge_label = CASE_LABEL (merge_case);
1350 tree t = int_const_binop (PLUS_EXPR, base_high,
1351 integer_one_node, 1);
1353 /* Merge the cases if they jump to the same place,
1354 and their ranges are consecutive. */
1355 if (merge_label == base_label
1356 && tree_int_cst_equal (CASE_LOW (merge_case), t))
1358 base_high = CASE_HIGH (merge_case) ?
1359 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1360 CASE_HIGH (base_case) = base_high;
1361 gimple_switch_set_label (stmt, i, NULL_TREE);
1362 new_size--;
1363 i++;
1365 else
1366 break;
/* Compaction: slide the surviving (non-NULL) labels down over the
   NULL slots, preserving their sorted order.  */
1370 /* Compress the case labels in the label vector, and adjust the
1371 length of the vector. */
1372 for (i = 0, j = 0; i < new_size; i++)
1374 while (! gimple_switch_label (stmt, j))
1375 j++;
1376 gimple_switch_set_label (stmt, i,
1377 gimple_switch_label (stmt, j++));
1380 gcc_assert (new_size <= old_size);
1381 gimple_switch_set_num_labels (stmt, new_size);
1386 /* Checks whether we can merge block B into block A. */
/* A must fall through unconditionally to B, B must have A as its only
   predecessor, and nothing in either block (EH edges, control-altering
   statements, user or non-local labels, un-removable PHIs, loop latch
   status) may forbid the merge.  */
1388 static bool
1389 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1391 gimple stmt;
1392 gimple_stmt_iterator gsi;
1393 gimple_seq phis;
1395 if (!single_succ_p (a))
1396 return false;
1398 if (single_succ_edge (a)->flags & (EDGE_ABNORMAL | EDGE_EH))
1399 return false;
1401 if (single_succ (a) != b)
1402 return false;
1404 if (!single_pred_p (b))
1405 return false;
1407 if (b == EXIT_BLOCK_PTR)
1408 return false;
1410 /* If A ends by a statement causing exceptions or something similar, we
1411 cannot merge the blocks. */
1412 stmt = last_stmt (a);
1413 if (stmt && stmt_ends_bb_p (stmt))
1414 return false;
1416 /* Do not allow a block with only a non-local label to be merged. */
1417 if (stmt
1418 && gimple_code (stmt) == GIMPLE_LABEL
1419 && DECL_NONLOCAL (gimple_label_label (stmt)))
1420 return false;
1422 /* Examine the labels at the beginning of B. */
1423 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
1425 tree lab;
1426 stmt = gsi_stmt (gsi);
1427 if (gimple_code (stmt) != GIMPLE_LABEL)
1428 break;
1429 lab = gimple_label_label (stmt);
1431 /* Do not remove user labels. */
1432 if (!DECL_ARTIFICIAL (lab))
1433 return false;
1436 /* Protect the loop latches. */
1437 if (current_loops && b->loop_father->latch == b)
1438 return false;
1440 /* It must be possible to eliminate all phi nodes in B. If ssa form
1441 is not up-to-date, we cannot eliminate any phis; however, if only
1442 some symbols as whole are marked for renaming, this is not a problem,
1443 as phi nodes for those symbols are irrelevant in updating anyway. */
1444 phis = phi_nodes (b);
1445 if (!gimple_seq_empty_p (phis))
1447 gimple_stmt_iterator i;
1449 if (name_mappings_registered_p ())
1450 return false;
/* PHI args can only be propagated if copy-propagation is legal for
   the result (virtual operands are always propagatable).  */
1452 for (i = gsi_start (phis); !gsi_end_p (i); gsi_next (&i))
1454 gimple phi = gsi_stmt (i);
1456 if (!is_gimple_reg (gimple_phi_result (phi))
1457 && !may_propagate_copy (gimple_phi_result (phi),
1458 gimple_phi_arg_def (phi, 0)))
1459 return false;
1463 return true;
1466 /* Return true if the var whose chain of uses starts at PTR has no
1467 nondebug uses. */
1468 bool
1469 has_zero_uses_1 (const ssa_use_operand_t *head)
1471 const ssa_use_operand_t *ptr;
1473 for (ptr = head->next; ptr != head; ptr = ptr->next)
1474 if (!is_gimple_debug (USE_STMT (ptr)))
1475 return false;
1477 return true;
1480 /* Return true if the var whose chain of uses starts at PTR has a
1481 single nondebug use. Set USE_P and STMT to that single nondebug
1482 use, if so, or to NULL otherwise. */
1483 bool
1484 single_imm_use_1 (const ssa_use_operand_t *head,
1485 use_operand_p *use_p, gimple *stmt)
1487 ssa_use_operand_t *ptr, *single_use = 0;
1489 for (ptr = head->next; ptr != head; ptr = ptr->next)
1490 if (!is_gimple_debug (USE_STMT (ptr)))
1492 if (single_use)
1494 single_use = NULL;
1495 break;
1497 single_use = ptr;
1500 if (use_p)
1501 *use_p = single_use;
1503 if (stmt)
1504 *stmt = single_use ? single_use->loc.stmt : NULL;
1506 return !!single_use;
1509 /* Replaces all uses of NAME by VAL. */
/* After rewriting, non-PHI statements are refolded in place and their
   EH information and operand caches updated; abnormal-PHI-edge uses
   force SSA_NAME_OCCURS_IN_ABNORMAL_PHI on VAL.  */
1511 void
1512 replace_uses_by (tree name, tree val)
1514 imm_use_iterator imm_iter;
1515 use_operand_p use;
1516 gimple stmt;
1517 edge e;
1519 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1521 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1523 replace_exp (use, val);
1525 if (gimple_code (stmt) == GIMPLE_PHI)
1527 e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
1528 if (e->flags & EDGE_ABNORMAL)
1530 /* This can only occur for virtual operands, since
1531 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1532 would prevent replacement. */
1533 gcc_assert (!is_gimple_reg (name));
1534 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1539 if (gimple_code (stmt) != GIMPLE_PHI)
1541 size_t i;
1543 fold_stmt_inplace (stmt);
1544 if (cfgcleanup_altered_bbs)
1545 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
/* Addresses embedded in the statement may have become invariant (or
   lost invariance) after the replacement; recompute their flags.  */
1547 /* FIXME. This should go in update_stmt. */
1548 for (i = 0; i < gimple_num_ops (stmt); i++)
1550 tree op = gimple_op (stmt, i);
1551 /* Operands may be empty here. For example, the labels
1552 of a GIMPLE_COND are nulled out following the creation
1553 of the corresponding CFG edges. */
1554 if (op && TREE_CODE (op) == ADDR_EXPR)
1555 recompute_tree_invariant_for_addr_expr (op);
1558 maybe_clean_or_replace_eh_stmt (stmt, stmt);
1559 update_stmt (stmt);
1563 gcc_assert (has_zero_uses (name));
1565 /* Also update the trees stored in loop structures. */
1566 if (current_loops)
1568 struct loop *loop;
1569 loop_iterator li;
1571 FOR_EACH_LOOP (li, loop, 0)
1573 substitute_in_loop_info (loop, name, val);
1578 /* Merge block B into block A. */
/* Caller must have checked gimple_can_merge_blocks_p (A, B).  B's PHIs
   are eliminated (by propagation or by emitting copies into A), B's
   labels are removed or moved, and B's statement sequence is appended
   to A.  */
1580 static void
1581 gimple_merge_blocks (basic_block a, basic_block b)
1583 gimple_stmt_iterator last, gsi, psi;
1584 gimple_seq phis = phi_nodes (b);
1586 if (dump_file)
1587 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1589 /* Remove all single-valued PHI nodes from block B of the form
1590 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1591 gsi = gsi_last_bb (a);
1592 for (psi = gsi_start (phis); !gsi_end_p (psi); )
1594 gimple phi = gsi_stmt (psi);
1595 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1596 gimple copy;
1597 bool may_replace_uses = !is_gimple_reg (def)
1598 || may_propagate_copy (def, use);
1600 /* In case we maintain loop closed ssa form, do not propagate arguments
1601 of loop exit phi nodes. */
1602 if (current_loops
1603 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1604 && is_gimple_reg (def)
1605 && TREE_CODE (use) == SSA_NAME
1606 && a->loop_father != b->loop_father)
1607 may_replace_uses = false;
1609 if (!may_replace_uses)
1611 gcc_assert (is_gimple_reg (def));
1613 /* Note that just emitting the copies is fine -- there is no problem
1614 with ordering of phi nodes. This is because A is the single
1615 predecessor of B, therefore results of the phi nodes cannot
1616 appear as arguments of the phi nodes. */
1617 copy = gimple_build_assign (def, use);
1618 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1619 remove_phi_node (&psi, false);
1621 else
1623 /* If we deal with a PHI for virtual operands, we can simply
1624 propagate these without fussing with folding or updating
1625 the stmt. */
1626 if (!is_gimple_reg (def))
1628 imm_use_iterator iter;
1629 use_operand_p use_p;
1630 gimple stmt;
1632 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1633 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1634 SET_USE (use_p, use);
1636 else
1637 replace_uses_by (def, use);
1639 remove_phi_node (&psi, true);
1643 /* Ensure that B follows A. */
1644 move_block_after (b, a);
1646 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1647 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1649 /* Remove labels from B and set gimple_bb to A for other statements. */
1650 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
1652 gimple stmt = gsi_stmt (gsi);
1653 if (gimple_code (stmt) == GIMPLE_LABEL)
1655 tree label = gimple_label_label (stmt);
1656 int lp_nr;
1658 gsi_remove (&gsi, false);
1660 /* Now that we can thread computed gotos, we might have
1661 a situation where we have a forced label in block B
1662 However, the label at the start of block B might still be
1663 used in other ways (think about the runtime checking for
1664 Fortran assigned gotos). So we can not just delete the
1665 label. Instead we move the label to the start of block A. */
1666 if (FORCED_LABEL (label))
1668 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
1669 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
/* A removed label can no longer mark a landing pad; detach it.  */
1672 lp_nr = EH_LANDING_PAD_NR (label);
1673 if (lp_nr)
1675 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
1676 lp->post_landing_pad = NULL;
1679 else
1681 gimple_set_bb (stmt, a);
1682 gsi_next (&gsi);
1686 /* Merge the sequences. */
1687 last = gsi_last_bb (a);
1688 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
1689 set_bb_seq (b, NULL);
1691 if (cfgcleanup_altered_bbs)
1692 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
1696 /* Return the one of two successors of BB that is not reachable by a
1697 complex edge, if there is one. Else, return BB. We use
1698 this in optimizations that use post-dominators for their heuristics,
1699 to catch the cases in C++ where function calls are involved. */
1701 basic_block
1702 single_noncomplex_succ (basic_block bb)
1704 edge e0, e1;
1705 if (EDGE_COUNT (bb->succs) != 2)
1706 return bb;
1708 e0 = EDGE_SUCC (bb, 0);
1709 e1 = EDGE_SUCC (bb, 1);
1710 if (e0->flags & EDGE_COMPLEX)
1711 return e1->dest;
1712 if (e1->flags & EDGE_COMPLEX)
1713 return e0->dest;
1715 return bb;
1718 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1720 void
1721 notice_special_calls (gimple call)
1723 int flags = gimple_call_flags (call);
1725 if (flags & ECF_MAY_BE_ALLOCA)
1726 cfun->calls_alloca = true;
1727 if (flags & ECF_RETURNS_TWICE)
1728 cfun->calls_setjmp = true;
1732 /* Clear flags set by notice_special_calls. Used by dead code removal
1733 to update the flags. */
1735 void
1736 clear_special_calls (void)
1738 cfun->calls_alloca = false;
1739 cfun->calls_setjmp = false;
1742 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1744 static void
1745 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1747 /* Since this block is no longer reachable, we can just delete all
1748 of its PHI nodes. */
1749 remove_phi_nodes (bb);
1751 /* Remove edges to BB's successors. */
1752 while (EDGE_COUNT (bb->succs) > 0)
1753 remove_edge (EDGE_SUCC (bb, 0));
1757 /* Remove statements of basic block BB. */
1759 static void
1760 remove_bb (basic_block bb)
1762 gimple_stmt_iterator i;
1764 if (dump_file)
1766 fprintf (dump_file, "Removing basic block %d\n", bb->index);
1767 if (dump_flags & TDF_DETAILS)
1769 dump_bb (bb, dump_file, 0);
1770 fprintf (dump_file, "\n");
1774 if (current_loops)
1776 struct loop *loop = bb->loop_father;
1778 /* If a loop gets removed, clean up the information associated
1779 with it. */
1780 if (loop->latch == bb
1781 || loop->header == bb)
1782 free_numbers_of_iterations_estimates_loop (loop);
1785 /* Remove all the instructions in the block. */
1786 if (bb_seq (bb) != NULL)
1788 /* Walk backwards so as to get a chance to substitute all
1789 released DEFs into debug stmts. See
1790 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
1791 details. */
1792 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
1794 gimple stmt = gsi_stmt (i);
/* Forced and non-local labels may still be referenced from outside
   this block, so they are moved to the preceding block instead of
   being deleted with the rest of BB.  */
1795 if (gimple_code (stmt) == GIMPLE_LABEL
1796 && (FORCED_LABEL (gimple_label_label (stmt))
1797 || DECL_NONLOCAL (gimple_label_label (stmt))))
1799 basic_block new_bb;
1800 gimple_stmt_iterator new_gsi;
1802 /* A non-reachable non-local label may still be referenced.
1803 But it no longer needs to carry the extra semantics of
1804 non-locality. */
1805 if (DECL_NONLOCAL (gimple_label_label (stmt)))
1807 DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
1808 FORCED_LABEL (gimple_label_label (stmt)) = 1;
1811 new_bb = bb->prev_bb;
1812 new_gsi = gsi_start_bb (new_bb);
1813 gsi_remove (&i, false);
1814 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
1816 else
1818 /* Release SSA definitions if we are in SSA. Note that we
1819 may be called when not in SSA. For example,
1820 final_cleanup calls this function via
1821 cleanup_tree_cfg. */
1822 if (gimple_in_ssa_p (cfun))
1823 release_defs (stmt);
1825 gsi_remove (&i, true);
/* Re-anchor the iterator: removal may have invalidated it, so
   restart from the (new) last statement or step backwards.  */
1828 if (gsi_end_p (i))
1829 i = gsi_last_bb (bb);
1830 else
1831 gsi_prev (&i);
1835 remove_phi_nodes_and_edges_for_unreachable_block (bb);
1836 bb->il.gimple = NULL;
1840 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
1841 predicate VAL, return the edge that will be taken out of the block.
1842 If VAL does not match a unique edge, NULL is returned. */
/* Dispatches on the kind of control statement ending BB: conditional,
   switch, or computed goto.  Non-constant VAL never selects an edge.  */
1844 edge
1845 find_taken_edge (basic_block bb, tree val)
1847 gimple stmt;
1849 stmt = last_stmt (bb);
1851 gcc_assert (stmt);
1852 gcc_assert (is_ctrl_stmt (stmt));
1854 if (val == NULL)
1855 return NULL;
1857 if (!is_gimple_min_invariant (val))
1858 return NULL;
1860 if (gimple_code (stmt) == GIMPLE_COND)
1861 return find_taken_edge_cond_expr (bb, val);
1863 if (gimple_code (stmt) == GIMPLE_SWITCH)
1864 return find_taken_edge_switch_expr (bb, val);
1866 if (computed_goto_p (stmt))
1868 /* Only optimize if the argument is a label, if the argument is
1869 not a label then we can not construct a proper CFG.
1871 It may be the case that we only need to allow the LABEL_REF to
1872 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
1873 appear inside a LABEL_EXPR just to be safe. */
1874 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
1875 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
1876 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
1877 return NULL;
1880 gcc_unreachable ();
1883 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
1884 statement, determine which of the outgoing edges will be taken out of the
1885 block. Return NULL if either edge may be taken. */
1887 static edge
1888 find_taken_edge_computed_goto (basic_block bb, tree val)
1890 basic_block dest;
1891 edge e = NULL;
1893 dest = label_to_block (val);
1894 if (dest)
1896 e = find_edge (bb, dest);
1897 gcc_assert (e != NULL);
1900 return e;
1903 /* Given a constant value VAL and the entry block BB to a COND_EXPR
1904 statement, determine which of the two edges will be taken out of the
1905 block. Return NULL if either edge may be taken. */
1907 static edge
1908 find_taken_edge_cond_expr (basic_block bb, tree val)
1910 edge true_edge, false_edge;
1912 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1914 gcc_assert (TREE_CODE (val) == INTEGER_CST);
1915 return (integer_zerop (val) ? false_edge : true_edge);
1918 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
1919 statement, determine which edge will be taken out of the block. Return
1920 NULL if any edge may be taken. */
1922 static edge
1923 find_taken_edge_switch_expr (basic_block bb, tree val)
1925 basic_block dest_bb;
1926 edge e;
1927 gimple switch_stmt;
1928 tree taken_case;
1930 switch_stmt = last_stmt (bb);
1931 taken_case = find_case_label_for_value (switch_stmt, val);
1932 dest_bb = label_to_block (CASE_LABEL (taken_case));
1934 e = find_edge (bb, dest_bb);
1935 gcc_assert (e);
1936 return e;
1940 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
1941 We can make optimal use here of the fact that the case labels are
1942 sorted: We can do a binary search for a case matching VAL. */
/* Returns the default case when no explicit case matches VAL.  */
1944 static tree
1945 find_case_label_for_value (gimple switch_stmt, tree val)
1947 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
1948 tree default_case = gimple_switch_default_label (switch_stmt);
1950 for (low = 0, high = n; high - low > 1; )
1952 size_t i = (high + low) / 2;
1953 tree t = gimple_switch_label (switch_stmt, i);
1954 int cmp;
1956 /* Cache the result of comparing CASE_LOW and val. */
1957 cmp = tree_int_cst_compare (CASE_LOW (t), val);
1959 if (cmp > 0)
1960 high = i;
1961 else
1962 low = i;
1964 if (CASE_HIGH (t) == NULL)
1966 /* A single-valued case label. */
1967 if (cmp == 0)
1968 return t;
1970 else
1972 /* A case range. We can only handle integer ranges. */
1973 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
1974 return t;
1978 return default_case;
1982 /* Dump a basic block on stderr. */
1984 void
1985 gimple_debug_bb (basic_block bb)
1987 gimple_dump_bb (bb, stderr, 0, TDF_VOPS|TDF_MEMSYMS);
1991 /* Dump basic block with index N on stderr. */
1993 basic_block
1994 gimple_debug_bb_n (int n)
1996 gimple_debug_bb (BASIC_BLOCK (n));
1997 return BASIC_BLOCK (n);
/* Dump the CFG to stderr.  FLAGS are the same dump flags used by the
   tree dumping functions (see TDF_* in tree-pass.h).  */
void
gimple_debug_cfg (int flags)
{
  /* Route through the FILE-based dumper.  */
  gimple_dump_cfg (stderr, flags);
}
2013 /* Dump the program showing basic block boundaries on the given FILE.
2015 FLAGS are the same used by the tree dumping functions (see TDF_* in
2016 tree.h). */
/* With TDF_DETAILS a brief CFG summary is printed first; with
   TDF_STATS the memory statistics are appended; the function body with
   block boundaries is always printed.  */
2018 void
2019 gimple_dump_cfg (FILE *file, int flags)
2021 if (flags & TDF_DETAILS)
2023 const char *funcname
2024 = lang_hooks.decl_printable_name (current_function_decl, 2);
2026 fputc ('\n', file);
2027 fprintf (file, ";; Function %s\n\n", funcname);
2028 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2029 n_basic_blocks, n_edges, last_basic_block);
2031 brief_dump_cfg (file);
2032 fprintf (file, "\n");
2035 if (flags & TDF_STATS)
2036 dump_cfg_stats (file);
2038 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2042 /* Dump CFG statistics on FILE. */
/* Reports memory consumed by basic blocks and edges of the current
   function, plus the running label-coalescing statistics.  NOTE(review):
   SCALE and LABEL appear to format byte counts in human-readable units;
   they are defined elsewhere -- confirm before relying on the output
   format.  */
2044 void
2045 dump_cfg_stats (FILE *file)
2047 static long max_num_merged_labels = 0;
2048 unsigned long size, total = 0;
2049 long num_edges;
2050 basic_block bb;
2051 const char * const fmt_str = "%-30s%-13s%12s\n";
2052 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2053 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2054 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2055 const char *funcname
2056 = lang_hooks.decl_printable_name (current_function_decl, 2);
2059 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2061 fprintf (file, "---------------------------------------------------------\n");
2062 fprintf (file, fmt_str, "", " Number of ", "Memory");
2063 fprintf (file, fmt_str, "", " instances ", "used ");
2064 fprintf (file, "---------------------------------------------------------\n");
2066 size = n_basic_blocks * sizeof (struct basic_block_def);
2067 total += size;
2068 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2069 SCALE (size), LABEL (size));
2071 num_edges = 0;
2072 FOR_EACH_BB (bb)
2073 num_edges += EDGE_COUNT (bb->succs);
2074 size = num_edges * sizeof (struct edge_def);
2075 total += size;
2076 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2078 fprintf (file, "---------------------------------------------------------\n");
2079 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2080 LABEL (total));
2081 fprintf (file, "---------------------------------------------------------\n");
2082 fprintf (file, "\n");
/* Track the historical maximum of merged labels across calls.  */
2084 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2085 max_num_merged_labels = cfg_stats.num_merged_labels;
2087 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2088 cfg_stats.num_merged_labels, max_num_merged_labels);
2090 fprintf (file, "\n");
/* Dump CFG statistics to stderr.  Deliberately kept extern so that it
   is always linked into the final executable for debugger use.  */
void
debug_cfg_stats (void)
{
  dump_cfg_stats (stderr);
}
2104 /* Dump the flowgraph to a .vcg FILE. */
/* Emits the CFG in the VCG graph-description format: one node per
   basic block (plus ENTRY/EXIT) and one edge record per CFG edge, with
   fake edges drawn dotted at low priority.  */
2106 static void
2107 gimple_cfg2vcg (FILE *file)
2109 edge e;
2110 edge_iterator ei;
2111 basic_block bb;
2112 const char *funcname
2113 = lang_hooks.decl_printable_name (current_function_decl, 2);
2115 /* Write the file header. */
2116 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2117 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2118 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2120 /* Write blocks and edges. */
2121 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2123 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2124 e->dest->index);
2126 if (e->flags & EDGE_FAKE)
2127 fprintf (file, " linestyle: dotted priority: 10");
2128 else
2129 fprintf (file, " linestyle: solid priority: 100");
2131 fprintf (file, " }\n");
2133 fputc ('\n', file);
2135 FOR_EACH_BB (bb)
/* Each node is labelled with the codes and source lines of the
   block's first and last statements.  */
2137 enum gimple_code head_code, end_code;
2138 const char *head_name, *end_name;
2139 int head_line = 0;
2140 int end_line = 0;
2141 gimple first = first_stmt (bb);
2142 gimple last = last_stmt (bb);
2144 if (first)
2146 head_code = gimple_code (first);
2147 head_name = gimple_code_name[head_code];
2148 head_line = get_lineno (first);
2150 else
2151 head_name = "no-statement";
2153 if (last)
2155 end_code = gimple_code (last);
2156 end_name = gimple_code_name[end_code];
2157 end_line = get_lineno (last);
2159 else
2160 end_name = "no-statement";
2162 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2163 bb->index, bb->index, head_name, head_line, end_name,
2164 end_line);
2166 FOR_EACH_EDGE (e, ei, bb->succs)
2168 if (e->dest == EXIT_BLOCK_PTR)
2169 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2170 else
2171 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2173 if (e->flags & EDGE_FAKE)
2174 fprintf (file, " priority: 10 linestyle: dotted");
2175 else
2176 fprintf (file, " priority: 100 linestyle: solid");
2178 fprintf (file, " }\n");
2181 if (bb->next_bb != EXIT_BLOCK_PTR)
2182 fputc ('\n', file);
2185 fputs ("}\n\n", file);
2190 /*---------------------------------------------------------------------------
2191 Miscellaneous helpers
2192 ---------------------------------------------------------------------------*/
2194 /* Return true if T represents a stmt that always transfers control. */
2196 bool
2197 is_ctrl_stmt (gimple t)
2199 switch (gimple_code (t))
2201 case GIMPLE_COND:
2202 case GIMPLE_SWITCH:
2203 case GIMPLE_GOTO:
2204 case GIMPLE_RETURN:
2205 case GIMPLE_RESX:
2206 return true;
2207 default:
2208 return false;
2213 /* Return true if T is a statement that may alter the flow of control
2214 (e.g., a call to a non-returning function). */
2216 bool
2217 is_ctrl_altering_stmt (gimple t)
2219 gcc_assert (t);
2221 switch (gimple_code (t))
2223 case GIMPLE_CALL:
2225 int flags = gimple_call_flags (t);
2227 /* A non-pure/const call alters flow control if the current
2228 function has nonlocal labels. */
2229 if (!(flags & (ECF_CONST | ECF_PURE)) && cfun->has_nonlocal_label)
2230 return true;
2232 /* A call also alters control flow if it does not return. */
2233 if (flags & ECF_NORETURN)
2234 return true;
/* Otherwise fall through to the can-throw check below.  */
2236 break;
2238 case GIMPLE_EH_DISPATCH:
2239 /* EH_DISPATCH branches to the individual catch handlers at
2240 this level of a try or allowed-exceptions region. It can
2241 fallthru to the next statement as well. */
2242 return true;
2244 case GIMPLE_ASM:
2245 if (gimple_asm_nlabels (t) > 0)
2246 return true;
2247 break;
2249 CASE_GIMPLE_OMP:
2250 /* OpenMP directives alter control flow. */
2251 return true;
2253 default:
2254 break;
2257 /* If a statement can throw, it alters control flow. */
2258 return stmt_can_throw_internal (t);
2262 /* Return true if T is a simple local goto. */
2264 bool
2265 simple_goto_p (gimple t)
2267 return (gimple_code (t) == GIMPLE_GOTO
2268 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2272 /* Return true if T can make an abnormal transfer of control flow.
2273 Transfers of control flow associated with EH are excluded. */
2275 bool
2276 stmt_can_make_abnormal_goto (gimple t)
2278 if (computed_goto_p (t))
2279 return true;
2280 if (is_gimple_call (t))
2281 return gimple_has_side_effects (t) && cfun->has_nonlocal_label;
2282 return false;
2286 /* Return true if STMT should start a new basic block. PREV_STMT is
2287 the statement preceding STMT. It is used when STMT is a label or a
2288 case label. Labels should only start a new basic block if their
2289 previous statement wasn't a label. Otherwise, sequence of labels
2290 would generate unnecessary basic blocks that only contain a single
2291 label. */
2293 static inline bool
2294 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2296 if (stmt == NULL)
2297 return false;
2299 /* Labels start a new basic block only if the preceding statement
2300 wasn't a label of the same type. This prevents the creation of
2301 consecutive blocks that have nothing but a single label. */
2302 if (gimple_code (stmt) == GIMPLE_LABEL)
2304 /* Nonlocal and computed GOTO targets always start a new block. */
2305 if (DECL_NONLOCAL (gimple_label_label (stmt))
2306 || FORCED_LABEL (gimple_label_label (stmt)))
2307 return true;
2309 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2311 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
2312 return true;
/* Label coalesced into the previous label's block; count it for
   the statistics reported by dump_cfg_stats.  */
2314 cfg_stats.num_merged_labels++;
2315 return false;
2317 else
2318 return true;
2321 return false;
2325 /* Return true if T should end a basic block. */
2327 bool
2328 stmt_ends_bb_p (gimple t)
2330 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2333 /* Remove block annotations and other data structures. */
/* Currently this only drops the label-to-block map; the map is
   rebuilt when the CFG is built again.  */
2335 void
2336 delete_tree_cfg_annotations (void)
2338 label_to_block_map = NULL;
2342 /* Return the first statement in basic block BB. */
2344 gimple
2345 first_stmt (basic_block bb)
2347 gimple_stmt_iterator i = gsi_start_bb (bb);
2348 gimple stmt = NULL;
2350 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2352 gsi_next (&i);
2353 stmt = NULL;
2355 return stmt;
2358 /* Return the first non-label statement in basic block BB. */
2360 static gimple
2361 first_non_label_stmt (basic_block bb)
2363 gimple_stmt_iterator i = gsi_start_bb (bb);
2364 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2365 gsi_next (&i);
2366 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2369 /* Return the last statement in basic block BB. */
2371 gimple
2372 last_stmt (basic_block bb)
2374 gimple_stmt_iterator i = gsi_last_bb (bb);
2375 gimple stmt = NULL;
2377 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2379 gsi_prev (&i);
2380 stmt = NULL;
2382 return stmt;
2385 /* Return the last statement of an otherwise empty block. Return NULL
2386 if the block is totally empty, or if it contains more than one
2387 statement. */
2389 gimple
2390 last_and_only_stmt (basic_block bb)
2392 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2393 gimple last, prev;
2395 if (gsi_end_p (i))
2396 return NULL;
2398 last = gsi_stmt (i);
2399 gsi_prev_nondebug (&i);
2400 if (gsi_end_p (i))
2401 return last;
2403 /* Empty statements should no longer appear in the instruction stream.
2404 Everything that might have appeared before should be deleted by
2405 remove_useless_stmts, and the optimizers should just gsi_remove
2406 instead of smashing with build_empty_stmt.
2408 Thus the only thing that should appear here in a block containing
2409 one executable statement is a label. */
2410 prev = gsi_stmt (i);
2411 if (gimple_code (prev) == GIMPLE_LABEL)
2412 return last;
2413 else
2414 return NULL;
2417 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2419 static void
2420 reinstall_phi_args (edge new_edge, edge old_edge)
2422 edge_var_map_vector v;
2423 edge_var_map *vm;
2424 int i;
2425 gimple_stmt_iterator phis;
2427 v = redirect_edge_var_map_vector (old_edge);
2428 if (!v)
2429 return;
2431 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2432 VEC_iterate (edge_var_map, v, i, vm) && !gsi_end_p (phis);
2433 i++, gsi_next (&phis))
2435 gimple phi = gsi_stmt (phis);
2436 tree result = redirect_edge_var_map_result (vm);
2437 tree arg = redirect_edge_var_map_def (vm);
2439 gcc_assert (result == gimple_phi_result (phi));
2441 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2444 redirect_edge_var_map_clear (old_edge);
2447 /* Returns the basic block after which the new basic block created
2448 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2449 near its "logical" location. This is of most help to humans looking
2450 at debugging dumps. */
2452 static basic_block
2453 split_edge_bb_loc (edge edge_in)
2455 basic_block dest = edge_in->dest;
2456 basic_block dest_prev = dest->prev_bb;
2458 if (dest_prev)
2460 edge e = find_edge (dest_prev, dest);
2461 if (e && !(e->flags & EDGE_COMPLEX))
2462 return edge_in->src;
2464 return dest_prev;
2467 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2468 Abort on abnormal edges. */
2470 static basic_block
2471 gimple_split_edge (edge edge_in)
2473 basic_block new_bb, after_bb, dest;
2474 edge new_edge, e;
2476 /* Abnormal edges cannot be split. */
2477 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2479 dest = edge_in->dest;
2481 after_bb = split_edge_bb_loc (edge_in);
2483 new_bb = create_empty_bb (after_bb);
2484 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2485 new_bb->count = edge_in->count;
2486 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2487 new_edge->probability = REG_BR_PROB_BASE;
2488 new_edge->count = edge_in->count;
2490 e = redirect_edge_and_branch (edge_in, new_bb);
2491 gcc_assert (e == edge_in);
2492 reinstall_phi_args (new_edge, e);
2494 return new_bb;
2497 /* Callback for walk_tree, check that all elements with address taken are
2498 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2499 inside a PHI node. */
/* NOTE(review): per walk_tree convention, a non-NULL return value stops
   the walk and hands back the offending sub-tree; NULL means OK.  */
2501 static tree
2502 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2504 tree t = *tp, x;
/* Types carry nothing to verify here; prune the walk below them.  */
2506 if (TYPE_P (t))
2507 *walk_subtrees = 0;
2509 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2510 #define CHECK_OP(N, MSG) \
2511 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2512 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2514 switch (TREE_CODE (t))
2516 case SSA_NAME:
2517 if (SSA_NAME_IN_FREE_LIST (t))
2519 error ("SSA name in freelist but still referenced");
2520 return *tp;
2522 break;
2524 case INDIRECT_REF:
2525 x = TREE_OPERAND (t, 0);
2526 if (!is_gimple_reg (x) && !is_gimple_min_invariant (x))
2528 error ("Indirect reference's operand is not a register or a constant.");
2529 return x;
2531 break;
2533 case ASSERT_EXPR:
2534 x = fold (ASSERT_EXPR_COND (t));
2535 if (x == boolean_false_node)
2537 error ("ASSERT_EXPR with an always-false condition");
2538 return *tp;
2540 break;
2542 case MODIFY_EXPR:
2543 error ("MODIFY_EXPR not expected while having tuples.");
2544 return *tp;
2546 case ADDR_EXPR:
2548 bool old_constant;
2549 bool old_side_effects;
2550 bool new_constant;
2551 bool new_side_effects;
2553 gcc_assert (is_gimple_address (t));
/* Re-derive TREE_CONSTANT/TREE_SIDE_EFFECTS and verify the cached
   bits on the ADDR_EXPR were kept up to date.  */
2555 old_constant = TREE_CONSTANT (t);
2556 old_side_effects = TREE_SIDE_EFFECTS (t);
2558 recompute_tree_invariant_for_addr_expr (t);
2559 new_side_effects = TREE_SIDE_EFFECTS (t);
2560 new_constant = TREE_CONSTANT (t);
2562 if (old_constant != new_constant)
2564 error ("constant not recomputed when ADDR_EXPR changed");
2565 return t;
2567 if (old_side_effects != new_side_effects)
2569 error ("side effects not recomputed when ADDR_EXPR changed");
2570 return t;
2573 /* Skip any references (they will be checked when we recurse down the
2574 tree) and ensure that any variable used as a prefix is marked
2575 addressable. */
2576 for (x = TREE_OPERAND (t, 0);
2577 handled_component_p (x);
2578 x = TREE_OPERAND (x, 0))
/* Only decls need (and can carry) the TREE_ADDRESSABLE bit.  */
2581 if (!(TREE_CODE (x) == VAR_DECL
2582 || TREE_CODE (x) == PARM_DECL
2583 || TREE_CODE (x) == RESULT_DECL))
2584 return NULL;
2585 if (!TREE_ADDRESSABLE (x))
2587 error ("address taken, but ADDRESSABLE bit not set");
2588 return x;
2590 if (DECL_GIMPLE_REG_P (x))
2592 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2593 return x;
2596 break;
2599 case COND_EXPR:
2600 x = COND_EXPR_COND (t);
2601 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2603 error ("non-integral used in condition");
2604 return x;
2606 if (!is_gimple_condexpr (x))
2608 error ("invalid conditional operand");
2609 return x;
2611 break;
2613 case NON_LVALUE_EXPR:
2614 gcc_unreachable ();
2616 CASE_CONVERT:
2617 case FIX_TRUNC_EXPR:
2618 case FLOAT_EXPR:
2619 case NEGATE_EXPR:
2620 case ABS_EXPR:
2621 case BIT_NOT_EXPR:
2622 case TRUTH_NOT_EXPR:
2623 CHECK_OP (0, "invalid operand to unary operator");
2624 break;
2626 case REALPART_EXPR:
2627 case IMAGPART_EXPR:
2628 case COMPONENT_REF:
2629 case ARRAY_REF:
2630 case ARRAY_RANGE_REF:
2631 case BIT_FIELD_REF:
2632 case VIEW_CONVERT_EXPR:
2633 /* We have a nest of references. Verify that each of the operands
2634 that determine where to reference is either a constant or a variable,
2635 verify that the base is valid, and then show we've already checked
2636 the subtrees. */
2637 while (handled_component_p (t))
2639 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2640 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2641 else if (TREE_CODE (t) == ARRAY_REF
2642 || TREE_CODE (t) == ARRAY_RANGE_REF)
2644 CHECK_OP (1, "invalid array index");
2645 if (TREE_OPERAND (t, 2))
2646 CHECK_OP (2, "invalid array lower bound");
2647 if (TREE_OPERAND (t, 3))
2648 CHECK_OP (3, "invalid array stride");
2650 else if (TREE_CODE (t) == BIT_FIELD_REF)
/* BIT_FIELD_REF: operand 1 is the field size, operand 2 the bit
   position; both must be host-representable unsigned integers and
   the size must agree with the result type's precision/mode.  */
2652 if (!host_integerp (TREE_OPERAND (t, 1), 1)
2653 || !host_integerp (TREE_OPERAND (t, 2), 1))
2655 error ("invalid position or size operand to BIT_FIELD_REF");
2656 return t;
2658 else if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2659 && (TYPE_PRECISION (TREE_TYPE (t))
2660 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2662 error ("integral result type precision does not match "
2663 "field size of BIT_FIELD_REF");
2664 return t;
2666 if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2667 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2668 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2670 error ("mode precision of non-integral result does not "
2671 "match field size of BIT_FIELD_REF");
2672 return t;
2676 t = TREE_OPERAND (t, 0);
/* T is now the base of the reference chain.  */
2679 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2681 error ("invalid reference prefix");
2682 return t;
2684 *walk_subtrees = 0;
2685 break;
2686 case PLUS_EXPR:
2687 case MINUS_EXPR:
2688 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
2689 POINTER_PLUS_EXPR. */
2690 if (POINTER_TYPE_P (TREE_TYPE (t)))
2692 error ("invalid operand to plus/minus, type is a pointer");
2693 return t;
2695 CHECK_OP (0, "invalid operand to binary operator");
2696 CHECK_OP (1, "invalid operand to binary operator");
2697 break;
2699 case POINTER_PLUS_EXPR:
2700 /* Check to make sure the first operand is a pointer or reference type. */
2701 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
2703 error ("invalid operand to pointer plus, first operand is not a pointer");
2704 return t;
2706 /* Check to make sure the second operand is an integer with type of
2707 sizetype. */
2708 if (!useless_type_conversion_p (sizetype,
2709 TREE_TYPE (TREE_OPERAND (t, 1))))
2711 error ("invalid operand to pointer plus, second operand is not an "
2712 "integer with type of sizetype.");
2713 return t;
2715 /* FALLTHROUGH */
2716 case LT_EXPR:
2717 case LE_EXPR:
2718 case GT_EXPR:
2719 case GE_EXPR:
2720 case EQ_EXPR:
2721 case NE_EXPR:
2722 case UNORDERED_EXPR:
2723 case ORDERED_EXPR:
2724 case UNLT_EXPR:
2725 case UNLE_EXPR:
2726 case UNGT_EXPR:
2727 case UNGE_EXPR:
2728 case UNEQ_EXPR:
2729 case LTGT_EXPR:
2730 case MULT_EXPR:
2731 case TRUNC_DIV_EXPR:
2732 case CEIL_DIV_EXPR:
2733 case FLOOR_DIV_EXPR:
2734 case ROUND_DIV_EXPR:
2735 case TRUNC_MOD_EXPR:
2736 case CEIL_MOD_EXPR:
2737 case FLOOR_MOD_EXPR:
2738 case ROUND_MOD_EXPR:
2739 case RDIV_EXPR:
2740 case EXACT_DIV_EXPR:
2741 case MIN_EXPR:
2742 case MAX_EXPR:
2743 case LSHIFT_EXPR:
2744 case RSHIFT_EXPR:
2745 case LROTATE_EXPR:
2746 case RROTATE_EXPR:
2747 case BIT_IOR_EXPR:
2748 case BIT_XOR_EXPR:
2749 case BIT_AND_EXPR:
2750 CHECK_OP (0, "invalid operand to binary operator");
2751 CHECK_OP (1, "invalid operand to binary operator");
2752 break;
2754 case CONSTRUCTOR:
/* Constant vector constructors need no further element checking.  */
2755 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2756 *walk_subtrees = 0;
2757 break;
2759 default:
2760 break;
/* NULL tells walk_tree everything below this node checked out.  */
2762 return NULL;
2764 #undef CHECK_OP
2768 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2769 Returns true if there is an error, otherwise false. */
2771 static bool
2772 verify_types_in_gimple_min_lval (tree expr)
2774 tree op;
2776 if (is_gimple_id (expr))
2777 return false;
2779 if (!INDIRECT_REF_P (expr)
2780 && TREE_CODE (expr) != TARGET_MEM_REF)
2782 error ("invalid expression for min lvalue");
2783 return true;
2786 /* TARGET_MEM_REFs are strange beasts. */
2787 if (TREE_CODE (expr) == TARGET_MEM_REF)
2788 return false;
2790 op = TREE_OPERAND (expr, 0);
2791 if (!is_gimple_val (op))
2793 error ("invalid operand in indirect reference");
2794 debug_generic_stmt (op);
2795 return true;
2797 if (!useless_type_conversion_p (TREE_TYPE (expr),
2798 TREE_TYPE (TREE_TYPE (op))))
2800 error ("type mismatch in indirect reference");
2801 debug_generic_stmt (TREE_TYPE (expr));
2802 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2803 return true;
2806 return false;
2809 /* Verify if EXPR is a valid GIMPLE reference expression. If
2810 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
2811 if there is an error, otherwise false. */
2813 static bool
2814 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
/* Peel the chain of handled components outermost-first, checking the
   operands and type consistency at each level, then validate the base.  */
2816 while (handled_component_p (expr))
2818 tree op = TREE_OPERAND (expr, 0);
/* Array index, lower bound (op 2) and stride (op 3) must all be
   gimple values when present.  */
2820 if (TREE_CODE (expr) == ARRAY_REF
2821 || TREE_CODE (expr) == ARRAY_RANGE_REF)
2823 if (!is_gimple_val (TREE_OPERAND (expr, 1))
2824 || (TREE_OPERAND (expr, 2)
2825 && !is_gimple_val (TREE_OPERAND (expr, 2)))
2826 || (TREE_OPERAND (expr, 3)
2827 && !is_gimple_val (TREE_OPERAND (expr, 3))))
2829 error ("invalid operands to array reference");
2830 debug_generic_stmt (expr);
2831 return true;
2835 /* Verify if the reference array element types are compatible. */
2836 if (TREE_CODE (expr) == ARRAY_REF
2837 && !useless_type_conversion_p (TREE_TYPE (expr),
2838 TREE_TYPE (TREE_TYPE (op))))
2840 error ("type mismatch in array reference");
2841 debug_generic_stmt (TREE_TYPE (expr));
2842 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2843 return true;
2845 if (TREE_CODE (expr) == ARRAY_RANGE_REF
2846 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
2847 TREE_TYPE (TREE_TYPE (op))))
2849 error ("type mismatch in array range reference");
2850 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
2851 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2852 return true;
/* Real/imagpart must yield the complex type's component type.  */
2855 if ((TREE_CODE (expr) == REALPART_EXPR
2856 || TREE_CODE (expr) == IMAGPART_EXPR)
2857 && !useless_type_conversion_p (TREE_TYPE (expr),
2858 TREE_TYPE (TREE_TYPE (op))))
2860 error ("type mismatch in real/imagpart reference");
2861 debug_generic_stmt (TREE_TYPE (expr));
2862 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2863 return true;
/* A COMPONENT_REF must have the type of the FIELD_DECL it reads.  */
2866 if (TREE_CODE (expr) == COMPONENT_REF
2867 && !useless_type_conversion_p (TREE_TYPE (expr),
2868 TREE_TYPE (TREE_OPERAND (expr, 1))))
2870 error ("type mismatch in component reference");
2871 debug_generic_stmt (TREE_TYPE (expr));
2872 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
2873 return true;
2876 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2878 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
2879 that their operand is not an SSA name or an invariant when
2880 requiring an lvalue (this usually means there is a SRA or IPA-SRA
2881 bug). Otherwise there is nothing to verify, gross mismatches at
2882 most invoke undefined behavior. */
2883 if (require_lvalue
2884 && (TREE_CODE (op) == SSA_NAME
2885 || is_gimple_min_invariant (op)))
2887 error ("Conversion of an SSA_NAME on the left hand side.");
2888 debug_generic_stmt (expr);
2889 return true;
2891 else if (!handled_component_p (op))
2892 return false;
2895 expr = op;
/* EXPR is now the innermost base of the reference.  */
2898 return ((require_lvalue || !is_gimple_min_invariant (expr))
2899 && verify_types_in_gimple_min_lval (expr));
2902 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
2903 list of pointer-to types that is trivially convertible to DEST. */
2905 static bool
2906 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
2908 tree src;
2910 if (!TYPE_POINTER_TO (src_obj))
2911 return true;
2913 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
2914 if (useless_type_conversion_p (dest, src))
2915 return true;
2917 return false;
2920 /* Return true if TYPE1 is a fixed-point type and if conversions to and
2921 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
2923 static bool
2924 valid_fixed_convert_types_p (tree type1, tree type2)
2926 return (FIXED_POINT_TYPE_P (type1)
2927 && (INTEGRAL_TYPE_P (type2)
2928 || SCALAR_FLOAT_TYPE_P (type2)
2929 || FIXED_POINT_TYPE_P (type2)));
2932 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
2933 is a problem, otherwise false. */
2935 static bool
2936 verify_gimple_call (gimple stmt)
2938 tree fn = gimple_call_fn (stmt);
2939 tree fntype;
2940 unsigned i;
2942 if (TREE_CODE (fn) != OBJ_TYPE_REF
2943 && !is_gimple_val (fn))
2945 error ("invalid function in gimple call");
2946 debug_generic_stmt (fn);
2947 return true;
2950 if (!POINTER_TYPE_P (TREE_TYPE (fn))
2951 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
2952 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE))
2954 error ("non-function in gimple call");
2955 return true;
2958 if (gimple_call_lhs (stmt)
2959 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
2960 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
2962 error ("invalid LHS in gimple call");
2963 return true;
2966 if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
2968 error ("LHS in noreturn call");
2969 return true;
2972 fntype = TREE_TYPE (TREE_TYPE (fn));
2973 if (gimple_call_lhs (stmt)
2974 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
2975 TREE_TYPE (fntype))
2976 /* ??? At least C++ misses conversions at assignments from
2977 void * call results.
2978 ??? Java is completely off. Especially with functions
2979 returning java.lang.Object.
2980 For now simply allow arbitrary pointer type conversions. */
2981 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
2982 && POINTER_TYPE_P (TREE_TYPE (fntype))))
2984 error ("invalid conversion in gimple call");
2985 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
2986 debug_generic_stmt (TREE_TYPE (fntype));
2987 return true;
2990 if (gimple_call_chain (stmt)
2991 && !is_gimple_val (gimple_call_chain (stmt)))
2993 error ("invalid static chain in gimple call");
2994 debug_generic_stmt (gimple_call_chain (stmt));
2995 return true;
2998 /* If there is a static chain argument, this should not be an indirect
2999 call, and the decl should have DECL_STATIC_CHAIN set. */
3000 if (gimple_call_chain (stmt))
3002 if (TREE_CODE (fn) != ADDR_EXPR
3003 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
3005 error ("static chain in indirect gimple call");
3006 return true;
3008 fn = TREE_OPERAND (fn, 0);
3010 if (!DECL_STATIC_CHAIN (fn))
3012 error ("static chain with function that doesn't use one");
3013 return true;
3017 /* ??? The C frontend passes unpromoted arguments in case it
3018 didn't see a function declaration before the call. So for now
3019 leave the call arguments mostly unverified. Once we gimplify
3020 unit-at-a-time we have a chance to fix this. */
3022 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3024 tree arg = gimple_call_arg (stmt, i);
3025 if (!is_gimple_operand (arg))
3027 error ("invalid argument to gimple call");
3028 debug_generic_expr (arg);
3032 return false;
3035 /* Verifies the gimple comparison with the result type TYPE and
3036 the operands OP0 and OP1. */
3038 static bool
3039 verify_gimple_comparison (tree type, tree op0, tree op1)
3041 tree op0_type = TREE_TYPE (op0);
3042 tree op1_type = TREE_TYPE (op1);
3044 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3046 error ("invalid operands in gimple comparison");
3047 return true;
3050 /* For comparisons we do not have the operations type as the
3051 effective type the comparison is carried out in. Instead
3052 we require that either the first operand is trivially
3053 convertible into the second, or the other way around.
3054 The resulting type of a comparison may be any integral type.
3055 Because we special-case pointers to void we allow
3056 comparisons of pointers with the same mode as well. */
3057 if ((!useless_type_conversion_p (op0_type, op1_type)
3058 && !useless_type_conversion_p (op1_type, op0_type)
3059 && (!POINTER_TYPE_P (op0_type)
3060 || !POINTER_TYPE_P (op1_type)
3061 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3062 || !INTEGRAL_TYPE_P (type))
3064 error ("type mismatch in comparison expression");
3065 debug_generic_expr (type);
3066 debug_generic_expr (op0_type);
3067 debug_generic_expr (op1_type);
3068 return true;
3071 return false;
3074 /* Verify a gimple assignment statement STMT with an unary rhs.
3075 Returns true if anything is wrong. */
3077 static bool
3078 verify_gimple_assign_unary (gimple stmt)
3080 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3081 tree lhs = gimple_assign_lhs (stmt);
3082 tree lhs_type = TREE_TYPE (lhs);
3083 tree rhs1 = gimple_assign_rhs1 (stmt);
3084 tree rhs1_type = TREE_TYPE (rhs1);
/* The LHS must be a register; at -O0 a complex-typed memory LHS is
   tolerated.  */
3086 if (!is_gimple_reg (lhs)
3087 && !(optimize == 0
3088 && TREE_CODE (lhs_type) == COMPLEX_TYPE))
3090 error ("non-register as LHS of unary operation");
3091 return true;
3094 if (!is_gimple_val (rhs1))
3096 error ("invalid operand in unary operation");
3097 return true;
3100 /* First handle conversions. */
3101 switch (rhs_code)
3103 CASE_CONVERT:
3105 /* Allow conversions between integral types and pointers only if
3106 there is no sign or zero extension involved.
3107 For targets were the precision of sizetype doesn't match that
3108 of pointers we need to allow arbitrary conversions from and
3109 to sizetype. */
3110 if ((POINTER_TYPE_P (lhs_type)
3111 && INTEGRAL_TYPE_P (rhs1_type)
3112 && (TYPE_PRECISION (lhs_type) >= TYPE_PRECISION (rhs1_type)
3113 || rhs1_type == sizetype))
3114 || (POINTER_TYPE_P (rhs1_type)
3115 && INTEGRAL_TYPE_P (lhs_type)
3116 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3117 || lhs_type == sizetype)))
3118 return false;
3120 /* Allow conversion from integer to offset type and vice versa. */
3121 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3122 && TREE_CODE (rhs1_type) == INTEGER_TYPE)
3123 || (TREE_CODE (lhs_type) == INTEGER_TYPE
3124 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3125 return false;
3127 /* Otherwise assert we are converting between types of the
3128 same kind. */
3129 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3131 error ("invalid types in nop conversion");
3132 debug_generic_expr (lhs_type);
3133 debug_generic_expr (rhs1_type);
3134 return true;
3137 return false;
3140 case ADDR_SPACE_CONVERT_EXPR:
/* Both sides must be pointers and the target address spaces must
   actually differ, otherwise the conversion is pointless.  */
3142 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3143 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3144 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3146 error ("invalid types in address space conversion");
3147 debug_generic_expr (lhs_type);
3148 debug_generic_expr (rhs1_type);
3149 return true;
3152 return false;
3155 case FIXED_CONVERT_EXPR:
3157 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3158 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3160 error ("invalid types in fixed-point conversion");
3161 debug_generic_expr (lhs_type);
3162 debug_generic_expr (rhs1_type);
3163 return true;
3166 return false;
3169 case FLOAT_EXPR:
/* FLOAT_EXPR: integer -> float; FIX_TRUNC_EXPR: float -> integer.  */
3171 if (!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3173 error ("invalid types in conversion to floating point");
3174 debug_generic_expr (lhs_type);
3175 debug_generic_expr (rhs1_type);
3176 return true;
3179 return false;
3182 case FIX_TRUNC_EXPR:
3184 if (!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3186 error ("invalid types in conversion to integer");
3187 debug_generic_expr (lhs_type);
3188 debug_generic_expr (rhs1_type);
3189 return true;
3192 return false;
3195 case VEC_UNPACK_HI_EXPR:
3196 case VEC_UNPACK_LO_EXPR:
3197 case REDUC_MAX_EXPR:
3198 case REDUC_MIN_EXPR:
3199 case REDUC_PLUS_EXPR:
3200 case VEC_UNPACK_FLOAT_HI_EXPR:
3201 case VEC_UNPACK_FLOAT_LO_EXPR:
3202 /* FIXME. */
3203 return false;
/* Non-conversion unary codes fall through to the common
   no-conversion check below.  */
3205 case TRUTH_NOT_EXPR:
3206 case NEGATE_EXPR:
3207 case ABS_EXPR:
3208 case BIT_NOT_EXPR:
3209 case PAREN_EXPR:
3210 case NON_LVALUE_EXPR:
3211 case CONJ_EXPR:
3212 break;
3214 default:
3215 gcc_unreachable ();
3218 /* For the remaining codes assert there is no conversion involved. */
3219 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3221 error ("non-trivial conversion in unary operation");
3222 debug_generic_expr (lhs_type);
3223 debug_generic_expr (rhs1_type);
3224 return true;
3227 return false;
3230 /* Verify a gimple assignment statement STMT with a binary rhs.
3231 Returns true if anything is wrong. */
3233 static bool
3234 verify_gimple_assign_binary (gimple stmt)
3236 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3237 tree lhs = gimple_assign_lhs (stmt);
3238 tree lhs_type = TREE_TYPE (lhs);
3239 tree rhs1 = gimple_assign_rhs1 (stmt);
3240 tree rhs1_type = TREE_TYPE (rhs1);
3241 tree rhs2 = gimple_assign_rhs2 (stmt);
3242 tree rhs2_type = TREE_TYPE (rhs2);
/* The LHS must be a register; at -O0 a complex-typed memory LHS is
   tolerated (matching verify_gimple_assign_unary).  */
3244 if (!is_gimple_reg (lhs)
3245 && !(optimize == 0
3246 && TREE_CODE (lhs_type) == COMPLEX_TYPE))
3248 error ("non-register as LHS of binary operation");
3249 return true;
3252 if (!is_gimple_val (rhs1)
3253 || !is_gimple_val (rhs2))
3255 error ("invalid operands in binary operation");
3256 return true;
3259 /* First handle operations that involve different types. */
3260 switch (rhs_code)
3262 case COMPLEX_EXPR:
/* COMPLEX_EXPR builds a complex value from two scalar components.  */
3264 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3265 || !(INTEGRAL_TYPE_P (rhs1_type)
3266 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3267 || !(INTEGRAL_TYPE_P (rhs2_type)
3268 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3270 error ("type mismatch in complex expression");
3271 debug_generic_expr (lhs_type);
3272 debug_generic_expr (rhs1_type);
3273 debug_generic_expr (rhs2_type);
3274 return true;
3277 return false;
3280 case LSHIFT_EXPR:
3281 case RSHIFT_EXPR:
3282 case LROTATE_EXPR:
3283 case RROTATE_EXPR:
3285 /* Shifts and rotates are ok on integral types, fixed point
3286 types and integer vector types. */
3287 if ((!INTEGRAL_TYPE_P (rhs1_type)
3288 && !FIXED_POINT_TYPE_P (rhs1_type)
3289 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3290 && TREE_CODE (TREE_TYPE (rhs1_type)) == INTEGER_TYPE))
3291 || (!INTEGRAL_TYPE_P (rhs2_type)
3292 /* Vector shifts of vectors are also ok. */
3293 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3294 && TREE_CODE (TREE_TYPE (rhs1_type)) == INTEGER_TYPE
3295 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3296 && TREE_CODE (TREE_TYPE (rhs2_type)) == INTEGER_TYPE))
3297 || !useless_type_conversion_p (lhs_type, rhs1_type))
3299 error ("type mismatch in shift expression");
3300 debug_generic_expr (lhs_type);
3301 debug_generic_expr (rhs1_type);
3302 debug_generic_expr (rhs2_type);
3303 return true;
3306 return false;
3309 case VEC_LSHIFT_EXPR:
3310 case VEC_RSHIFT_EXPR:
/* Whole-vector shifts: rhs1 is a vector, rhs2 the shift amount.  */
3312 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3313 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3314 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3315 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3316 || (!INTEGRAL_TYPE_P (rhs2_type)
3317 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3318 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3319 || !useless_type_conversion_p (lhs_type, rhs1_type))
3321 error ("type mismatch in vector shift expression");
3322 debug_generic_expr (lhs_type);
3323 debug_generic_expr (rhs1_type);
3324 debug_generic_expr (rhs2_type);
3325 return true;
3327 /* For shifting a vector of floating point components we
3328 only allow shifting by a constant multiple of the element size. */
3329 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
3330 && (TREE_CODE (rhs2) != INTEGER_CST
3331 || !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
3332 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3334 error ("non-element sized vector shift of floating point vector");
3335 return true;
3338 return false;
3341 case PLUS_EXPR:
3343 /* We use regular PLUS_EXPR for vectors.
3344 ??? This just makes the checker happy and may not be what is
3345 intended. */
3346 if (TREE_CODE (lhs_type) == VECTOR_TYPE
3347 && POINTER_TYPE_P (TREE_TYPE (lhs_type)))
3349 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3350 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3352 error ("invalid non-vector operands to vector valued plus");
3353 return true;
/* Check the element types as a pointer-plus from here on.  */
3355 lhs_type = TREE_TYPE (lhs_type);
3356 rhs1_type = TREE_TYPE (rhs1_type);
3357 rhs2_type = TREE_TYPE (rhs2_type);
3358 /* PLUS_EXPR is commutative, so we might end up canonicalizing
3359 the pointer to 2nd place. */
3360 if (POINTER_TYPE_P (rhs2_type))
3362 tree tem = rhs1_type;
3363 rhs1_type = rhs2_type;
3364 rhs2_type = tem;
3366 goto do_pointer_plus_expr_check;
3369 /* Fallthru. */
3370 case MINUS_EXPR:
/* Scalar plus/minus must not involve pointers at all; that is
   POINTER_PLUS_EXPR's job.  */
3372 if (POINTER_TYPE_P (lhs_type)
3373 || POINTER_TYPE_P (rhs1_type)
3374 || POINTER_TYPE_P (rhs2_type))
3376 error ("invalid (pointer) operands to plus/minus");
3377 return true;
3380 /* Continue with generic binary expression handling. */
3381 break;
3384 case POINTER_PLUS_EXPR:
3386 do_pointer_plus_expr_check:
/* rhs1 must be a pointer matching the result; rhs2 is a sizetype
   offset.  */
3387 if (!POINTER_TYPE_P (rhs1_type)
3388 || !useless_type_conversion_p (lhs_type, rhs1_type)
3389 || !useless_type_conversion_p (sizetype, rhs2_type))
3391 error ("type mismatch in pointer plus expression");
3392 debug_generic_stmt (lhs_type);
3393 debug_generic_stmt (rhs1_type);
3394 debug_generic_stmt (rhs2_type);
3395 return true;
3398 return false;
3401 case TRUTH_ANDIF_EXPR:
3402 case TRUTH_ORIF_EXPR:
/* Short-circuit forms must have been lowered before gimplification.  */
3403 gcc_unreachable ();
3405 case TRUTH_AND_EXPR:
3406 case TRUTH_OR_EXPR:
3407 case TRUTH_XOR_EXPR:
3409 /* We allow any kind of integral typed argument and result. */
3410 if (!INTEGRAL_TYPE_P (rhs1_type)
3411 || !INTEGRAL_TYPE_P (rhs2_type)
3412 || !INTEGRAL_TYPE_P (lhs_type))
3414 error ("type mismatch in binary truth expression");
3415 debug_generic_expr (lhs_type);
3416 debug_generic_expr (rhs1_type);
3417 debug_generic_expr (rhs2_type);
3418 return true;
3421 return false;
3424 case LT_EXPR:
3425 case LE_EXPR:
3426 case GT_EXPR:
3427 case GE_EXPR:
3428 case EQ_EXPR:
3429 case NE_EXPR:
3430 case UNORDERED_EXPR:
3431 case ORDERED_EXPR:
3432 case UNLT_EXPR:
3433 case UNLE_EXPR:
3434 case UNGT_EXPR:
3435 case UNGE_EXPR:
3436 case UNEQ_EXPR:
3437 case LTGT_EXPR:
3438 /* Comparisons are also binary, but the result type is not
3439 connected to the operand types. */
3440 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3442 case WIDEN_SUM_EXPR:
3443 case WIDEN_MULT_EXPR:
3444 case VEC_WIDEN_MULT_HI_EXPR:
3445 case VEC_WIDEN_MULT_LO_EXPR:
3446 case VEC_PACK_TRUNC_EXPR:
3447 case VEC_PACK_SAT_EXPR:
3448 case VEC_PACK_FIX_TRUNC_EXPR:
3449 case VEC_EXTRACT_EVEN_EXPR:
3450 case VEC_EXTRACT_ODD_EXPR:
3451 case VEC_INTERLEAVE_HIGH_EXPR:
3452 case VEC_INTERLEAVE_LOW_EXPR:
3453 /* FIXME. */
3454 return false;
3456 case MULT_EXPR:
3457 case TRUNC_DIV_EXPR:
3458 case CEIL_DIV_EXPR:
3459 case FLOOR_DIV_EXPR:
3460 case ROUND_DIV_EXPR:
3461 case TRUNC_MOD_EXPR:
3462 case CEIL_MOD_EXPR:
3463 case FLOOR_MOD_EXPR:
3464 case ROUND_MOD_EXPR:
3465 case RDIV_EXPR:
3466 case EXACT_DIV_EXPR:
3467 case MIN_EXPR:
3468 case MAX_EXPR:
3469 case BIT_IOR_EXPR:
3470 case BIT_XOR_EXPR:
3471 case BIT_AND_EXPR:
3472 /* Continue with generic binary expression handling. */
3473 break;
3475 default:
3476 gcc_unreachable ();
/* Generic case: both operands and the result must agree in type.  */
3479 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3480 || !useless_type_conversion_p (lhs_type, rhs2_type))
3482 error ("type mismatch in binary expression");
3483 debug_generic_stmt (lhs_type);
3484 debug_generic_stmt (rhs1_type);
3485 debug_generic_stmt (rhs2_type);
3486 return true;
3489 return false;
3492 /* Verify a gimple assignment statement STMT with a single rhs.
3493 Returns true if anything is wrong. */
3495 static bool
3496 verify_gimple_assign_single (gimple stmt)
3498 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3499 tree lhs = gimple_assign_lhs (stmt);
3500 tree lhs_type = TREE_TYPE (lhs);
3501 tree rhs1 = gimple_assign_rhs1 (stmt);
3502 tree rhs1_type = TREE_TYPE (rhs1);
3503 bool res = false;
/* A single-rhs assignment is a plain copy; no conversion allowed.  */
3505 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3507 error ("non-trivial conversion at assignment");
3508 debug_generic_expr (lhs_type);
3509 debug_generic_expr (rhs1_type);
3510 return true;
3513 if (handled_component_p (lhs))
3514 res |= verify_types_in_gimple_reference (lhs, true);
3516 /* Special codes we cannot handle via their class. */
3517 switch (rhs_code)
3519 case ADDR_EXPR:
3521 tree op = TREE_OPERAND (rhs1, 0);
3522 if (!is_gimple_addressable (op))
3524 error ("invalid operand in unary expression");
3525 return true;
/* The address's type must be compatible with a pointer to the
   operand's type, or to one of the pointer types already built
   for it.  */
3528 if (!types_compatible_p (TREE_TYPE (op), TREE_TYPE (TREE_TYPE (rhs1)))
3529 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
3530 TREE_TYPE (op)))
3532 error ("type mismatch in address expression");
3533 debug_generic_stmt (TREE_TYPE (rhs1));
3534 debug_generic_stmt (TREE_TYPE (op));
3535 return true;
3538 return verify_types_in_gimple_reference (op, true);
3541 /* tcc_reference */
3542 case COMPONENT_REF:
3543 case BIT_FIELD_REF:
3544 case INDIRECT_REF:
3545 case ALIGN_INDIRECT_REF:
3546 case MISALIGNED_INDIRECT_REF:
3547 case ARRAY_REF:
3548 case ARRAY_RANGE_REF:
3549 case VIEW_CONVERT_EXPR:
3550 case REALPART_EXPR:
3551 case IMAGPART_EXPR:
3552 case TARGET_MEM_REF:
/* A memory load into a memory store is not valid gimple when the
   stored type is a register type; such copies must go through a
   register temporary.  */
3553 if (!is_gimple_reg (lhs)
3554 && is_gimple_reg_type (TREE_TYPE (lhs)))
3556 error ("invalid rhs for gimple memory store");
3557 debug_generic_stmt (lhs);
3558 debug_generic_stmt (rhs1);
3559 return true;
3561 return res || verify_types_in_gimple_reference (rhs1, false);
3563 /* tcc_constant */
3564 case SSA_NAME:
3565 case INTEGER_CST:
3566 case REAL_CST:
3567 case FIXED_CST:
3568 case COMPLEX_CST:
3569 case VECTOR_CST:
3570 case STRING_CST:
3571 return res;
3573 /* tcc_declaration */
3574 case CONST_DECL:
3575 return res;
3576 case VAR_DECL:
3577 case PARM_DECL:
/* Same register-type restriction as above for decl-to-memory copies.  */
3578 if (!is_gimple_reg (lhs)
3579 && !is_gimple_reg (rhs1)
3580 && is_gimple_reg_type (TREE_TYPE (lhs)))
3582 error ("invalid rhs for gimple memory store");
3583 debug_generic_stmt (lhs);
3584 debug_generic_stmt (rhs1);
3585 return true;
3587 return res;
3589 case COND_EXPR:
3590 case CONSTRUCTOR:
3591 case OBJ_TYPE_REF:
3592 case ASSERT_EXPR:
3593 case WITH_SIZE_EXPR:
3594 case POLYNOMIAL_CHREC:
3595 case DOT_PROD_EXPR:
3596 case VEC_COND_EXPR:
3597 case REALIGN_LOAD_EXPR:
3598 /* FIXME. */
3599 return res;
3601 default:;
3604 return res;
3607 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
3608 is a problem, otherwise false. */
3610 static bool
3611 verify_gimple_assign (gimple stmt)
3613 switch (gimple_assign_rhs_class (stmt))
3615 case GIMPLE_SINGLE_RHS:
3616 return verify_gimple_assign_single (stmt);
3618 case GIMPLE_UNARY_RHS:
3619 return verify_gimple_assign_unary (stmt);
3621 case GIMPLE_BINARY_RHS:
3622 return verify_gimple_assign_binary (stmt);
3624 default:
3625 gcc_unreachable ();
3629 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
3630 is a problem, otherwise false. */
3632 static bool
3633 verify_gimple_return (gimple stmt)
3635 tree op = gimple_return_retval (stmt);
3636 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
3638 /* We cannot test for present return values as we do not fix up missing
3639 return values from the original source. */
3640 if (op == NULL)
3641 return false;
3643 if (!is_gimple_val (op)
3644 && TREE_CODE (op) != RESULT_DECL)
3646 error ("invalid operand in return statement");
3647 debug_generic_stmt (op);
3648 return true;
3651 if (!useless_type_conversion_p (restype, TREE_TYPE (op))
3652 /* ??? With C++ we can have the situation that the result
3653 decl is a reference type while the return type is an aggregate. */
3654 && !(TREE_CODE (op) == RESULT_DECL
3655 && TREE_CODE (TREE_TYPE (op)) == REFERENCE_TYPE
3656 && useless_type_conversion_p (restype, TREE_TYPE (TREE_TYPE (op)))))
3658 error ("invalid conversion in return statement");
3659 debug_generic_stmt (restype);
3660 debug_generic_stmt (TREE_TYPE (op));
3661 return true;
3664 return false;
3668 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
3669 is a problem, otherwise false. */
3671 static bool
3672 verify_gimple_goto (gimple stmt)
3674 tree dest = gimple_goto_dest (stmt);
3676 /* ??? We have two canonical forms of direct goto destinations, a
3677 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
3678 if (TREE_CODE (dest) != LABEL_DECL
3679 && (!is_gimple_val (dest)
3680 || !POINTER_TYPE_P (TREE_TYPE (dest))))
3682 error ("goto destination is neither a label nor a pointer");
3683 return true;
3686 return false;
3689 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
3690 is a problem, otherwise false. */
3692 static bool
3693 verify_gimple_switch (gimple stmt)
3695 if (!is_gimple_val (gimple_switch_index (stmt)))
3697 error ("invalid operand to switch statement");
3698 debug_generic_stmt (gimple_switch_index (stmt));
3699 return true;
3702 return false;
3706 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
3707 and false otherwise. */
3709 static bool
3710 verify_gimple_phi (gimple stmt)
3712 tree type = TREE_TYPE (gimple_phi_result (stmt));
3713 unsigned i;
3715 if (TREE_CODE (gimple_phi_result (stmt)) != SSA_NAME)
3717 error ("Invalid PHI result");
3718 return true;
3721 for (i = 0; i < gimple_phi_num_args (stmt); i++)
3723 tree arg = gimple_phi_arg_def (stmt, i);
3724 if ((is_gimple_reg (gimple_phi_result (stmt))
3725 && !is_gimple_val (arg))
3726 || (!is_gimple_reg (gimple_phi_result (stmt))
3727 && !is_gimple_addressable (arg)))
3729 error ("Invalid PHI argument");
3730 debug_generic_stmt (arg);
3731 return true;
3733 if (!useless_type_conversion_p (type, TREE_TYPE (arg)))
3735 error ("Incompatible types in PHI argument %u", i);
3736 debug_generic_stmt (type);
3737 debug_generic_stmt (TREE_TYPE (arg));
3738 return true;
3742 return false;
3746 /* Verify a gimple debug statement STMT.
3747 Returns true if anything is wrong. */
3749 static bool
3750 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
3752 /* There isn't much that could be wrong in a gimple debug stmt. A
3753 gimple debug bind stmt, for example, maps a tree, that's usually
3754 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
3755 component or member of an aggregate type, to another tree, that
3756 can be an arbitrary expression. These stmts expand into debug
3757 insns, and are converted to debug notes by var-tracking.c. */
3758 return false;
3762 /* Verify the GIMPLE statement STMT. Returns true if there is an
3763 error, otherwise false. */
3765 static bool
3766 verify_types_in_gimple_stmt (gimple stmt)
3768 switch (gimple_code (stmt))
3770 case GIMPLE_ASSIGN:
3771 return verify_gimple_assign (stmt);
3773 case GIMPLE_LABEL:
3774 return TREE_CODE (gimple_label_label (stmt)) != LABEL_DECL;
3776 case GIMPLE_CALL:
3777 return verify_gimple_call (stmt);
3779 case GIMPLE_COND:
3780 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
3782 error ("invalid comparison code in gimple cond");
3783 return true;
3785 if (!(!gimple_cond_true_label (stmt)
3786 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
3787 || !(!gimple_cond_false_label (stmt)
3788 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
3790 error ("invalid labels in gimple cond");
3791 return true;
3794 return verify_gimple_comparison (boolean_type_node,
3795 gimple_cond_lhs (stmt),
3796 gimple_cond_rhs (stmt));
3798 case GIMPLE_GOTO:
3799 return verify_gimple_goto (stmt);
3801 case GIMPLE_SWITCH:
3802 return verify_gimple_switch (stmt);
3804 case GIMPLE_RETURN:
3805 return verify_gimple_return (stmt);
3807 case GIMPLE_ASM:
3808 return false;
3810 case GIMPLE_PHI:
3811 return verify_gimple_phi (stmt);
3813 /* Tuples that do not have tree operands. */
3814 case GIMPLE_NOP:
3815 case GIMPLE_PREDICT:
3816 case GIMPLE_RESX:
3817 case GIMPLE_EH_DISPATCH:
3818 case GIMPLE_EH_MUST_NOT_THROW:
3819 return false;
3821 CASE_GIMPLE_OMP:
3822 /* OpenMP directives are validated by the FE and never operated
3823 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
3824 non-gimple expressions when the main index variable has had
3825 its address taken. This does not affect the loop itself
3826 because the header of an GIMPLE_OMP_FOR is merely used to determine
3827 how to setup the parallel iteration. */
3828 return false;
3830 case GIMPLE_DEBUG:
3831 return verify_gimple_debug (stmt);
3833 default:
3834 gcc_unreachable ();
3838 /* Verify the GIMPLE statements inside the sequence STMTS. */
3840 static bool
3841 verify_types_in_gimple_seq_2 (gimple_seq stmts)
3843 gimple_stmt_iterator ittr;
3844 bool err = false;
3846 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
3848 gimple stmt = gsi_stmt (ittr);
3850 switch (gimple_code (stmt))
3852 case GIMPLE_BIND:
3853 err |= verify_types_in_gimple_seq_2 (gimple_bind_body (stmt));
3854 break;
3856 case GIMPLE_TRY:
3857 err |= verify_types_in_gimple_seq_2 (gimple_try_eval (stmt));
3858 err |= verify_types_in_gimple_seq_2 (gimple_try_cleanup (stmt));
3859 break;
3861 case GIMPLE_EH_FILTER:
3862 err |= verify_types_in_gimple_seq_2 (gimple_eh_filter_failure (stmt));
3863 break;
3865 case GIMPLE_CATCH:
3866 err |= verify_types_in_gimple_seq_2 (gimple_catch_handler (stmt));
3867 break;
3869 default:
3871 bool err2 = verify_types_in_gimple_stmt (stmt);
3872 if (err2)
3873 debug_gimple_stmt (stmt);
3874 err |= err2;
3879 return err;
3883 /* Verify the GIMPLE statements inside the statement list STMTS. */
3885 void
3886 verify_types_in_gimple_seq (gimple_seq stmts)
3888 if (verify_types_in_gimple_seq_2 (stmts))
3889 internal_error ("verify_gimple failed");
3893 /* Verify STMT, return true if STMT is not in GIMPLE form.
3894 TODO: Implement type checking. */
3896 static bool
3897 verify_stmt (gimple_stmt_iterator *gsi)
3899 tree addr;
3900 struct walk_stmt_info wi;
3901 bool last_in_block = gsi_one_before_end_p (*gsi);
3902 gimple stmt = gsi_stmt (*gsi);
3903 int lp_nr;
3905 if (is_gimple_omp (stmt))
3907 /* OpenMP directives are validated by the FE and never operated
3908 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
3909 non-gimple expressions when the main index variable has had
3910 its address taken. This does not affect the loop itself
3911 because the header of an GIMPLE_OMP_FOR is merely used to determine
3912 how to setup the parallel iteration. */
3913 return false;
3916 /* FIXME. The C frontend passes unpromoted arguments in case it
3917 didn't see a function declaration before the call. */
3918 if (is_gimple_call (stmt))
3920 tree decl;
3922 if (!is_gimple_call_addr (gimple_call_fn (stmt)))
3924 error ("invalid function in call statement");
3925 return true;
3928 decl = gimple_call_fndecl (stmt);
3929 if (decl
3930 && TREE_CODE (decl) == FUNCTION_DECL
3931 && DECL_LOOPING_CONST_OR_PURE_P (decl)
3932 && (!DECL_PURE_P (decl))
3933 && (!TREE_READONLY (decl)))
3935 error ("invalid pure const state for function");
3936 return true;
3940 if (is_gimple_debug (stmt))
3941 return false;
3943 memset (&wi, 0, sizeof (wi));
3944 addr = walk_gimple_op (gsi_stmt (*gsi), verify_expr, &wi);
3945 if (addr)
3947 debug_generic_expr (addr);
3948 inform (gimple_location (gsi_stmt (*gsi)), "in statement");
3949 debug_gimple_stmt (stmt);
3950 return true;
3953 /* If the statement is marked as part of an EH region, then it is
3954 expected that the statement could throw. Verify that when we
3955 have optimizations that simplify statements such that we prove
3956 that they cannot throw, that we update other data structures
3957 to match. */
3958 lp_nr = lookup_stmt_eh_lp (stmt);
3959 if (lp_nr != 0)
3961 if (!stmt_could_throw_p (stmt))
3963 /* During IPA passes, ipa-pure-const sets nothrow flags on calls
3964 and they are updated on statements only after fixup_cfg
3965 is executed at beggining of expansion stage. */
3966 if (cgraph_state != CGRAPH_STATE_IPA_SSA)
3968 error ("statement marked for throw, but doesn%'t");
3969 goto fail;
3972 else if (lp_nr > 0 && !last_in_block && stmt_can_throw_internal (stmt))
3974 error ("statement marked for throw in middle of block");
3975 goto fail;
3979 return false;
3981 fail:
3982 debug_gimple_stmt (stmt);
3983 return true;
3987 /* Return true when the T can be shared. */
3989 bool
3990 tree_node_can_be_shared (tree t)
3992 if (IS_TYPE_OR_DECL_P (t)
3993 || is_gimple_min_invariant (t)
3994 || TREE_CODE (t) == SSA_NAME
3995 || t == error_mark_node
3996 || TREE_CODE (t) == IDENTIFIER_NODE)
3997 return true;
3999 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4000 return true;
4002 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4003 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
4004 || TREE_CODE (t) == COMPONENT_REF
4005 || TREE_CODE (t) == REALPART_EXPR
4006 || TREE_CODE (t) == IMAGPART_EXPR)
4007 t = TREE_OPERAND (t, 0);
4009 if (DECL_P (t))
4010 return true;
4012 return false;
4016 /* Called via walk_gimple_stmt. Verify tree sharing. */
4018 static tree
4019 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4021 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4022 struct pointer_set_t *visited = (struct pointer_set_t *) wi->info;
4024 if (tree_node_can_be_shared (*tp))
4026 *walk_subtrees = false;
4027 return NULL;
4030 if (pointer_set_insert (visited, *tp))
4031 return *tp;
4033 return NULL;
4037 static bool eh_error_found;
4038 static int
4039 verify_eh_throw_stmt_node (void **slot, void *data)
4041 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4042 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4044 if (!pointer_set_contains (visited, node->stmt))
4046 error ("Dead STMT in EH table");
4047 debug_gimple_stmt (node->stmt);
4048 eh_error_found = true;
4050 return 1;
4054 /* Verify the GIMPLE statements in every basic block. */
4056 void
4057 verify_stmts (void)
4059 basic_block bb;
4060 gimple_stmt_iterator gsi;
4061 bool err = false;
4062 struct pointer_set_t *visited, *visited_stmts;
4063 tree addr;
4064 struct walk_stmt_info wi;
4066 timevar_push (TV_TREE_STMT_VERIFY);
4067 visited = pointer_set_create ();
4068 visited_stmts = pointer_set_create ();
4070 memset (&wi, 0, sizeof (wi));
4071 wi.info = (void *) visited;
4073 FOR_EACH_BB (bb)
4075 gimple phi;
4076 size_t i;
4078 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4080 phi = gsi_stmt (gsi);
4081 pointer_set_insert (visited_stmts, phi);
4082 if (gimple_bb (phi) != bb)
4084 error ("gimple_bb (phi) is set to a wrong basic block");
4085 err |= true;
4088 for (i = 0; i < gimple_phi_num_args (phi); i++)
4090 tree t = gimple_phi_arg_def (phi, i);
4091 tree addr;
4093 if (!t)
4095 error ("missing PHI def");
4096 debug_gimple_stmt (phi);
4097 err |= true;
4098 continue;
4100 /* Addressable variables do have SSA_NAMEs but they
4101 are not considered gimple values. */
4102 else if (TREE_CODE (t) != SSA_NAME
4103 && TREE_CODE (t) != FUNCTION_DECL
4104 && !is_gimple_min_invariant (t))
4106 error ("PHI argument is not a GIMPLE value");
4107 debug_gimple_stmt (phi);
4108 debug_generic_expr (t);
4109 err |= true;
4112 addr = walk_tree (&t, verify_node_sharing, visited, NULL);
4113 if (addr)
4115 error ("incorrect sharing of tree nodes");
4116 debug_gimple_stmt (phi);
4117 debug_generic_expr (addr);
4118 err |= true;
4122 #ifdef ENABLE_TYPES_CHECKING
4123 if (verify_gimple_phi (phi))
4125 debug_gimple_stmt (phi);
4126 err |= true;
4128 #endif
4131 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
4133 gimple stmt = gsi_stmt (gsi);
4135 if (gimple_code (stmt) == GIMPLE_WITH_CLEANUP_EXPR
4136 || gimple_code (stmt) == GIMPLE_BIND)
4138 error ("invalid GIMPLE statement");
4139 debug_gimple_stmt (stmt);
4140 err |= true;
4143 pointer_set_insert (visited_stmts, stmt);
4145 if (gimple_bb (stmt) != bb)
4147 error ("gimple_bb (stmt) is set to a wrong basic block");
4148 debug_gimple_stmt (stmt);
4149 err |= true;
4152 if (gimple_code (stmt) == GIMPLE_LABEL)
4154 tree decl = gimple_label_label (stmt);
4155 int uid = LABEL_DECL_UID (decl);
4157 if (uid == -1
4158 || VEC_index (basic_block, label_to_block_map, uid) != bb)
4160 error ("incorrect entry in label_to_block_map");
4161 err |= true;
4164 uid = EH_LANDING_PAD_NR (decl);
4165 if (uid)
4167 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4168 if (decl != lp->post_landing_pad)
4170 error ("incorrect setting of landing pad number");
4171 err |= true;
4176 err |= verify_stmt (&gsi);
4178 #ifdef ENABLE_TYPES_CHECKING
4179 if (verify_types_in_gimple_stmt (gsi_stmt (gsi)))
4181 debug_gimple_stmt (stmt);
4182 err |= true;
4184 #endif
4185 addr = walk_gimple_op (gsi_stmt (gsi), verify_node_sharing, &wi);
4186 if (addr)
4188 error ("incorrect sharing of tree nodes");
4189 debug_gimple_stmt (stmt);
4190 debug_generic_expr (addr);
4191 err |= true;
4193 gsi_next (&gsi);
4197 eh_error_found = false;
4198 if (get_eh_throw_stmt_table (cfun))
4199 htab_traverse (get_eh_throw_stmt_table (cfun),
4200 verify_eh_throw_stmt_node,
4201 visited_stmts);
4203 if (err | eh_error_found)
4204 internal_error ("verify_stmts failed");
4206 pointer_set_destroy (visited);
4207 pointer_set_destroy (visited_stmts);
4208 verify_histograms ();
4209 timevar_pop (TV_TREE_STMT_VERIFY);
4213 /* Verifies that the flow information is OK. */
4215 static int
4216 gimple_verify_flow_info (void)
4218 int err = 0;
4219 basic_block bb;
4220 gimple_stmt_iterator gsi;
4221 gimple stmt;
4222 edge e;
4223 edge_iterator ei;
4225 if (ENTRY_BLOCK_PTR->il.gimple)
4227 error ("ENTRY_BLOCK has IL associated with it");
4228 err = 1;
4231 if (EXIT_BLOCK_PTR->il.gimple)
4233 error ("EXIT_BLOCK has IL associated with it");
4234 err = 1;
4237 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4238 if (e->flags & EDGE_FALLTHRU)
4240 error ("fallthru to exit from bb %d", e->src->index);
4241 err = 1;
4244 FOR_EACH_BB (bb)
4246 bool found_ctrl_stmt = false;
4248 stmt = NULL;
4250 /* Skip labels on the start of basic block. */
4251 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4253 tree label;
4254 gimple prev_stmt = stmt;
4256 stmt = gsi_stmt (gsi);
4258 if (gimple_code (stmt) != GIMPLE_LABEL)
4259 break;
4261 label = gimple_label_label (stmt);
4262 if (prev_stmt && DECL_NONLOCAL (label))
4264 error ("nonlocal label ");
4265 print_generic_expr (stderr, label, 0);
4266 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4267 bb->index);
4268 err = 1;
4271 if (label_to_block (label) != bb)
4273 error ("label ");
4274 print_generic_expr (stderr, label, 0);
4275 fprintf (stderr, " to block does not match in bb %d",
4276 bb->index);
4277 err = 1;
4280 if (decl_function_context (label) != current_function_decl)
4282 error ("label ");
4283 print_generic_expr (stderr, label, 0);
4284 fprintf (stderr, " has incorrect context in bb %d",
4285 bb->index);
4286 err = 1;
4290 /* Verify that body of basic block BB is free of control flow. */
4291 for (; !gsi_end_p (gsi); gsi_next (&gsi))
4293 gimple stmt = gsi_stmt (gsi);
4295 if (found_ctrl_stmt)
4297 error ("control flow in the middle of basic block %d",
4298 bb->index);
4299 err = 1;
4302 if (stmt_ends_bb_p (stmt))
4303 found_ctrl_stmt = true;
4305 if (gimple_code (stmt) == GIMPLE_LABEL)
4307 error ("label ");
4308 print_generic_expr (stderr, gimple_label_label (stmt), 0);
4309 fprintf (stderr, " in the middle of basic block %d", bb->index);
4310 err = 1;
4314 gsi = gsi_last_bb (bb);
4315 if (gsi_end_p (gsi))
4316 continue;
4318 stmt = gsi_stmt (gsi);
4320 if (gimple_code (stmt) == GIMPLE_LABEL)
4321 continue;
4323 err |= verify_eh_edges (stmt);
4325 if (is_ctrl_stmt (stmt))
4327 FOR_EACH_EDGE (e, ei, bb->succs)
4328 if (e->flags & EDGE_FALLTHRU)
4330 error ("fallthru edge after a control statement in bb %d",
4331 bb->index);
4332 err = 1;
4336 if (gimple_code (stmt) != GIMPLE_COND)
4338 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
4339 after anything else but if statement. */
4340 FOR_EACH_EDGE (e, ei, bb->succs)
4341 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4343 error ("true/false edge after a non-GIMPLE_COND in bb %d",
4344 bb->index);
4345 err = 1;
4349 switch (gimple_code (stmt))
4351 case GIMPLE_COND:
4353 edge true_edge;
4354 edge false_edge;
4356 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4358 if (!true_edge
4359 || !false_edge
4360 || !(true_edge->flags & EDGE_TRUE_VALUE)
4361 || !(false_edge->flags & EDGE_FALSE_VALUE)
4362 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4363 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4364 || EDGE_COUNT (bb->succs) >= 3)
4366 error ("wrong outgoing edge flags at end of bb %d",
4367 bb->index);
4368 err = 1;
4371 break;
4373 case GIMPLE_GOTO:
4374 if (simple_goto_p (stmt))
4376 error ("explicit goto at end of bb %d", bb->index);
4377 err = 1;
4379 else
4381 /* FIXME. We should double check that the labels in the
4382 destination blocks have their address taken. */
4383 FOR_EACH_EDGE (e, ei, bb->succs)
4384 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
4385 | EDGE_FALSE_VALUE))
4386 || !(e->flags & EDGE_ABNORMAL))
4388 error ("wrong outgoing edge flags at end of bb %d",
4389 bb->index);
4390 err = 1;
4393 break;
4395 case GIMPLE_RETURN:
4396 if (!single_succ_p (bb)
4397 || (single_succ_edge (bb)->flags
4398 & (EDGE_FALLTHRU | EDGE_ABNORMAL
4399 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4401 error ("wrong outgoing edge flags at end of bb %d", bb->index);
4402 err = 1;
4404 if (single_succ (bb) != EXIT_BLOCK_PTR)
4406 error ("return edge does not point to exit in bb %d",
4407 bb->index);
4408 err = 1;
4410 break;
4412 case GIMPLE_SWITCH:
4414 tree prev;
4415 edge e;
4416 size_t i, n;
4418 n = gimple_switch_num_labels (stmt);
4420 /* Mark all the destination basic blocks. */
4421 for (i = 0; i < n; ++i)
4423 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4424 basic_block label_bb = label_to_block (lab);
4425 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
4426 label_bb->aux = (void *)1;
4429 /* Verify that the case labels are sorted. */
4430 prev = gimple_switch_label (stmt, 0);
4431 for (i = 1; i < n; ++i)
4433 tree c = gimple_switch_label (stmt, i);
4434 if (!CASE_LOW (c))
4436 error ("found default case not at the start of "
4437 "case vector");
4438 err = 1;
4439 continue;
4441 if (CASE_LOW (prev)
4442 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
4444 error ("case labels not sorted: ");
4445 print_generic_expr (stderr, prev, 0);
4446 fprintf (stderr," is greater than ");
4447 print_generic_expr (stderr, c, 0);
4448 fprintf (stderr," but comes before it.\n");
4449 err = 1;
4451 prev = c;
4453 /* VRP will remove the default case if it can prove it will
4454 never be executed. So do not verify there always exists
4455 a default case here. */
4457 FOR_EACH_EDGE (e, ei, bb->succs)
4459 if (!e->dest->aux)
4461 error ("extra outgoing edge %d->%d",
4462 bb->index, e->dest->index);
4463 err = 1;
4466 e->dest->aux = (void *)2;
4467 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
4468 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4470 error ("wrong outgoing edge flags at end of bb %d",
4471 bb->index);
4472 err = 1;
4476 /* Check that we have all of them. */
4477 for (i = 0; i < n; ++i)
4479 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4480 basic_block label_bb = label_to_block (lab);
4482 if (label_bb->aux != (void *)2)
4484 error ("missing edge %i->%i", bb->index, label_bb->index);
4485 err = 1;
4489 FOR_EACH_EDGE (e, ei, bb->succs)
4490 e->dest->aux = (void *)0;
4492 break;
4494 case GIMPLE_EH_DISPATCH:
4495 err |= verify_eh_dispatch_edge (stmt);
4496 break;
4498 default:
4499 break;
4503 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
4504 verify_dominators (CDI_DOMINATORS);
4506 return err;
4510 /* Updates phi nodes after creating a forwarder block joined
4511 by edge FALLTHRU. */
4513 static void
4514 gimple_make_forwarder_block (edge fallthru)
4516 edge e;
4517 edge_iterator ei;
4518 basic_block dummy, bb;
4519 tree var;
4520 gimple_stmt_iterator gsi;
4522 dummy = fallthru->src;
4523 bb = fallthru->dest;
4525 if (single_pred_p (bb))
4526 return;
4528 /* If we redirected a branch we must create new PHI nodes at the
4529 start of BB. */
4530 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
4532 gimple phi, new_phi;
4534 phi = gsi_stmt (gsi);
4535 var = gimple_phi_result (phi);
4536 new_phi = create_phi_node (var, bb);
4537 SSA_NAME_DEF_STMT (var) = new_phi;
4538 gimple_phi_set_result (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
4539 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
4540 UNKNOWN_LOCATION);
4543 /* Add the arguments we have stored on edges. */
4544 FOR_EACH_EDGE (e, ei, bb->preds)
4546 if (e == fallthru)
4547 continue;
4549 flush_pending_stmts (e);
4554 /* Return a non-special label in the head of basic block BLOCK.
4555 Create one if it doesn't exist. */
4557 tree
4558 gimple_block_label (basic_block bb)
4560 gimple_stmt_iterator i, s = gsi_start_bb (bb);
4561 bool first = true;
4562 tree label;
4563 gimple stmt;
4565 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
4567 stmt = gsi_stmt (i);
4568 if (gimple_code (stmt) != GIMPLE_LABEL)
4569 break;
4570 label = gimple_label_label (stmt);
4571 if (!DECL_NONLOCAL (label))
4573 if (!first)
4574 gsi_move_before (&i, &s);
4575 return label;
4579 label = create_artificial_label (UNKNOWN_LOCATION);
4580 stmt = gimple_build_label (label);
4581 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
4582 return label;
4586 /* Attempt to perform edge redirection by replacing a possibly complex
4587 jump instruction by a goto or by removing the jump completely.
4588 This can apply only if all edges now point to the same block. The
4589 parameters and return values are equivalent to
4590 redirect_edge_and_branch. */
4592 static edge
4593 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
4595 basic_block src = e->src;
4596 gimple_stmt_iterator i;
4597 gimple stmt;
4599 /* We can replace or remove a complex jump only when we have exactly
4600 two edges. */
4601 if (EDGE_COUNT (src->succs) != 2
4602 /* Verify that all targets will be TARGET. Specifically, the
4603 edge that is not E must also go to TARGET. */
4604 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4605 return NULL;
4607 i = gsi_last_bb (src);
4608 if (gsi_end_p (i))
4609 return NULL;
4611 stmt = gsi_stmt (i);
4613 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
4615 gsi_remove (&i, true);
4616 e = ssa_redirect_edge (e, target);
4617 e->flags = EDGE_FALLTHRU;
4618 return e;
4621 return NULL;
4625 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4626 edge representing the redirected branch. */
4628 static edge
4629 gimple_redirect_edge_and_branch (edge e, basic_block dest)
4631 basic_block bb = e->src;
4632 gimple_stmt_iterator gsi;
4633 edge ret;
4634 gimple stmt;
4636 if (e->flags & EDGE_ABNORMAL)
4637 return NULL;
4639 if (e->dest == dest)
4640 return NULL;
4642 if (e->flags & EDGE_EH)
4643 return redirect_eh_edge (e, dest);
4645 if (e->src != ENTRY_BLOCK_PTR)
4647 ret = gimple_try_redirect_by_replacing_jump (e, dest);
4648 if (ret)
4649 return ret;
4652 gsi = gsi_last_bb (bb);
4653 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
4655 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
4657 case GIMPLE_COND:
4658 /* For COND_EXPR, we only need to redirect the edge. */
4659 break;
4661 case GIMPLE_GOTO:
4662 /* No non-abnormal edges should lead from a non-simple goto, and
4663 simple ones should be represented implicitly. */
4664 gcc_unreachable ();
4666 case GIMPLE_SWITCH:
4668 tree label = gimple_block_label (dest);
4669 tree cases = get_cases_for_edge (e, stmt);
4671 /* If we have a list of cases associated with E, then use it
4672 as it's a lot faster than walking the entire case vector. */
4673 if (cases)
4675 edge e2 = find_edge (e->src, dest);
4676 tree last, first;
4678 first = cases;
4679 while (cases)
4681 last = cases;
4682 CASE_LABEL (cases) = label;
4683 cases = TREE_CHAIN (cases);
4686 /* If there was already an edge in the CFG, then we need
4687 to move all the cases associated with E to E2. */
4688 if (e2)
4690 tree cases2 = get_cases_for_edge (e2, stmt);
4692 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4693 TREE_CHAIN (cases2) = first;
4696 else
4698 size_t i, n = gimple_switch_num_labels (stmt);
4700 for (i = 0; i < n; i++)
4702 tree elt = gimple_switch_label (stmt, i);
4703 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4704 CASE_LABEL (elt) = label;
4708 break;
4710 case GIMPLE_ASM:
4712 int i, n = gimple_asm_nlabels (stmt);
4713 tree label = NULL;
4715 for (i = 0; i < n; ++i)
4717 tree cons = gimple_asm_label_op (stmt, i);
4718 if (label_to_block (TREE_VALUE (cons)) == e->dest)
4720 if (!label)
4721 label = gimple_block_label (dest);
4722 TREE_VALUE (cons) = label;
4726 /* If we didn't find any label matching the former edge in the
4727 asm labels, we must be redirecting the fallthrough
4728 edge. */
4729 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
4731 break;
4733 case GIMPLE_RETURN:
4734 gsi_remove (&gsi, true);
4735 e->flags |= EDGE_FALLTHRU;
4736 break;
4738 case GIMPLE_OMP_RETURN:
4739 case GIMPLE_OMP_CONTINUE:
4740 case GIMPLE_OMP_SECTIONS_SWITCH:
4741 case GIMPLE_OMP_FOR:
4742 /* The edges from OMP constructs can be simply redirected. */
4743 break;
4745 case GIMPLE_EH_DISPATCH:
4746 if (!(e->flags & EDGE_FALLTHRU))
4747 redirect_eh_dispatch_edge (stmt, e, dest);
4748 break;
4750 default:
4751 /* Otherwise it must be a fallthru edge, and we don't need to
4752 do anything besides redirecting it. */
4753 gcc_assert (e->flags & EDGE_FALLTHRU);
4754 break;
4757 /* Update/insert PHI nodes as necessary. */
4759 /* Now update the edges in the CFG. */
4760 e = ssa_redirect_edge (e, dest);
4762 return e;
4765 /* Returns true if it is possible to remove edge E by redirecting
4766 it to the destination of the other edge from E->src. */
4768 static bool
4769 gimple_can_remove_branch_p (const_edge e)
4771 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
4772 return false;
4774 return true;
4777 /* Simple wrapper, as we can always redirect fallthru edges. */
4779 static basic_block
4780 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
4782 e = gimple_redirect_edge_and_branch (e, dest);
4783 gcc_assert (e);
4785 return NULL;
4789 /* Splits basic block BB after statement STMT (but at least after the
4790 labels). If STMT is NULL, BB is split just after the labels. */
4792 static basic_block
4793 gimple_split_block (basic_block bb, void *stmt)
4795 gimple_stmt_iterator gsi;
4796 gimple_stmt_iterator gsi_tgt;
4797 gimple act;
4798 gimple_seq list;
4799 basic_block new_bb;
4800 edge e;
4801 edge_iterator ei;
4803 new_bb = create_empty_bb (bb);
4805 /* Redirect the outgoing edges. */
4806 new_bb->succs = bb->succs;
4807 bb->succs = NULL;
4808 FOR_EACH_EDGE (e, ei, new_bb->succs)
4809 e->src = new_bb;
4811 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
4812 stmt = NULL;
4814 /* Move everything from GSI to the new basic block. */
4815 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4817 act = gsi_stmt (gsi);
4818 if (gimple_code (act) == GIMPLE_LABEL)
4819 continue;
4821 if (!stmt)
4822 break;
4824 if (stmt == act)
4826 gsi_next (&gsi);
4827 break;
4831 if (gsi_end_p (gsi))
4832 return new_bb;
4834 /* Split the statement list - avoid re-creating new containers as this
4835 brings ugly quadratic memory consumption in the inliner.
4836 (We are still quadratic since we need to update stmt BB pointers,
4837 sadly.) */
4838 list = gsi_split_seq_before (&gsi);
4839 set_bb_seq (new_bb, list);
4840 for (gsi_tgt = gsi_start (list);
4841 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
4842 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
4844 return new_bb;
4848 /* Moves basic block BB after block AFTER. */
4850 static bool
4851 gimple_move_block_after (basic_block bb, basic_block after)
4853 if (bb->prev_bb == after)
4854 return true;
4856 unlink_block (bb);
4857 link_block (bb, after);
4859 return true;
4863 /* Return true if basic_block can be duplicated. */
4865 static bool
4866 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
4868 return true;
4871 /* Create a duplicate of the basic block BB. NOTE: This does not
4872 preserve SSA form. */
4874 static basic_block
4875 gimple_duplicate_bb (basic_block bb)
4877 basic_block new_bb;
4878 gimple_stmt_iterator gsi, gsi_tgt;
4879 gimple_seq phis = phi_nodes (bb);
4880 gimple phi, stmt, copy;
4882 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
4884 /* Copy the PHI nodes. We ignore PHI node arguments here because
4885 the incoming edges have not been setup yet. */
4886 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
4888 phi = gsi_stmt (gsi);
4889 copy = create_phi_node (gimple_phi_result (phi), new_bb);
4890 create_new_def_for (gimple_phi_result (copy), copy,
4891 gimple_phi_result_ptr (copy));
4894 gsi_tgt = gsi_start_bb (new_bb);
4895 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4897 def_operand_p def_p;
4898 ssa_op_iter op_iter;
4900 stmt = gsi_stmt (gsi);
4901 if (gimple_code (stmt) == GIMPLE_LABEL)
4902 continue;
4904 /* Create a new copy of STMT and duplicate STMT's virtual
4905 operands. */
4906 copy = gimple_copy (stmt);
4907 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
4909 maybe_duplicate_eh_stmt (copy, stmt);
4910 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
4912 /* Create new names for all the definitions created by COPY and
4913 add replacement mappings for each new name. */
4914 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
4915 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
4918 return new_bb;
4921 /* Add phi arguments to the phi nodes in E_COPY->dest according to
4922 the phi arguments coming from the equivalent edge at
4923 the phi nodes of DEST.
 
   Assumes the PHI chains of ORIG_E->dest and E_COPY->dest are
   parallel (same length, corresponding results) — the two iterators
   are advanced in lockstep with no independent end check on
   PSI_COPY. */
4925 static void
4926 add_phi_args_after_redirect (edge e_copy, edge orig_e)
4928 gimple_stmt_iterator psi, psi_copy;
4929 gimple phi, phi_copy;
4930 tree def;
4932 for (psi = gsi_start_phis (orig_e->dest),
4933 psi_copy = gsi_start_phis (e_copy->dest);
4934 !gsi_end_p (psi);
4935 gsi_next (&psi), gsi_next (&psi_copy))
4938 phi = gsi_stmt (psi);
4939 phi_copy = gsi_stmt (psi_copy);
/* Reuse the argument (and its source location) that PHI receives
   over the original edge. */
4940 def = PHI_ARG_DEF_FROM_EDGE (phi, orig_e);
4941 add_phi_arg (phi_copy, def, e_copy,
4942 gimple_phi_arg_location_from_edge (phi, orig_e));
4946 /* Adds phi node arguments for edge E_COPY after basic block duplication.
 
   The original edge E corresponding to E_COPY is located through the
   original/copy tables (get_bb_original); the PHI argument flowing
   over E is then replayed over E_COPY. */
4948 static void
4949 add_phi_args_after_copy_edge (edge e_copy)
4951 basic_block bb, bb_copy = e_copy->src, dest;
4952 edge e;
4953 edge_iterator ei;
4954 gimple phi, phi_copy;
4955 tree def;
4956 gimple_stmt_iterator psi, psi_copy;
/* Nothing to do if the destination has no PHI nodes. */
4958 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
4959 return;
/* Map the source and destination back to their originals when they
   are duplicates themselves. */
4961 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
4963 if (e_copy->dest->flags & BB_DUPLICATED)
4964 dest = get_bb_original (e_copy->dest);
4965 else
4966 dest = e_copy->dest;
4968 e = find_edge (bb, dest);
4969 if (!e)
4971 /* During loop unrolling the target of the latch edge is copied.
4972 In this case we are not looking for edge to dest, but to
4973 duplicated block whose original was dest. */
4974 FOR_EACH_EDGE (e, ei, bb->succs)
4976 if ((e->dest->flags & BB_DUPLICATED)
4977 && get_bb_original (e->dest) == dest)
4978 break;
4981 gcc_assert (e != NULL);
/* Walk both PHI chains in lockstep and copy argument + location. */
4984 for (psi = gsi_start_phis (e->dest),
4985 psi_copy = gsi_start_phis (e_copy->dest);
4986 !gsi_end_p (psi);
4987 gsi_next (&psi), gsi_next (&psi_copy))
4989 phi = gsi_stmt (psi);
4990 phi_copy = gsi_stmt (psi_copy);
4991 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4992 add_phi_arg (phi_copy, def, e_copy,
4993 gimple_phi_arg_location_from_edge (phi, e));
4998 /* Basic block BB_COPY was created by code duplication. Add phi node
4999 arguments for edges going out of BB_COPY. The blocks that were
5000 duplicated have BB_DUPLICATED set. */
5002 void
5003 add_phi_args_after_copy_bb (basic_block bb_copy)
5005 edge e_copy;
5006 edge_iterator ei;
/* Delegate to the per-edge helper for every outgoing edge. */
5008 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5010 add_phi_args_after_copy_edge (e_copy);
5014 /* Blocks in REGION_COPY array of length N_REGION were created by
5015 duplication of basic blocks. Add phi node arguments for edges
5016 going from these blocks. If E_COPY is not NULL, also add
5017 phi node arguments for its destination.*/
5019 void
5020 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5021 edge e_copy)
5023 unsigned i;
/* Temporarily mark the copies so the per-edge helper can map them
   back to their originals; the flag is cleared again below. */
5025 for (i = 0; i < n_region; i++)
5026 region_copy[i]->flags |= BB_DUPLICATED;
5028 for (i = 0; i < n_region; i++)
5029 add_phi_args_after_copy_bb (region_copy[i]);
5030 if (e_copy)
5031 add_phi_args_after_copy_edge (e_copy);
5033 for (i = 0; i < n_region; i++)
5034 region_copy[i]->flags &= ~BB_DUPLICATED;
5037 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5038 important exit edge EXIT. By important we mean that no SSA name defined
5039 inside region is live over the other exit edges of the region. All entry
5040 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5041 to the duplicate of the region. SSA form, dominance and loop information
5042 is updated. The new basic blocks are stored to REGION_COPY in the same
5043 order as they had in REGION, provided that REGION_COPY is not NULL.
5044 The function returns false if it is unable to copy the region,
5045 true otherwise.
 
   Primary client is loop-header copying; profile counts/frequencies
   are rescaled between the original and copied region. */
5047 bool
5048 gimple_duplicate_sese_region (edge entry, edge exit,
5049 basic_block *region, unsigned n_region,
5050 basic_block *region_copy)
5052 unsigned i;
5053 bool free_region_copy = false, copying_header = false;
5054 struct loop *loop = entry->dest->loop_father;
5055 edge exit_copy;
5056 VEC (basic_block, heap) *doms;
5057 edge redirected;
5058 int total_freq = 0, entry_freq = 0;
5059 gcov_type total_count = 0, entry_count = 0;
5061 if (!can_copy_bbs_p (region, n_region))
5062 return false;
5064 /* Some sanity checking. Note that we do not check for all possible
5065 missuses of the functions. I.e. if you ask to copy something weird,
5066 it will work, but the state of structures probably will not be
5067 correct. */
5068 for (i = 0; i < n_region; i++)
5070 /* We do not handle subloops, i.e. all the blocks must belong to the
5071 same loop. */
5072 if (region[i]->loop_father != loop)
5073 return false;
/* The loop header may only appear as the region's entry block. */
5075 if (region[i] != entry->dest
5076 && region[i] == loop->header)
5077 return false;
5080 set_loop_copy (loop, loop);
5082 /* In case the function is used for loop header copying (which is the primary
5083 use), ensure that EXIT and its copy will be new latch and entry edges. */
5084 if (loop->header == entry->dest)
5086 copying_header = true;
5087 set_loop_copy (loop, loop_outer (loop));
5089 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5090 return false;
5092 for (i = 0; i < n_region; i++)
5093 if (region[i] != exit->src
5094 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5095 return false;
/* Caller may pass REGION_COPY == NULL; allocate scratch space then. */
5098 if (!region_copy)
5100 region_copy = XNEWVEC (basic_block, n_region);
5101 free_region_copy = true;
5104 gcc_assert (!need_ssa_update_p (cfun));
5106 /* Record blocks outside the region that are dominated by something
5107 inside. */
5108 doms = NULL;
5109 initialize_original_copy_tables ();
5111 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
/* Pick counts when profile data exists, frequencies otherwise. */
5113 if (entry->dest->count)
5115 total_count = entry->dest->count;
5116 entry_count = entry->count;
5117 /* Fix up corner cases, to avoid division by zero or creation of negative
5118 frequencies. */
5119 if (entry_count > total_count)
5120 entry_count = total_count;
5122 else
5124 total_freq = entry->dest->frequency;
5125 entry_freq = EDGE_FREQUENCY (entry);
5126 /* Fix up corner cases, to avoid division by zero or creation of negative
5127 frequencies. */
5128 if (total_freq == 0)
5129 total_freq = 1;
5130 else if (entry_freq > total_freq)
5131 entry_freq = total_freq;
5134 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5135 split_edge_bb_loc (entry));
/* Scale the profile: the copy takes the ENTRY share, the original
   keeps the remainder. */
5136 if (total_count)
5138 scale_bbs_frequencies_gcov_type (region, n_region,
5139 total_count - entry_count,
5140 total_count);
5141 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
5142 total_count);
5144 else
5146 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5147 total_freq);
5148 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
/* When the header was copied, the original region becomes the loop
   body and EXIT's endpoints become the new header/latch. */
5151 if (copying_header)
5153 loop->header = exit->dest;
5154 loop->latch = exit->src;
5157 /* Redirect the entry and add the phi node arguments. */
5158 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
5159 gcc_assert (redirected != NULL);
5160 flush_pending_stmts (entry);
5162 /* Concerning updating of dominators: We must recount dominators
5163 for entry block and its copy. Anything that is outside of the
5164 region, but was dominated by something inside needs recounting as
5165 well. */
5166 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5167 VEC_safe_push (basic_block, heap, doms, get_bb_original (entry->dest));
5168 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5169 VEC_free (basic_block, heap, doms);
5171 /* Add the other PHI node arguments. */
5172 add_phi_args_after_copy (region_copy, n_region, NULL);
5174 /* Update the SSA web. */
5175 update_ssa (TODO_update_ssa);
5177 if (free_region_copy)
5178 free (region_copy);
5180 free_original_copy_tables ();
5181 return true;
5184 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5185 are stored to REGION_COPY in the same order in that they appear
5186 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5187 the region, EXIT an exit from it. The condition guarding EXIT
5188 is moved to ENTRY. Returns true if duplication succeeds, false
5189 otherwise.
5191 For example,
5193 some_code;
5194 if (cond)
5196 else
5199 is transformed to
5201 if (cond)
5203 some_code;
5206 else
5208 some_code;
 
   Used for peeling the last iteration of a loop ("tail duplication").
   NOTE(review): every parameter below is marked ATTRIBUTE_UNUSED yet
   all of them are used in the body — the markers look stale and could
   be dropped. */
5213 bool
5214 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
5215 basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
5216 basic_block *region_copy ATTRIBUTE_UNUSED)
5218 unsigned i;
5219 bool free_region_copy = false;
5220 struct loop *loop = exit->dest->loop_father;
5221 struct loop *orig_loop = entry->dest->loop_father;
5222 basic_block switch_bb, entry_bb, nentry_bb;
5223 VEC (basic_block, heap) *doms;
5224 int total_freq = 0, exit_freq = 0;
5225 gcov_type total_count = 0, exit_count = 0;
5226 edge exits[2], nexits[2], e;
5227 gimple_stmt_iterator gsi,gsi1;
5228 gimple cond_stmt;
5229 edge sorig, snew, orig_e;
5230 basic_block exit_bb;
5231 edge_iterator ei;
5232 VEC (edge, heap) *redirect_edges;
5233 basic_block iters_bb, orig_src;
5234 tree new_rhs;
/* EXIT->src must end in a two-way condition; EXITS[1] is the other
   successor edge. */
5236 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5237 exits[0] = exit;
5238 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5240 if (!can_copy_bbs_p (region, n_region))
5241 return false;
5243 /* Some sanity checking. Note that we do not check for all possible
5244 missuses of the functions. I.e. if you ask to copy something weird
5245 (e.g., in the example, if there is a jump from inside to the middle
5246 of some_code, or come_code defines some of the values used in cond)
5247 it will work, but the resulting code will not be correct. */
5248 for (i = 0; i < n_region; i++)
5250 if (region[i] == orig_loop->latch)
5251 return false;
5254 initialize_original_copy_tables ();
5255 set_loop_copy (orig_loop, loop);
5256 duplicate_subloops (orig_loop, loop);
5258 if (!region_copy)
5260 region_copy = XNEWVEC (basic_block, n_region);
5261 free_region_copy = true;
5264 gcc_assert (!need_ssa_update_p (cfun));
5266 /* Record blocks outside the region that are dominated by something
5267 inside. */
5268 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
/* Pick counts when profile data exists, frequencies otherwise. */
5270 if (exit->src->count)
5272 total_count = exit->src->count;
5273 exit_count = exit->count;
5274 /* Fix up corner cases, to avoid division by zero or creation of negative
5275 frequencies. */
5276 if (exit_count > total_count)
5277 exit_count = total_count;
5279 else
5281 total_freq = exit->src->frequency;
5282 exit_freq = EDGE_FREQUENCY (exit);
5283 /* Fix up corner cases, to avoid division by zero or creation of negative
5284 frequencies. */
5285 if (total_freq == 0)
5286 total_freq = 1;
5287 if (exit_freq > total_freq)
5288 exit_freq = total_freq;
5291 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5292 split_edge_bb_loc (exit));
5293 if (total_count)
5295 scale_bbs_frequencies_gcov_type (region, n_region,
5296 total_count - exit_count,
5297 total_count);
5298 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
5299 total_count);
5301 else
5303 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
5304 total_freq);
5305 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
5308 /* Create the switch block, and put the exit condition to it. */
5309 entry_bb = entry->dest;
5310 nentry_bb = get_bb_copy (entry_bb);
/* Reuse ENTRY->src as the switch block when it has no control-flow
   statement at the end; otherwise split the entry edge. */
5311 if (!last_stmt (entry->src)
5312 || !stmt_ends_bb_p (last_stmt (entry->src)))
5313 switch_bb = entry->src;
5314 else
5315 switch_bb = split_edge (entry);
5316 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
5318 gsi = gsi_last_bb (switch_bb);
5319 cond_stmt = last_stmt (exit->src);
5320 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
5321 cond_stmt = gimple_copy (cond_stmt);
5323 /* If the block consisting of the exit condition has the latch as
5324 successor, then the body of the loop is executed before
5325 the exit condition is tested. In such case, moving the
5326 condition to the entry, causes that the loop will iterate
5327 one less iteration (which is the wanted outcome, since we
5328 peel out the last iteration). If the body is executed after
5329 the condition, moving the condition to the entry requires
5330 decrementing one iteration. */
5331 if (exits[1]->dest == orig_loop->latch)
5332 new_rhs = gimple_cond_rhs (cond_stmt);
5333 else
5335 new_rhs = fold_build2 (MINUS_EXPR, TREE_TYPE (gimple_cond_rhs (cond_stmt)),
5336 gimple_cond_rhs (cond_stmt),
5337 build_int_cst (TREE_TYPE (gimple_cond_rhs (cond_stmt)), 1));
/* If the bound is an SSA name, gimplify the new RHS right after its
   definition so it dominates every use. */
5339 if (TREE_CODE (gimple_cond_rhs (cond_stmt)) == SSA_NAME)
5341 iters_bb = gimple_bb (SSA_NAME_DEF_STMT (gimple_cond_rhs (cond_stmt)));
5342 for (gsi1 = gsi_start_bb (iters_bb); !gsi_end_p (gsi1); gsi_next (&gsi1))
5343 if (gsi_stmt (gsi1) == SSA_NAME_DEF_STMT (gimple_cond_rhs (cond_stmt)))
5344 break;
5346 new_rhs = force_gimple_operand_gsi (&gsi1, new_rhs, true,
5347 NULL_TREE,false,GSI_CONTINUE_LINKING);
5350 gimple_cond_set_rhs (cond_stmt, unshare_expr (new_rhs));
5351 gimple_cond_set_lhs (cond_stmt, unshare_expr (gimple_cond_lhs (cond_stmt)));
5352 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
/* The former fallthrough edge inherits EXITS[1]'s flags; a new edge
   to the copied entry carries EXITS[0]'s flags. */
5354 sorig = single_succ_edge (switch_bb);
5355 sorig->flags = exits[1]->flags;
5356 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
5358 /* Register the new edge from SWITCH_BB in loop exit lists. */
5359 rescan_loop_exit (snew, true, false);
5361 /* Add the PHI node arguments. */
5362 add_phi_args_after_copy (region_copy, n_region, snew);
5364 /* Get rid of now superfluous conditions and associated edges (and phi node
5365 arguments). */
5366 exit_bb = exit->dest;
5368 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
5369 PENDING_STMT (e) = NULL;
5371 /* If the block consisting of the exit condition has the latch as
5372 successor, then the body of the loop is executed before
5373 the exit condition is tested.
5375 { body }
5376 { cond } (exit[0]) -> { latch }
5378 V (exit[1])
5380 { exit_bb }
5383 In such case, the equivalent copied edge nexits[1]
5384 (for the peeled iteration) needs to be redirected to exit_bb.
5386 Otherwise,
5388 { cond } (exit[0]) -> { body }
5390 V (exit[1])
5392 { exit_bb }
5395 exit[0] is pointing to the body of the loop,
5396 and the equivalent nexits[0] needs to be redirected to
5397 the copied body (of the peeled iteration). */
5399 if (exits[1]->dest == orig_loop->latch)
5400 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
5401 else
5402 e = redirect_edge_and_branch (nexits[0], nexits[1]->dest);
5403 PENDING_STMT (e) = NULL;
5405 redirect_edges = VEC_alloc (edge, heap, 10);
/* Temporarily flag the copies so edges originating from them can be
   recognized below; cleared again before redirecting. */
5407 for (i = 0; i < n_region; i++)
5408 region_copy[i]->flags |= BB_DUPLICATED;
5410 /* Iterate all incoming edges to latch. All those coming from
5411 copied bbs will be redirected to exit_bb. */
5412 FOR_EACH_EDGE (e, ei, orig_loop->latch->preds)
5414 if (e->src->flags & BB_DUPLICATED)
5415 VEC_safe_push (edge, heap, redirect_edges, e);
5418 for (i = 0; i < n_region; i++)
5419 region_copy[i]->flags &= ~BB_DUPLICATED;
5421 for (i = 0; VEC_iterate (edge, redirect_edges, i, e); ++i)
5423 e = redirect_edge_and_branch (e, exit_bb);
5424 PENDING_STMT (e) = NULL;
5425 orig_src = get_bb_original (e->src);
5426 orig_e = find_edge (orig_src, orig_loop->latch);
5427 add_phi_args_after_redirect (e, orig_e);
5430 VEC_free (edge, heap, redirect_edges);
5432 /* Anything that is outside of the region, but was dominated by something
5433 inside needs to update dominance info. */
5434 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5435 VEC_free (basic_block, heap, doms);
5437 /* Update the SSA web. */
5438 update_ssa (TODO_update_ssa);
5440 if (free_region_copy)
5441 free (region_copy);
5443 free_original_copy_tables ();
5444 return true;
5447 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
5448 adding blocks when the dominator traversal reaches EXIT. This
5449 function silently assumes that ENTRY strictly dominates EXIT. */
5451 void
5452 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
5453 VEC(basic_block,heap) **bbs_p)
5455 basic_block son;
/* Recursive walk over the dominator tree; EXIT itself is pushed but
   its dominated subtree is not entered. */
5457 for (son = first_dom_son (CDI_DOMINATORS, entry);
5458 son;
5459 son = next_dom_son (CDI_DOMINATORS, son))
5461 VEC_safe_push (basic_block, heap, *bbs_p, son);
5462 if (son != exit)
5463 gather_blocks_in_sese_region (son, exit, bbs_p);
5467 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
5468 The duplicates are recorded in VARS_MAP.
 
   A decl already in TO_CONTEXT is left untouched; otherwise a cached
   duplicate is reused or a new one is created (copy_var_decl for
   variables, copy_node for CONST_DECLs). */
5470 static void
5471 replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
5472 tree to_context)
5474 tree t = *tp, new_t;
5475 struct function *f = DECL_STRUCT_FUNCTION (to_context);
5476 void **loc;
5478 if (DECL_CONTEXT (t) == to_context)
5479 return;
5481 loc = pointer_map_contains (vars_map, t);
5483 if (!loc)
5485 loc = pointer_map_insert (vars_map, t);
5487 if (SSA_VAR_P (t))
5489 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
/* Register the duplicate among the target function's locals. */
5490 f->local_decls = tree_cons (NULL_TREE, new_t, f->local_decls);
5492 else
5494 gcc_assert (TREE_CODE (t) == CONST_DECL);
5495 new_t = copy_node (t);
5497 DECL_CONTEXT (new_t) = to_context;
5499 *loc = new_t;
5501 else
5502 new_t = (tree) *loc;
5504 *tp = new_t;
5508 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
5509 VARS_MAP maps old ssa names and var_decls to the new ones.
 
   Only GIMPLE registers are handled (asserted); the underlying decl
   is duplicated first, then a fresh SSA name is made inside
   TO_CONTEXT's function context. Results are memoized in VARS_MAP. */
5511 static tree
5512 replace_ssa_name (tree name, struct pointer_map_t *vars_map,
5513 tree to_context)
5515 void **loc;
5516 tree new_name, decl = SSA_NAME_VAR (name);
5518 gcc_assert (is_gimple_reg (name));
5520 loc = pointer_map_contains (vars_map, name);
5522 if (!loc)
5524 replace_by_duplicate_decl (&decl, vars_map, to_context);
/* make_ssa_name allocates from cfun, so temporarily switch to the
   destination function. */
5526 push_cfun (DECL_STRUCT_FUNCTION (to_context));
5527 if (gimple_in_ssa_p (cfun))
5528 add_referenced_var (decl);
5530 new_name = make_ssa_name (decl, SSA_NAME_DEF_STMT (name));
5531 if (SSA_NAME_IS_DEFAULT_DEF (name))
5532 set_default_def (decl, new_name);
5533 pop_cfun ();
5535 loc = pointer_map_insert (vars_map, name);
5536 *loc = new_name;
5538 else
5539 new_name = (tree) *loc;
5541 return new_name;
/* State shared by the move_stmt_r/move_stmt_op walkers while moving
   statements from one function to another (move_block_to_fn). */
5544 struct move_stmt_d
5546 tree orig_block; /* BLOCK being moved out of the source function. */
5547 tree new_block; /* Replacement BLOCK in the destination function. */
5548 tree from_context; /* FUNCTION_DECL of the source function. */
5549 tree to_context; /* FUNCTION_DECL of the destination function. */
5550 tree vars_map_field_doc_placeholder_removed; 
5551 htab_t new_label_map; /* Maps old LABEL_DECLs to their replacements. */
5552 struct pointer_map_t *eh_map; /* Maps source EH regions to dest regions. */
5553 bool remap_decls_p; /* False inside OMP directives, where decls stay
   with the parent function. */
5556 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
5557 contained in *TP if it has been ORIG_BLOCK previously and change the
5558 DECL_CONTEXT of every local variable referenced in *TP.
 
   Operand callback for walk_gimple_stmt; DATA is a walk_stmt_info
   whose info field is the struct move_stmt_d. Always returns
   NULL_TREE so the walk continues. */
5560 static tree
5561 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
5563 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5564 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
5565 tree t = *tp;
5567 if (EXPR_P (t))
5568 /* We should never have TREE_BLOCK set on non-statements. */
5569 gcc_assert (!TREE_BLOCK (t));
5571 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
5573 if (TREE_CODE (t) == SSA_NAME)
5574 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
5575 else if (TREE_CODE (t) == LABEL_DECL)
5577 if (p->new_label_map)
/* Look the label up by UID in the new-label hash table. */
5579 struct tree_map in, *out;
5580 in.base.from = t;
5581 out = (struct tree_map *)
5582 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
5583 if (out)
5584 *tp = t = out->to;
5587 DECL_CONTEXT (t) = p->to_context;
5589 else if (p->remap_decls_p)
5591 /* Replace T with its duplicate. T should no longer appear in the
5592 parent function, so this looks wasteful; however, it may appear
5593 in referenced_vars, and more importantly, as virtual operands of
5594 statements, and in alias lists of other variables. It would be
5595 quite difficult to expunge it from all those places. ??? It might
5596 suffice to do this for addressable variables. */
5597 if ((TREE_CODE (t) == VAR_DECL
5598 && !is_global_var (t))
5599 || TREE_CODE (t) == CONST_DECL)
5600 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
5602 if (SSA_VAR_P (t)
5603 && gimple_in_ssa_p (cfun))
/* Make the (possibly replaced) variable known to the destination
   function's referenced-vars machinery. */
5605 push_cfun (DECL_STRUCT_FUNCTION (p->to_context));
5606 add_referenced_var (*tp);
5607 pop_cfun ();
5610 *walk_subtrees = 0;
5612 else if (TYPE_P (t))
5613 *walk_subtrees = 0;
5615 return NULL_TREE;
5618 /* Helper for move_stmt_r. Given an EH region number for the source
5619 function, map that to the duplicate EH region number in the dest.
 
   The mapping must exist (the slot dereference would crash
   otherwise); P->eh_map is populated by the EH-duplication done in
   move_block_to_fn's caller. */
5621 static int
5622 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
5624 eh_region old_r, new_r;
5625 void **slot;
5627 old_r = get_eh_region_from_number (old_nr);
5628 slot = pointer_map_contains (p->eh_map, old_r);
5629 new_r = (eh_region) *slot;
5631 return new_r->index;
5634 /* Similar, but operate on INTEGER_CSTs: OLD_T_NR is an INTEGER_CST
   holding the source EH region number; the mapped number is returned
   as a fresh INTEGER_CST. */
5636 static tree
5637 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
5639 int old_nr, new_nr;
5641 old_nr = tree_low_cst (old_t_nr, 0);
5642 new_nr = move_stmt_eh_region_nr (old_nr, p);
5644 return build_int_cst (NULL, new_nr);
5647 /* Like move_stmt_op, but for gimple statements.
5649 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
5650 contained in the current statement in *GSI_P and change the
5651 DECL_CONTEXT of every local variable referenced in the current
5652 statement.
 
   Statement callback for walk_gimple_stmt. Also remaps EH region
   numbers embedded in calls to __builtin_eh_* and in GIMPLE_RESX /
   GIMPLE_EH_DISPATCH, and disables decl remapping inside OMP
   directive bodies. */
5654 static tree
5655 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
5656 struct walk_stmt_info *wi)
5658 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
5659 gimple stmt = gsi_stmt (*gsi_p);
5660 tree block = gimple_block (stmt);
5662 if (p->orig_block == NULL_TREE
5663 || block == p->orig_block
5664 || block == NULL_TREE)
5665 gimple_set_block (stmt, p->new_block);
5666 #ifdef ENABLE_CHECKING
/* Any other BLOCK must be nested inside ORIG_BLOCK. */
5667 else if (block != p->new_block)
5669 while (block && block != p->orig_block)
5670 block = BLOCK_SUPERCONTEXT (block);
5671 gcc_assert (block);
5673 #endif
5675 switch (gimple_code (stmt))
5677 case GIMPLE_CALL:
5678 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
5680 tree r, fndecl = gimple_call_fndecl (stmt);
5681 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
5682 switch (DECL_FUNCTION_CODE (fndecl))
5684 case BUILT_IN_EH_COPY_VALUES:
/* Second argument is a region number too; then fall through to
   remap the first. */
5685 r = gimple_call_arg (stmt, 1);
5686 r = move_stmt_eh_region_tree_nr (r, p);
5687 gimple_call_set_arg (stmt, 1, r);
5688 /* FALLTHRU */
5690 case BUILT_IN_EH_POINTER:
5691 case BUILT_IN_EH_FILTER:
5692 r = gimple_call_arg (stmt, 0);
5693 r = move_stmt_eh_region_tree_nr (r, p);
5694 gimple_call_set_arg (stmt, 0, r);
5695 break;
5697 default:
5698 break;
5701 break;
5703 case GIMPLE_RESX:
5705 int r = gimple_resx_region (stmt);
5706 r = move_stmt_eh_region_nr (r, p);
5707 gimple_resx_set_region (stmt, r);
5709 break;
5711 case GIMPLE_EH_DISPATCH:
5713 int r = gimple_eh_dispatch_region (stmt);
5714 r = move_stmt_eh_region_nr (r, p);
5715 gimple_eh_dispatch_set_region (stmt, r);
5717 break;
5719 case GIMPLE_OMP_RETURN:
5720 case GIMPLE_OMP_CONTINUE:
5721 break;
5722 default:
5723 if (is_gimple_omp (stmt))
5725 /* Do not remap variables inside OMP directives. Variables
5726 referenced in clauses and directive header belong to the
5727 parent function and should not be moved into the child
5728 function. */
5729 bool save_remap_decls_p = p->remap_decls_p;
5730 p->remap_decls_p = false;
5731 *handled_ops_p = true;
5733 walk_gimple_seq (gimple_omp_body (stmt), move_stmt_r,
5734 move_stmt_op, wi);
5736 p->remap_decls_p = save_remap_decls_p;
5738 break;
5741 return NULL_TREE;
5744 /* Marks virtual operands of all statements in basic blocks BBS for
5745 renaming.
 
   NOTE(review): the comment says "blocks BBS" but the function takes
   a single block BB — the comment looks stale. Both PHI nodes and
   ordinary statements of BB are processed. */
5747 void
5748 mark_virtual_ops_in_bb (basic_block bb)
5750 gimple_stmt_iterator gsi;
5752 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5753 mark_virtual_ops_for_renaming (gsi_stmt (gsi));
5755 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5756 mark_virtual_ops_for_renaming (gsi_stmt (gsi));
5759 /* Move basic block BB from function CFUN to function DEST_FN. The
5760 block is moved out of the original linked list and placed after
5761 block AFTER in the new list. Also, the block is removed from the
5762 original array of blocks and placed in DEST_FN's array of blocks.
5763 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
5764 updated to reflect the moved edges.
5766 The local variables are remapped to new instances, VARS_MAP is used
5767 to record the mapping.
 
   D carries the remapping state (blocks, decls, labels, EH map); BB
   keeps its index in the destination CFG's block array. */
5769 static void
5770 move_block_to_fn (struct function *dest_cfun, basic_block bb,
5771 basic_block after, bool update_edge_count_p,
5772 struct move_stmt_d *d)
5774 struct control_flow_graph *cfg;
5775 edge_iterator ei;
5776 edge e;
5777 gimple_stmt_iterator si;
5778 unsigned old_len, new_len;
5780 /* Remove BB from dominance structures. */
5781 delete_from_dominance_info (CDI_DOMINATORS, bb);
5782 if (current_loops)
5783 remove_bb_from_loops (bb);
5785 /* Link BB to the new linked list. */
5786 move_block_after (bb, after);
5788 /* Update the edge count in the corresponding flowgraphs. */
5789 if (update_edge_count_p)
5790 FOR_EACH_EDGE (e, ei, bb->succs)
5792 cfun->cfg->x_n_edges--;
5793 dest_cfun->cfg->x_n_edges++;
5796 /* Remove BB from the original basic block array. */
5797 VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
5798 cfun->cfg->x_n_basic_blocks--;
5800 /* Grow DEST_CFUN's basic block array if needed. */
5801 cfg = dest_cfun->cfg;
5802 cfg->x_n_basic_blocks++;
5803 if (bb->index >= cfg->x_last_basic_block)
5804 cfg->x_last_basic_block = bb->index + 1;
5806 old_len = VEC_length (basic_block, cfg->x_basic_block_info);
5807 if ((unsigned) cfg->x_last_basic_block >= old_len)
/* Grow by roughly 5/4 to amortize repeated insertions. */
5809 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
5810 VEC_safe_grow_cleared (basic_block, gc, cfg->x_basic_block_info,
5811 new_len);
5814 VEC_replace (basic_block, cfg->x_basic_block_info,
5815 bb->index, bb);
5817 /* Remap the variables in phi nodes. */
5818 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
5820 gimple phi = gsi_stmt (si);
5821 use_operand_p use;
5822 tree op = PHI_RESULT (phi);
5823 ssa_op_iter oi;
5825 if (!is_gimple_reg (op))
5827 /* Remove the phi nodes for virtual operands (alias analysis will be
5828 run for the new function, anyway). */
5829 remove_phi_node (&si, true);
5830 continue;
5833 SET_PHI_RESULT (phi,
5834 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
5835 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
5837 op = USE_FROM_PTR (use);
5838 if (TREE_CODE (op) == SSA_NAME)
5839 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
5842 gsi_next (&si);
5845 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5847 gimple stmt = gsi_stmt (si);
5848 struct walk_stmt_info wi;
5850 memset (&wi, 0, sizeof (wi));
5851 wi.info = d;
5852 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
5854 if (gimple_code (stmt) == GIMPLE_LABEL)
/* Move the label's UID->block mapping to the destination CFG and
   clear it from the source. */
5856 tree label = gimple_label_label (stmt);
5857 int uid = LABEL_DECL_UID (label);
5859 gcc_assert (uid > -1);
5861 old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
5862 if (old_len <= (unsigned) uid)
5864 new_len = 3 * uid / 2 + 1;
5865 VEC_safe_grow_cleared (basic_block, gc,
5866 cfg->x_label_to_block_map, new_len);
5869 VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
5870 VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
5872 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
5874 if (uid >= dest_cfun->cfg->last_label_uid)
5875 dest_cfun->cfg->last_label_uid = uid + 1;
/* Transfer EH info and value-profile histograms from the source
   function to the destination. */
5878 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
5879 remove_stmt_from_eh_lp_fn (cfun, stmt);
5881 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
5882 gimple_remove_stmt_histograms (cfun, stmt);
5884 /* We cannot leave any operands allocated from the operand caches of
5885 the current function. */
5886 free_stmt_operands (stmt);
5887 push_cfun (dest_cfun);
5888 update_stmt (stmt);
5889 pop_cfun ();
/* Fix up goto_block on outgoing edges the same way statement BLOCKs
   were fixed above. */
5892 FOR_EACH_EDGE (e, ei, bb->succs)
5893 if (e->goto_locus)
5895 tree block = e->goto_block;
5896 if (d->orig_block == NULL_TREE
5897 || block == d->orig_block)
5898 e->goto_block = d->new_block;
5899 #ifdef ENABLE_CHECKING
5900 else if (block != d->new_block)
5902 while (block && block != d->orig_block)
5903 block = BLOCK_SUPERCONTEXT (block);
5904 gcc_assert (block);
5906 #endif
5910 /* Examine the statements in BB (which is in SRC_CFUN); find and return
5911 the outermost EH region. Use REGION as the incoming base EH region.
 
   Each statement's landing-pad region is merged into REGION via
   eh_region_outermost; statements without EH info are skipped. */
5913 static eh_region
5914 find_outermost_region_in_block (struct function *src_cfun,
5915 basic_block bb, eh_region region)
5917 gimple_stmt_iterator si;
5919 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5921 gimple stmt = gsi_stmt (si);
5922 eh_region stmt_region;
5923 int lp_nr;
5925 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
5926 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
5927 if (stmt_region)
5929 if (region == NULL)
5930 region = stmt_region;
5931 else if (stmt_region != region)
5933 region = eh_region_outermost (src_cfun, stmt_region, region);
5934 gcc_assert (region != NULL);
5939 return region;
/* Callback used when moving labels between functions: create a fresh
   artificial label replacing DECL and record the DECL -> new-label
   pair in the hash table passed as DATA. Returns the new label.
   Each label must be mapped at most once (asserted via empty slot). */
5942 static tree
5943 new_label_mapper (tree decl, void *data)
5945 htab_t hash = (htab_t) data;
5946 struct tree_map *m;
5947 void **slot;
5949 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
5951 m = XNEW (struct tree_map);
5952 m->hash = DECL_UID (decl);
5953 m->base.from = decl;
5954 m->to = create_artificial_label (UNKNOWN_LOCATION);
/* Preserve the UID so label_to_block mappings stay valid. */
5955 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
5956 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
5957 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
5959 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
5960 gcc_assert (*slot == NULL);
5962 *slot = m;
5964 return m->to;
5967 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
5968 subblocks.
 
   Variables and CONST_DECLs are replaced in place by duplicates from
   VARS_MAP (created on demand); DECL_VALUE_EXPR is carried over to
   the duplicate. Recurses into BLOCK_SUBBLOCKS. */
5970 static void
5971 replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
5972 tree to_context)
5974 tree *tp, t;
5976 for (tp = &BLOCK_VARS (block); *tp; tp = &TREE_CHAIN (*tp))
5978 t = *tp;
5979 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
5980 continue;
5981 replace_by_duplicate_decl (&t, vars_map, to_context);
5982 if (t != *tp)
5984 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
5986 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
5987 DECL_HAS_VALUE_EXPR_P (t) = 1;
/* Splice the duplicate into the chain in place of the original. */
5989 TREE_CHAIN (t) = TREE_CHAIN (*tp);
5990 *tp = t;
5994 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
5995 replace_block_vars_by_duplicates (block, vars_map, to_context);
5998 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
5999 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6000 single basic block in the original CFG and the new basic block is
6001 returned. DEST_CFUN must not have a CFG yet.
6003 Note that the region need not be a pure SESE region. Blocks inside
6004 the region may contain calls to abort/exit. The only restriction
6005 is that ENTRY_BB should be the only entry point and it must
6006 dominate EXIT_BB.
6008 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6009 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
6010 to the new function.
6012 All local variables referenced in the region are assumed to be in
6013 the corresponding BLOCK_VARS and unexpanded variable lists
6014 associated with DEST_CFUN. */
6016 basic_block
6017 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6018 basic_block exit_bb, tree orig_block)
6020 VEC(basic_block,heap) *bbs, *dom_bbs;
6021 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6022 basic_block after, bb, *entry_pred, *exit_succ, abb;
6023 struct function *saved_cfun = cfun;
6024 int *entry_flag, *exit_flag;
6025 unsigned *entry_prob, *exit_prob;
6026 unsigned i, num_entry_edges, num_exit_edges;
6027 edge e;
6028 edge_iterator ei;
6029 htab_t new_label_map;
6030 struct pointer_map_t *vars_map, *eh_map;
6031 struct loop *loop = entry_bb->loop_father;
6032 struct move_stmt_d d;
6034 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6035 region. */
6036 gcc_assert (entry_bb != exit_bb
6037 && (!exit_bb
6038 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6040 /* Collect all the blocks in the region. Manually add ENTRY_BB
6041 because it won't be added by dfs_enumerate_from. */
6042 bbs = NULL;
6043 VEC_safe_push (basic_block, heap, bbs, entry_bb);
6044 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6046 /* The blocks that used to be dominated by something in BBS will now be
6047 dominated by the new block. */
6048 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6049 VEC_address (basic_block, bbs),
6050 VEC_length (basic_block, bbs));
6052 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6053 the predecessor edges to ENTRY_BB and the successor edges to
6054 EXIT_BB so that we can re-attach them to the new basic block that
6055 will replace the region. */
6056 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6057 entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
6058 entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
6059 entry_prob = XNEWVEC (unsigned, num_entry_edges);
6060 i = 0;
/* NOTE: the iterator is deliberately not advanced -- remove_edge
   deletes the current edge, so ei_safe_edge keeps yielding the next
   remaining predecessor until none are left.  */
6061 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6063 entry_prob[i] = e->probability;
6064 entry_flag[i] = e->flags;
6065 entry_pred[i++] = e->src;
6066 remove_edge (e);
6069 if (exit_bb)
6071 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6072 exit_succ = (basic_block *) xcalloc (num_exit_edges,
6073 sizeof (basic_block));
6074 exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
6075 exit_prob = XNEWVEC (unsigned, num_exit_edges);
6076 i = 0;
/* Same non-advancing iteration trick as for the entry edges above.  */
6077 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6079 exit_prob[i] = e->probability;
6080 exit_flag[i] = e->flags;
6081 exit_succ[i++] = e->dest;
6082 remove_edge (e);
6085 else
6087 num_exit_edges = 0;
6088 exit_succ = NULL;
6089 exit_flag = NULL;
6090 exit_prob = NULL;
6093 /* Switch context to the child function to initialize DEST_FN's CFG. */
6094 gcc_assert (dest_cfun->cfg == NULL);
6095 push_cfun (dest_cfun);
6097 init_empty_tree_cfg ();
6099 /* Initialize EH information for the new function. */
6100 eh_map = NULL;
6101 new_label_map = NULL;
/* Duplicate into DEST_CFUN only the outermost EH region (if any) used
   by the moved blocks; labels inside the cloned regions are remapped
   through NEW_LABEL_MAP.  */
6102 if (saved_cfun->eh)
6104 eh_region region = NULL;
6106 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
6107 region = find_outermost_region_in_block (saved_cfun, bb, region);
6109 init_eh_for_function ();
6110 if (region != NULL)
6112 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6113 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
6114 new_label_mapper, new_label_map);
6118 pop_cfun ();
6120 /* Move blocks from BBS into DEST_CFUN. */
6121 gcc_assert (VEC_length (basic_block, bbs) >= 2);
6122 after = dest_cfun->cfg->x_entry_block_ptr;
6123 vars_map = pointer_map_create ();
6125 memset (&d, 0, sizeof (d));
6126 d.orig_block = orig_block;
6127 d.new_block = DECL_INITIAL (dest_cfun->decl);
6128 d.from_context = cfun->decl;
6129 d.to_context = dest_cfun->decl;
6130 d.vars_map = vars_map;
6131 d.new_label_map = new_label_map;
6132 d.eh_map = eh_map;
6133 d.remap_decls_p = true;
6135 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
6137 /* No need to update edge counts on the last block. It has
6138 already been updated earlier when we detached the region from
6139 the original CFG. */
6140 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
6141 after = bb;
6144 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6145 if (orig_block)
6147 tree block;
6148 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6149 == NULL_TREE);
6150 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6151 = BLOCK_SUBBLOCKS (orig_block);
6152 for (block = BLOCK_SUBBLOCKS (orig_block);
6153 block; block = BLOCK_CHAIN (block))
6154 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
6155 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
6158 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
6159 vars_map, dest_cfun->decl);
6161 if (new_label_map)
6162 htab_delete (new_label_map);
6163 if (eh_map)
6164 pointer_map_destroy (eh_map);
6165 pointer_map_destroy (vars_map);
6167 /* Rewire the entry and exit blocks. The successor to the entry
6168 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6169 the child function. Similarly, the predecessor of DEST_FN's
6170 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6171 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6172 various CFG manipulation function get to the right CFG.
6174 FIXME, this is silly. The CFG ought to become a parameter to
6175 these helpers. */
6176 push_cfun (dest_cfun);
6177 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
6178 if (exit_bb)
6179 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
6180 pop_cfun ();
6182 /* Back in the original function, the SESE region has disappeared,
6183 create a new basic block in its place. */
6184 bb = create_empty_bb (entry_pred[0]);
6185 if (current_loops)
6186 add_bb_to_loop (bb, loop);
6187 for (i = 0; i < num_entry_edges; i++)
6189 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6190 e->probability = entry_prob[i];
6193 for (i = 0; i < num_exit_edges; i++)
6195 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6196 e->probability = exit_prob[i];
/* The replacement block takes over ENTRY_BB's immediate dominator and
   becomes the new immediate dominator of everything the region used
   to dominate.  */
6199 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6200 for (i = 0; VEC_iterate (basic_block, dom_bbs, i, abb); i++)
6201 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6202 VEC_free (basic_block, heap, dom_bbs);
6204 if (exit_bb)
6206 free (exit_prob);
6207 free (exit_flag);
6208 free (exit_succ);
6210 free (entry_prob);
6211 free (entry_flag);
6212 free (entry_pred);
6213 VEC_free (basic_block, heap, bbs);
6215 return bb;
6219 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree-pass.h)
   The dump format depends on how far FN has been lowered: a CFG-based
   dump, a plain GIMPLE statement sequence, or a GENERIC tree dump.  */
6222 void
6223 dump_function_to_file (tree fn, FILE *file, int flags)
6225 tree arg, vars, var;
6226 struct function *dsf;
6227 bool ignore_topmost_bind = false, any_var = false;
6228 basic_block bb;
6229 tree chain;
6231 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
/* Print the parameter list, comma separated.  */
6233 arg = DECL_ARGUMENTS (fn);
6234 while (arg)
6236 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
6237 fprintf (file, " ");
6238 print_generic_expr (file, arg, dump_flags);
6239 if (flags & TDF_VERBOSE)
6240 print_node (file, "", arg, 4);
6241 if (TREE_CHAIN (arg))
6242 fprintf (file, ", ");
6243 arg = TREE_CHAIN (arg);
6245 fprintf (file, ")\n");
6247 if (flags & TDF_VERBOSE)
6248 print_node (file, "", fn, 2);
6250 dsf = DECL_STRUCT_FUNCTION (fn);
6251 if (dsf && (flags & TDF_EH))
6252 dump_eh_tree (file, dsf);
/* A raw dump of a function with no GIMPLE body prints the raw tree
   node and stops here.  */
6254 if (flags & TDF_RAW && !gimple_has_body_p (fn))
6256 dump_node (fn, TDF_SLIM | flags, file);
6257 return;
6260 /* Switch CFUN to point to FN. */
6261 push_cfun (DECL_STRUCT_FUNCTION (fn));
6263 /* When GIMPLE is lowered, the variables are no longer available in
6264 BIND_EXPRs, so display them separately. */
6265 if (cfun && cfun->decl == fn && cfun->local_decls)
6267 ignore_topmost_bind = true;
6269 fprintf (file, "{\n");
6270 for (vars = cfun->local_decls; vars; vars = TREE_CHAIN (vars))
6272 var = TREE_VALUE (vars);
6274 print_generic_decl (file, var, flags);
6275 if (flags & TDF_VERBOSE)
6276 print_node (file, "", var, 4);
6277 fprintf (file, "\n");
6279 any_var = true;
6283 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
6285 /* If the CFG has been built, emit a CFG-based dump. */
6286 check_bb_profile (ENTRY_BLOCK_PTR, file);
6287 if (!ignore_topmost_bind)
6288 fprintf (file, "{\n");
6290 if (any_var && n_basic_blocks)
6291 fprintf (file, "\n");
6293 FOR_EACH_BB (bb)
6294 gimple_dump_bb (bb, file, 2, flags);
6296 fprintf (file, "}\n");
6297 check_bb_profile (EXIT_BLOCK_PTR, file);
6299 else if (DECL_SAVED_TREE (fn) == NULL)
6301 /* The function is now in GIMPLE form but the CFG has not been
6302 built yet. Emit the single sequence of GIMPLE statements
6303 that make up its body. */
6304 gimple_seq body = gimple_body (fn);
/* A body that is exactly one GIMPLE_BIND is printed as-is; otherwise
   the sequence is wrapped in braces (unless the topmost bind is being
   skipped because the locals were printed above).  */
6306 if (gimple_seq_first_stmt (body)
6307 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
6308 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
6309 print_gimple_seq (file, body, 0, flags);
6310 else
6312 if (!ignore_topmost_bind)
6313 fprintf (file, "{\n");
6315 if (any_var)
6316 fprintf (file, "\n");
6318 print_gimple_seq (file, body, 2, flags);
6319 fprintf (file, "}\n");
6322 else
6324 int indent;
6326 /* Make a tree based dump. */
6327 chain = DECL_SAVED_TREE (fn);
6329 if (chain && TREE_CODE (chain) == BIND_EXPR)
6331 if (ignore_topmost_bind)
6333 chain = BIND_EXPR_BODY (chain);
6334 indent = 2;
6336 else
6337 indent = 0;
6339 else
6341 if (!ignore_topmost_bind)
6342 fprintf (file, "{\n");
6343 indent = 2;
6346 if (any_var)
6347 fprintf (file, "\n");
6349 print_generic_stmt_indented (file, chain, flags, indent);
6350 if (ignore_topmost_bind)
6351 fprintf (file, "}\n");
6354 fprintf (file, "\n\n");
6356 /* Restore CFUN. */
6357 pop_cfun ();
6361 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
6363 void
6364 debug_function (tree fn, int flags)
6366 dump_function_to_file (fn, stderr, flags);
6370 /* Print on FILE the indexes for the predecessors of basic_block BB. */
6372 static void
6373 print_pred_bbs (FILE *file, basic_block bb)
6375 edge e;
6376 edge_iterator ei;
6378 FOR_EACH_EDGE (e, ei, bb->preds)
6379 fprintf (file, "bb_%d ", e->src->index);
6383 /* Print on FILE the indexes for the successors of basic_block BB. */
6385 static void
6386 print_succ_bbs (FILE *file, basic_block bb)
6388 edge e;
6389 edge_iterator ei;
6391 FOR_EACH_EDGE (e, ei, bb->succs)
6392 fprintf (file, "bb_%d ", e->dest->index);
6395 /* Print to FILE the basic block BB following the VERBOSITY level. */
6397 void
6398 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
6400 char *s_indent = (char *) alloca ((size_t) indent + 1);
6401 memset ((void *) s_indent, ' ', (size_t) indent);
6402 s_indent[indent] = '\0';
6404 /* Print basic_block's header. */
6405 if (verbosity >= 2)
6407 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
6408 print_pred_bbs (file, bb);
6409 fprintf (file, "}, succs = {");
6410 print_succ_bbs (file, bb);
6411 fprintf (file, "})\n");
6414 /* Print basic_block's body. */
6415 if (verbosity >= 3)
6417 fprintf (file, "%s {\n", s_indent);
6418 gimple_dump_bb (bb, file, indent + 4, TDF_VOPS|TDF_MEMSYMS);
6419 fprintf (file, "%s }\n", s_indent);
6423 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
6425 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
6426 VERBOSITY level this outputs the contents of the loop, or just its
6427 structure. */
6429 static void
6430 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
6432 char *s_indent;
6433 basic_block bb;
6435 if (loop == NULL)
6436 return;
6438 s_indent = (char *) alloca ((size_t) indent + 1);
6439 memset ((void *) s_indent, ' ', (size_t) indent);
6440 s_indent[indent] = '\0';
6442 /* Print loop's header. */
6443 fprintf (file, "%sloop_%d (header = %d, latch = %d", s_indent,
6444 loop->num, loop->header->index, loop->latch->index);
6445 fprintf (file, ", niter = ");
6446 print_generic_expr (file, loop->nb_iterations, 0);
6448 if (loop->any_upper_bound)
6450 fprintf (file, ", upper_bound = ");
6451 dump_double_int (file, loop->nb_iterations_upper_bound, true);
6454 if (loop->any_estimate)
6456 fprintf (file, ", estimate = ");
6457 dump_double_int (file, loop->nb_iterations_estimate, true);
6459 fprintf (file, ")\n");
6461 /* Print loop's body. */
6462 if (verbosity >= 1)
6464 fprintf (file, "%s{\n", s_indent);
6465 FOR_EACH_BB (bb)
6466 if (bb->loop_father == loop)
6467 print_loops_bb (file, bb, indent, verbosity);
6469 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
6470 fprintf (file, "%s}\n", s_indent);
6474 /* Print the LOOP and its sibling loops on FILE, indented INDENT
6475 spaces. Following VERBOSITY level this outputs the contents of the
6476 loop, or just its structure. */
6478 static void
6479 print_loop_and_siblings (FILE *file, struct loop *loop, int indent, int verbosity)
6481 if (loop == NULL)
6482 return;
6484 print_loop (file, loop, indent, verbosity);
6485 print_loop_and_siblings (file, loop->next, indent, verbosity);
6488 /* Follow a CFG edge from the entry point of the program, and on entry
6489 of a loop, pretty print the loop structure on FILE. */
6491 void
6492 print_loops (FILE *file, int verbosity)
6494 basic_block bb;
6496 bb = ENTRY_BLOCK_PTR;
6497 if (bb && bb->loop_father)
6498 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
/* Debugging loops structure at tree level, at some VERBOSITY level.  */

void
debug_loops (int verbosity)
{
  print_loops (stderr, verbosity);
}
/* Print on stderr the code of LOOP, at some VERBOSITY level.  */

void
debug_loop (struct loop *loop, int verbosity)
{
  print_loop (stderr, loop, 0, verbosity);
}
/* Print on stderr the code of loop number NUM, at some VERBOSITY
   level.  */

void
debug_loop_num (unsigned num, int verbosity)
{
  debug_loop (get_loop (num), verbosity);
}
6527 /* Return true if BB ends with a call, possibly followed by some
6528 instructions that must stay with the call. Return false,
6529 otherwise. */
6531 static bool
6532 gimple_block_ends_with_call_p (basic_block bb)
6534 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6535 return is_gimple_call (gsi_stmt (gsi));
6539 /* Return true if BB ends with a conditional branch. Return false,
6540 otherwise. */
6542 static bool
6543 gimple_block_ends_with_condjump_p (const_basic_block bb)
6545 gimple stmt = last_stmt (CONST_CAST_BB (bb));
6546 return (stmt && gimple_code (stmt) == GIMPLE_COND);
6550 /* Return true if we need to add fake edge to exit at statement T.
6551 Helper function for gimple_flow_call_edges_add. */
6553 static bool
6554 need_fake_edge_p (gimple t)
6556 tree fndecl = NULL_TREE;
6557 int call_flags = 0;
6559 /* NORETURN and LONGJMP calls already have an edge to exit.
6560 CONST and PURE calls do not need one.
6561 We don't currently check for CONST and PURE here, although
6562 it would be a good idea, because those attributes are
6563 figured out from the RTL in mark_constant_function, and
6564 the counter incrementation code from -fprofile-arcs
6565 leads to different results from -fbranch-probabilities. */
6566 if (is_gimple_call (t))
6568 fndecl = gimple_call_fndecl (t);
6569 call_flags = gimple_call_flags (t);
6572 if (is_gimple_call (t)
6573 && fndecl
6574 && DECL_BUILT_IN (fndecl)
6575 && (call_flags & ECF_NOTHROW)
6576 && !(call_flags & ECF_RETURNS_TWICE)
6577 /* fork() doesn't really return twice, but the effect of
6578 wrapping it in __gcov_fork() which calls __gcov_flush()
6579 and clears the counters before forking has the same
6580 effect as returning twice. Force a fake edge. */
6581 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6582 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
6583 return false;
6585 if (is_gimple_call (t)
6586 && !(call_flags & ECF_NORETURN))
6587 return true;
6589 if (gimple_code (t) == GIMPLE_ASM
6590 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
6591 return true;
6593 return false;
6597 /* Add fake edges to the function exit for any non constant and non
6598 noreturn calls, volatile inline assembly in the bitmap of blocks
6599 specified by BLOCKS or to the whole CFG if BLOCKS is zero. Return
6600 the number of blocks that were split.
6602 The goal is to expose cases in which entering a basic block does
6603 not imply that all subsequent instructions must be executed. */
6605 static int
6606 gimple_flow_call_edges_add (sbitmap blocks)
6608 int i;
6609 int blocks_split = 0;
6610 int last_bb = last_basic_block;
6611 bool check_last_block = false;
/* Nothing to do for an empty function (only the fixed entry/exit
   blocks exist).  */
6613 if (n_basic_blocks == NUM_FIXED_BLOCKS)
6614 return 0;
6616 if (! blocks)
6617 check_last_block = true;
6618 else
6619 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
6621 /* In the last basic block, before epilogue generation, there will be
6622 a fallthru edge to EXIT. Special care is required if the last insn
6623 of the last basic block is a call because make_edge folds duplicate
6624 edges, which would result in the fallthru edge also being marked
6625 fake, which would result in the fallthru edge being removed by
6626 remove_fake_edges, which would result in an invalid CFG.
6628 Moreover, we can't elide the outgoing fake edge, since the block
6629 profiler needs to take this into account in order to solve the minimal
6630 spanning tree in the case that the call doesn't return.
6632 Handle this by adding a dummy instruction in a new last basic block. */
6633 if (check_last_block)
6635 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
6636 gimple_stmt_iterator gsi = gsi_last_bb (bb);
6637 gimple t = NULL;
6639 if (!gsi_end_p (gsi))
6640 t = gsi_stmt (gsi);
6642 if (t && need_fake_edge_p (t))
6644 edge e;
6646 e = find_edge (bb, EXIT_BLOCK_PTR);
6647 if (e)
6649 gsi_insert_on_edge (e, gimple_build_nop ());
6650 gsi_commit_edge_inserts ();
6655 /* Now add fake edges to the function exit for any non constant
6656 calls since there is no way that we can determine if they will
6657 return or not... */
6658 for (i = 0; i < last_bb; i++)
6660 basic_block bb = BASIC_BLOCK (i);
6661 gimple_stmt_iterator gsi;
6662 gimple stmt, last_stmt;
6664 if (!bb)
6665 continue;
6667 if (blocks && !TEST_BIT (blocks, i))
6668 continue;
6670 gsi = gsi_last_bb (bb);
6671 if (!gsi_end_p (gsi))
6673 last_stmt = gsi_stmt (gsi);
6676 stmt = gsi_stmt (gsi);
6677 if (need_fake_edge_p (stmt))
6679 edge e;
6681 /* The handling above of the final block before the
6682 epilogue should be enough to verify that there is
6683 no edge to the exit block in CFG already.
6684 Calling make_edge in such case would cause us to
6685 mark that edge as fake and remove it later. */
6686 #ifdef ENABLE_CHECKING
6687 if (stmt == last_stmt)
6689 e = find_edge (bb, EXIT_BLOCK_PTR);
6690 gcc_assert (e == NULL);
6692 #endif
6694 /* Note that the following may create a new basic block
6695 and renumber the existing basic blocks. */
6696 if (stmt != last_stmt)
6698 e = split_block (bb, stmt);
6699 if (e)
6700 blocks_split++;
6702 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
/* Walk the block's statements backwards; the inner loop terminates
   when the iterator runs off the start of the block.  */
6704 gsi_prev (&gsi);
6706 while (!gsi_end_p (gsi));
6710 if (blocks_split)
6711 verify_flow_info ();
6713 return blocks_split;
6716 /* Purge dead abnormal call edges from basic block BB. */
/* Returns true iff the CFG changed; dominance information is freed in
   that case (see gimple_purge_dead_eh_edges below).  */
6718 bool
6719 gimple_purge_dead_abnormal_call_edges (basic_block bb)
6721 bool changed = gimple_purge_dead_eh_edges (bb);
6723 if (cfun->has_nonlocal_label)
6725 gimple stmt = last_stmt (bb);
6726 edge_iterator ei;
6727 edge e;
/* Abnormal successor edges are kept only while the block's last
   statement can still make an abnormal goto.  */
6729 if (!(stmt && stmt_can_make_abnormal_goto (stmt)))
6730 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6732 if (e->flags & EDGE_ABNORMAL)
6734 remove_edge (e);
6735 changed = true;
6737 else
6738 ei_next (&ei);
6741 /* See gimple_purge_dead_eh_edges below. */
6742 if (changed)
6743 free_dominance_info (CDI_DOMINATORS);
6746 return changed;
6749 /* Removes edge E and all the blocks dominated by it, and updates dominance
6750 information. The IL in E->src needs to be updated separately.
6751 If dominance info is not available, only the edge E is removed.*/
6753 void
6754 remove_edge_and_dominated_blocks (edge e)
6756 VEC (basic_block, heap) *bbs_to_remove = NULL;
6757 VEC (basic_block, heap) *bbs_to_fix_dom = NULL;
6758 bitmap df, df_idom;
6759 edge f;
6760 edge_iterator ei;
6761 bool none_removed = false;
6762 unsigned i;
6763 basic_block bb, dbb;
6764 bitmap_iterator bi;
6766 if (!dom_info_available_p (CDI_DOMINATORS))
6768 remove_edge (e);
6769 return;
6772 /* No updating is needed for edges to exit. */
6773 if (e->dest == EXIT_BLOCK_PTR)
6775 if (cfgcleanup_altered_bbs)
6776 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
6777 remove_edge (e);
6778 return;
6781 /* First, we find the basic blocks to remove. If E->dest has a predecessor
6782 that is not dominated by E->dest, then this set is empty. Otherwise,
6783 all the basic blocks dominated by E->dest are removed.
6785 Also, to DF_IDOM we store the immediate dominators of the blocks in
6786 the dominance frontier of E (i.e., of the successors of the
6787 removed blocks, if there are any, and of E->dest otherwise). */
6788 FOR_EACH_EDGE (f, ei, e->dest->preds)
6790 if (f == e)
6791 continue;
6793 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
6795 none_removed = true;
6796 break;
/* DF accumulates the dominance frontier of the removed region;
   DF_IDOM the immediate dominators whose subtrees may need fixing.  */
6800 df = BITMAP_ALLOC (NULL);
6801 df_idom = BITMAP_ALLOC (NULL);
6803 if (none_removed)
6804 bitmap_set_bit (df_idom,
6805 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
6806 else
6808 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
6809 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6811 FOR_EACH_EDGE (f, ei, bb->succs)
6813 if (f->dest != EXIT_BLOCK_PTR)
6814 bitmap_set_bit (df, f->dest->index);
6817 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6818 bitmap_clear_bit (df, bb->index);
6820 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
6822 bb = BASIC_BLOCK (i);
6823 bitmap_set_bit (df_idom,
6824 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
6828 if (cfgcleanup_altered_bbs)
6830 /* Record the set of the altered basic blocks. */
6831 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
6832 bitmap_ior_into (cfgcleanup_altered_bbs, df);
6835 /* Remove E and the cancelled blocks. */
6836 if (none_removed)
6837 remove_edge (e);
6838 else
6840 /* Walk backwards so as to get a chance to substitute all
6841 released DEFs into debug stmts. See
6842 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
6843 details. */
6844 for (i = VEC_length (basic_block, bbs_to_remove); i-- > 0; )
6845 delete_basic_block (VEC_index (basic_block, bbs_to_remove, i));
6848 /* Update the dominance information. The immediate dominator may change only
6849 for blocks whose immediate dominator belongs to DF_IDOM:
6851 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
6852 removal. Let Z the arbitrary block such that idom(Z) = Y and
6853 Z dominates X after the removal. Before removal, there exists a path P
6854 from Y to X that avoids Z. Let F be the last edge on P that is
6855 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
6856 dominates W, and because of P, Z does not dominate W), and W belongs to
6857 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
6858 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
6860 bb = BASIC_BLOCK (i);
6861 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
6862 dbb;
6863 dbb = next_dom_son (CDI_DOMINATORS, dbb))
6864 VEC_safe_push (basic_block, heap, bbs_to_fix_dom, dbb);
6867 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
6869 BITMAP_FREE (df);
6870 BITMAP_FREE (df_idom);
6871 VEC_free (basic_block, heap, bbs_to_remove);
6872 VEC_free (basic_block, heap, bbs_to_fix_dom);
6875 /* Purge dead EH edges from basic block BB. */
6877 bool
6878 gimple_purge_dead_eh_edges (basic_block bb)
6880 bool changed = false;
6881 edge e;
6882 edge_iterator ei;
6883 gimple stmt = last_stmt (bb);
6885 if (stmt && stmt_can_throw_internal (stmt))
6886 return false;
6888 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6890 if (e->flags & EDGE_EH)
6892 remove_edge_and_dominated_blocks (e);
6893 changed = true;
6895 else
6896 ei_next (&ei);
6899 return changed;
6902 bool
6903 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
6905 bool changed = false;
6906 unsigned i;
6907 bitmap_iterator bi;
6909 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
6911 basic_block bb = BASIC_BLOCK (i);
6913 /* Earlier gimple_purge_dead_eh_edges could have removed
6914 this basic block already. */
6915 gcc_assert (bb || changed);
6916 if (bb != NULL)
6917 changed |= gimple_purge_dead_eh_edges (bb);
6920 return changed;
6923 /* This function is called whenever a new edge is created or
6924 redirected. */
6926 static void
6927 gimple_execute_on_growing_pred (edge e)
6929 basic_block bb = e->dest;
6931 if (phi_nodes (bb))
6932 reserve_phi_args_for_new_edge (bb);
6935 /* This function is called immediately before edge E is removed from
6936 the edge vector E->dest->preds. */
6938 static void
6939 gimple_execute_on_shrinking_pred (edge e)
6941 if (phi_nodes (e->dest))
6942 remove_phi_args (e);
6945 /*---------------------------------------------------------------------------
6946 Helper functions for Loop versioning
6947 ---------------------------------------------------------------------------*/
6949 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
6950 of 'first'. Both of them are dominated by 'new_head' basic block. When
6951 'new_head' was created by 'second's incoming edge it received phi arguments
6952 on the edge by split_edge(). Later, additional edge 'e' was created to
6953 connect 'new_head' and 'first'. Now this routine adds phi args on this
6954 additional edge 'e' that new_head to second edge received as part of edge
6955 splitting. */
6957 static void
6958 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
6959 basic_block new_head, edge e)
6961 gimple phi1, phi2;
6962 gimple_stmt_iterator psi1, psi2;
6963 tree def;
6964 edge e2 = find_edge (new_head, second);
6966 /* Because NEW_HEAD has been created by splitting SECOND's incoming
6967 edge, we should always have an edge from NEW_HEAD to SECOND. */
6968 gcc_assert (e2 != NULL);
6970 /* Browse all 'second' basic block phi nodes and add phi args to
6971 edge 'e' for 'first' head. PHI args are always in correct order. */
6973 for (psi2 = gsi_start_phis (second),
6974 psi1 = gsi_start_phis (first);
6975 !gsi_end_p (psi2) && !gsi_end_p (psi1);
6976 gsi_next (&psi2), gsi_next (&psi1))
6978 phi1 = gsi_stmt (psi1);
6979 phi2 = gsi_stmt (psi2);
/* Copy the value SECOND's phi receives along E2 onto the new edge E
   into FIRST, preserving its source location.  */
6980 def = PHI_ARG_DEF (phi2, e2->dest_idx);
6981 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
6986 /* Adds a if else statement to COND_BB with condition COND_EXPR.
6987 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
6988 the destination of the ELSE part. */
6990 static void
6991 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
6992 basic_block second_head ATTRIBUTE_UNUSED,
6993 basic_block cond_bb, void *cond_e)
6995 gimple_stmt_iterator gsi;
6996 gimple new_cond_expr;
6997 tree cond_expr = (tree) cond_e;
6998 edge e0;
7000 /* Build new conditional expr */
7001 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
7002 NULL_TREE, NULL_TREE);
7004 /* Add new cond in cond_bb. */
7005 gsi = gsi_last_bb (cond_bb);
7006 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
7008 /* Adjust edges appropriately to connect new head with first head
7009 as well as second head. */
7010 e0 = single_succ_edge (cond_bb);
7011 e0->flags &= ~EDGE_FALLTHRU;
7012 e0->flags |= EDGE_FALSE_VALUE;
/* Table of CFG manipulation hooks for the GIMPLE IL, consumed by the
   generic CFG machinery (struct cfg_hooks).  */
7015 struct cfg_hooks gimple_cfg_hooks = {
7016 "gimple",
7017 gimple_verify_flow_info,
7018 gimple_dump_bb, /* dump_bb */
7019 create_bb, /* create_basic_block */
7020 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
7021 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
7022 gimple_can_remove_branch_p, /* can_remove_branch_p */
7023 remove_bb, /* delete_basic_block */
7024 gimple_split_block, /* split_block */
7025 gimple_move_block_after, /* move_block_after */
7026 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
7027 gimple_merge_blocks, /* merge_blocks */
7028 gimple_predict_edge, /* predict_edge */
7029 gimple_predicted_by_p, /* predicted_by_p */
7030 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
7031 gimple_duplicate_bb, /* duplicate_block */
7032 gimple_split_edge, /* split_edge */
7033 gimple_make_forwarder_block, /* make_forward_block */
7034 NULL, /* tidy_fallthru_edge */
7035 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
7036 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
7037 gimple_flow_call_edges_add, /* flow_call_edges_add */
7038 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
7039 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
7040 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
7041 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
7042 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
7043 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
7044 flush_pending_stmts /* flush_pending_stmts */
7048 /* Split all critical edges. */
7050 static unsigned int
7051 split_critical_edges (void)
7053 basic_block bb;
7054 edge e;
7055 edge_iterator ei;
7057 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
7058 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
7059 mappings around the calls to split_edge. */
7060 start_recording_case_labels ();
7061 FOR_ALL_BB (bb)
7063 FOR_EACH_EDGE (e, ei, bb->succs)
/* Abnormal edges cannot be split.  */
7065 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
7066 split_edge (e);
7067 /* PRE inserts statements to edges and expects that
7068 since split_critical_edges was done beforehand, committing edge
7069 insertions will not split more edges. In addition to critical
7070 edges we must split edges that have multiple successors and
7071 end by control flow statements, such as RESX.
7072 Go ahead and split them too. This matches the logic in
7073 gimple_find_edge_insert_loc. */
7074 else if ((!single_pred_p (e->dest)
7075 || !gimple_seq_empty_p (phi_nodes (e->dest))
7076 || e->dest == EXIT_BLOCK_PTR)
7077 && e->src != ENTRY_BLOCK_PTR
7078 && !(e->flags & EDGE_ABNORMAL))
7080 gimple_stmt_iterator gsi;
7082 gsi = gsi_last_bb (e->src);
7083 if (!gsi_end_p (gsi)
7084 && stmt_ends_bb_p (gsi_stmt (gsi))
7085 && gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN)
7086 split_edge (e);
7090 end_recording_case_labels ();
7091 return 0;
/* Pass descriptor for the critical-edge-splitting pass ("crited").
   Requires PROP_cfg and provides PROP_no_crit_edges.  */
7094 struct gimple_opt_pass pass_split_crit_edges =
7097 GIMPLE_PASS,
7098 "crited", /* name */
7099 NULL, /* gate */
7100 split_critical_edges, /* execute */
7101 NULL, /* sub */
7102 NULL, /* next */
7103 0, /* static_pass_number */
7104 TV_TREE_SPLIT_EDGES, /* tv_id */
7105 PROP_cfg, /* properties required */
7106 PROP_no_crit_edges, /* properties_provided */
7107 0, /* properties_destroyed */
7108 0, /* todo_flags_start */
7109 TODO_dump_func | TODO_verify_flow /* todo_flags_finish */
7114 /* Build a ternary operation and gimplify it. Emit code before GSI.
7115 Return the gimple_val holding the result. */
7117 tree
7118 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
7119 tree type, tree a, tree b, tree c)
7121 tree ret;
7122 location_t loc = gimple_location (gsi_stmt (*gsi));
7124 ret = fold_build3_loc (loc, code, type, a, b, c);
7125 STRIP_NOPS (ret);
7127 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7128 GSI_SAME_STMT);
7131 /* Build a binary operation and gimplify it. Emit code before GSI.
7132 Return the gimple_val holding the result. */
7134 tree
7135 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
7136 tree type, tree a, tree b)
7138 tree ret;
7140 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
7141 STRIP_NOPS (ret);
7143 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7144 GSI_SAME_STMT);
7147 /* Build a unary operation and gimplify it. Emit code before GSI.
7148 Return the gimple_val holding the result. */
7150 tree
7151 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
7152 tree a)
7154 tree ret;
7156 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
7157 STRIP_NOPS (ret);
7159 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7160 GSI_SAME_STMT);
7165 /* Emit return warnings. */
7167 static unsigned int
7168 execute_warn_function_return (void)
7170 source_location location;
7171 gimple last;
7172 edge e;
7173 edge_iterator ei;
7175 /* If we have a path to EXIT, then we do return. */
/* NOTE(review): TREE_THIS_VOLATILE on the FUNCTION_DECL here appears
   to encode "declared noreturn", matching the warning text below.  */
7176 if (TREE_THIS_VOLATILE (cfun->decl)
7177 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
7179 location = UNKNOWN_LOCATION;
7180 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7182 last = last_stmt (e->src);
7183 if (gimple_code (last) == GIMPLE_RETURN
7184 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
7185 break;
/* If no return statement carried a location, point at the end of the
   function instead.  */
7187 if (location == UNKNOWN_LOCATION)
7188 location = cfun->function_end_locus;
7189 warning_at (location, 0, "%<noreturn%> function does return");
7192 /* If we see "return;" in some basic block, then we do reach the end
7193 without returning a value. */
7194 else if (warn_return_type
7195 && !TREE_NO_WARNING (cfun->decl)
7196 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
7197 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
7199 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7201 gimple last = last_stmt (e->src);
7202 if (gimple_code (last) == GIMPLE_RETURN
7203 && gimple_return_retval (last) == NULL
7204 && !gimple_no_warning_p (last))
7206 location = gimple_location (last);
7207 if (location == UNKNOWN_LOCATION)
7208 location = cfun->function_end_locus;
7209 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
/* Warn only once per function.  */
7210 TREE_NO_WARNING (cfun->decl) = 1;
7211 break;
7215 return 0;
7219 /* Given a basic block B which ends with a conditional and has
7220 precisely two successors, determine which of the edges is taken if
7221 the conditional is true and which is taken if the conditional is
7222 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
7224 void
7225 extract_true_false_edges_from_block (basic_block b,
7226 edge *true_edge,
7227 edge *false_edge)
7229 edge e = EDGE_SUCC (b, 0);
7231 if (e->flags & EDGE_TRUE_VALUE)
7233 *true_edge = e;
7234 *false_edge = EDGE_SUCC (b, 1);
7236 else
7238 *false_edge = e;
7239 *true_edge = EDGE_SUCC (b, 1);
/* Pass descriptor for the -W"noreturn"/-Wreturn-type diagnostics pass;
   unconditionally executed (no gate) once the CFG is available.  */

struct gimple_opt_pass pass_warn_function_return =
{
 {
  GIMPLE_PASS,
  "*warn_function_return",		/* name */
  NULL,					/* gate */
  execute_warn_function_return,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 }
};
7262 /* Emit noreturn warnings. */
7264 static unsigned int
7265 execute_warn_function_noreturn (void)
7267 if (warn_missing_noreturn
7268 && !TREE_THIS_VOLATILE (cfun->decl)
7269 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
7270 && !lang_hooks.missing_noreturn_ok_p (cfun->decl))
7271 warning_at (DECL_SOURCE_LOCATION (cfun->decl), OPT_Wmissing_noreturn,
7272 "function might be possible candidate "
7273 "for attribute %<noreturn%>");
7274 return 0;
/* Pass descriptor for the -Wmissing-noreturn diagnostics pass;
   unconditionally executed (no gate) once the CFG is available.  */

struct gimple_opt_pass pass_warn_function_noreturn =
{
 {
  GIMPLE_PASS,
  "*warn_function_noreturn",		/* name */
  NULL,					/* gate */
  execute_warn_function_noreturn,	/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 }
};
/* Walk a gimplified function and warn for functions whose return value is
   ignored and attribute((warn_unused_result)) is set.  This is done before
   inlining, so we don't have to worry about that.

   SEQ is walked statement by statement; container statements (binds,
   try/catch/eh-filter) are recursed into, and every GIMPLE_CALL with
   no LHS is checked for the attribute.  */

static void
do_warn_unused_result (gimple_seq seq)
{
  tree fdecl, ftype;
  gimple_stmt_iterator i;

  for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
    {
      gimple g = gsi_stmt (i);

      switch (gimple_code (g))
	{
	/* Recurse into every nested statement sequence.  */
	case GIMPLE_BIND:
	  do_warn_unused_result (gimple_bind_body (g));
	  break;
	case GIMPLE_TRY:
	  do_warn_unused_result (gimple_try_eval (g));
	  do_warn_unused_result (gimple_try_cleanup (g));
	  break;
	case GIMPLE_CATCH:
	  do_warn_unused_result (gimple_catch_handler (g));
	  break;
	case GIMPLE_EH_FILTER:
	  do_warn_unused_result (gimple_eh_filter_failure (g));
	  break;

	case GIMPLE_CALL:
	  if (gimple_call_lhs (g))
	    break;

	  /* This is a naked call, as opposed to a GIMPLE_CALL with an
	     LHS.  All calls whose value is ignored should be
	     represented like this.  Look for the attribute.  */
	  fdecl = gimple_call_fndecl (g);
	  /* The attribute lives on the function type; FDECL may be
	     NULL for indirect calls.  */
	  ftype = TREE_TYPE (TREE_TYPE (gimple_call_fn (g)));

	  if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
	    {
	      location_t loc = gimple_location (g);

	      if (fdecl)
		warning_at (loc, OPT_Wunused_result,
			    "ignoring return value of %qD, "
			    "declared with attribute warn_unused_result",
			    fdecl);
	      else
		warning_at (loc, OPT_Wunused_result,
			    "ignoring return value of function "
			    "declared with attribute warn_unused_result");
	    }
	  break;

	default:
	  /* Not a container, not a call, or a call whose value is used.  */
	  break;
	}
    }
}
7360 static unsigned int
7361 run_warn_unused_result (void)
7363 do_warn_unused_result (gimple_body (current_function_decl));
7364 return 0;
7367 static bool
7368 gate_warn_unused_result (void)
7370 return flag_warn_unused_result;
/* Pass descriptor for the -Wunused-result diagnostics pass; gated on
   the warning flag and run on the gimplified body, before the CFG is
   built (only PROP_gimple_any is required).  */

struct gimple_opt_pass pass_warn_unused_result =
{
 {
  GIMPLE_PASS,
  "*warn_unused_result",		/* name */
  gate_warn_unused_result,		/* gate */
  run_warn_unused_result,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_gimple_any,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0,					/* todo_flags_finish */
 }
};