/* gcc/tree-cfg.c -- extracted from official-gcc.git,
   blob 94be3610c9102e34613e917e1ca3ae55837a5533.
   (Extraction residue: the page title "Simplify convert_modes, ignoring
   invalid old modes for CONST_INTs" is the commit message of the viewed
   revision, not part of this file.)  */
1 /* Control flow functions for trees.
2 Copyright (C) 2001-2013 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "hash-table.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "tm_p.h"
28 #include "basic-block.h"
29 #include "flags.h"
30 #include "function.h"
31 #include "ggc.h"
32 #include "gimple-pretty-print.h"
33 #include "tree-ssa.h"
34 #include "tree-dump.h"
35 #include "tree-pass.h"
36 #include "diagnostic-core.h"
37 #include "except.h"
38 #include "cfgloop.h"
39 #include "tree-ssa-propagate.h"
40 #include "value-prof.h"
41 #include "pointer-set.h"
42 #include "tree-inline.h"
43 #include "target.h"
44 #include "tree-ssa-live.h"
45 #include "omp-low.h"
46 #include "tree-cfgcleanup.h"
47 #include "wide-int.h"
48 #include "wide-int-print.h"
50 /* This file contains functions for building the Control Flow Graph (CFG)
51 for a function tree. */
53 /* Local declarations. */
55 /* Initial capacity for the basic block array. */
56 static const int initial_cfg_capacity = 20;
58 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
59 which use a particular edge. The CASE_LABEL_EXPRs are chained together
60 via their CASE_CHAIN field, which we clear after we're done with the
61 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
63 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
64 update the case vector in response to edge redirections.
66 Right now this table is set up and torn down at key points in the
67 compilation process. It would be nice if we could make the table
68 more persistent. The key is getting notification of changes to
69 the CFG (particularly edge removal, creation and redirection). */
71 static struct pointer_map_t *edge_to_cases;
73 /* If we record edge_to_cases, this bitmap will hold indexes
74 of basic blocks that end in a GIMPLE_SWITCH which we touched
75 due to edge manipulations. */
77 static bitmap touched_switch_bbs;
79 /* CFG statistics. */
80 struct cfg_stats_d
82 long num_merged_labels;
85 static struct cfg_stats_d cfg_stats;
87 /* Nonzero if we found a computed goto while building basic blocks. */
88 static bool found_computed_goto;
90 /* Hash table to store last discriminator assigned for each locus. */
91 struct locus_discrim_map
93 location_t locus;
94 int discriminator;
97 /* Hashtable helpers. */
99 struct locus_discrim_hasher : typed_free_remove <locus_discrim_map>
101 typedef locus_discrim_map value_type;
102 typedef locus_discrim_map compare_type;
103 static inline hashval_t hash (const value_type *);
104 static inline bool equal (const value_type *, const compare_type *);
107 /* Trivial hash function for a location_t. ITEM is a pointer to
108 a hash table entry that maps a location_t to a discriminator. */
110 inline hashval_t
111 locus_discrim_hasher::hash (const value_type *item)
113 return LOCATION_LINE (item->locus);
116 /* Equality function for the locus-to-discriminator map. A and B
117 point to the two hash table entries to compare. */
119 inline bool
120 locus_discrim_hasher::equal (const value_type *a, const compare_type *b)
122 return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
125 static hash_table <locus_discrim_hasher> discriminator_per_locus;
127 /* Basic blocks and flowgraphs. */
128 static void make_blocks (gimple_seq);
129 static void factor_computed_gotos (void);
131 /* Edges. */
132 static void make_edges (void);
133 static void assign_discriminators (void);
134 static void make_cond_expr_edges (basic_block);
135 static void make_gimple_switch_edges (basic_block);
136 static void make_goto_expr_edges (basic_block);
137 static void make_gimple_asm_edges (basic_block);
138 static edge gimple_redirect_edge_and_branch (edge, basic_block);
139 static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
140 static unsigned int split_critical_edges (void);
142 /* Various helpers. */
143 static inline bool stmt_starts_bb_p (gimple, gimple);
144 static int gimple_verify_flow_info (void);
145 static void gimple_make_forwarder_block (edge);
146 static gimple first_non_label_stmt (basic_block);
147 static bool verify_gimple_transaction (gimple);
149 /* Flowgraph optimization and cleanup. */
150 static void gimple_merge_blocks (basic_block, basic_block);
151 static bool gimple_can_merge_blocks_p (basic_block, basic_block);
152 static void remove_bb (basic_block);
153 static edge find_taken_edge_computed_goto (basic_block, tree);
154 static edge find_taken_edge_cond_expr (basic_block, tree);
155 static edge find_taken_edge_switch_expr (basic_block, tree);
156 static tree find_case_label_for_value (gimple, tree);
/* Build the bare CFG skeleton for function FN: allocate and clear the
   basic-block and label-to-block maps, install the fixed ENTRY and EXIT
   blocks, and chain ENTRY directly to EXIT (no real blocks yet).  */
158 void
159 init_empty_tree_cfg_for_function (struct function *fn)
161 /* Initialize the basic block array. */
162 init_flow (fn);
163 profile_status_for_function (fn) = PROFILE_ABSENT;
164 n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
165 last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
166 vec_alloc (basic_block_info_for_function (fn), initial_cfg_capacity);
/* Grow-cleared so unused slots read back as NULL blocks.  */
167 vec_safe_grow_cleared (basic_block_info_for_function (fn),
168 initial_cfg_capacity);
170 /* Build a mapping of labels to their associated blocks. */
171 vec_alloc (label_to_block_map_for_function (fn), initial_cfg_capacity);
172 vec_safe_grow_cleared (label_to_block_map_for_function (fn),
173 initial_cfg_capacity);
175 SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
176 ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
177 SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
178 EXIT_BLOCK_PTR_FOR_FUNCTION (fn));
/* With no intermediate blocks yet, ENTRY and EXIT are adjacent.  */
180 ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
181 = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
182 EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
183 = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
/* Convenience wrapper: initialize an empty tree CFG for the current
   function (cfun).  */
186 void
187 init_empty_tree_cfg (void)
189 init_empty_tree_cfg_for_function (cfun);
192 /*---------------------------------------------------------------------------
193 Create basic blocks
194 ---------------------------------------------------------------------------*/
196 /* Entry point to the CFG builder for trees. SEQ is the sequence of
197 statements to be added to the flowgraph. */
199 static void
200 build_gimple_cfg (gimple_seq seq)
202 /* Register specific gimple functions. */
203 gimple_register_cfg_hooks ();
205 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
207 init_empty_tree_cfg ();
/* make_blocks sets found_computed_goto as a side effect.  */
209 found_computed_goto = 0;
210 make_blocks (seq);
212 /* Computed gotos are hell to deal with, especially if there are
213 lots of them with a large number of destinations. So we factor
214 them to a common computed goto location before we build the
215 edge list. After we convert back to normal form, we will un-factor
216 the computed gotos since factoring introduces an unwanted jump. */
217 if (found_computed_goto)
218 factor_computed_gotos ();
220 /* Make sure there is always at least one block, even if it's empty. */
221 if (n_basic_blocks == NUM_FIXED_BLOCKS)
222 create_empty_bb (ENTRY_BLOCK_PTR);
224 /* Adjust the size of the array. */
225 if (basic_block_info->length () < (size_t) n_basic_blocks)
226 vec_safe_grow_cleared (basic_block_info, n_basic_blocks);
228 /* To speed up statement iterator walks, we first purge dead labels. */
229 cleanup_dead_labels ();
231 /* Group case nodes to reduce the number of edges.
232 We do this after cleaning up dead labels because otherwise we miss
233 a lot of obvious case merging opportunities. */
234 group_case_labels ();
/* Discriminator table is only needed while edges are being created;
   cleanup_dead_labels runs a second time because edge creation removes
   simple gotos and may leave their labels unreferenced.  */
236 /* Create the edges of the flowgraph. */
237 discriminator_per_locus.create (13);
238 make_edges ();
239 assign_discriminators ();
240 cleanup_dead_labels ();
241 discriminator_per_locus.dispose ();
/* Execute hook of the CFG-build pass: build the CFG from the function's
   GIMPLE body, detach the body sequence (statements now live in basic
   blocks), clean up the fresh CFG, and initialize the loop optimizer
   state.  Always returns 0 (no TODO flags).  */
244 static unsigned int
245 execute_build_cfg (void)
247 gimple_seq body = gimple_body (current_function_decl);
249 build_gimple_cfg (body);
/* The statements are now owned by the CFG's basic blocks.  */
250 gimple_set_body (current_function_decl, NULL);
251 if (dump_file && (dump_flags & TDF_DETAILS))
253 fprintf (dump_file, "Scope blocks:\n");
254 dump_scope_blocks (dump_file, dump_flags);
256 cleanup_tree_cfg ();
257 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
258 return 0;
/* Pass descriptor and opt_pass wrapper for the CFG-build pass ("cfg").
   Requires lowered EH GIMPLE and provides PROP_cfg and PROP_loops.  */
261 namespace {
263 const pass_data pass_data_build_cfg =
265 GIMPLE_PASS, /* type */
266 "cfg", /* name */
267 OPTGROUP_NONE, /* optinfo_flags */
268 false, /* has_gate */
269 true, /* has_execute */
270 TV_TREE_CFG, /* tv_id */
271 PROP_gimple_leh, /* properties_required */
272 ( PROP_cfg | PROP_loops ), /* properties_provided */
273 0, /* properties_destroyed */
274 0, /* todo_flags_start */
275 TODO_verify_stmts, /* todo_flags_finish */
278 class pass_build_cfg : public gimple_opt_pass
280 public:
281 pass_build_cfg (gcc::context *ctxt)
282 : gimple_opt_pass (pass_data_build_cfg, ctxt)
285 /* opt_pass methods: */
286 unsigned int execute () { return execute_build_cfg (); }
288 }; // class pass_build_cfg
290 } // anon namespace
/* Factory used by the pass manager to instantiate the CFG-build pass.
   Caller (pass manager) takes ownership of the returned object.  */
292 gimple_opt_pass *
293 make_pass_build_cfg (gcc::context *ctxt)
295 return new pass_build_cfg (ctxt);
299 /* Return true if T is a computed goto. */
301 static bool
302 computed_goto_p (gimple t)
304 return (gimple_code (t) == GIMPLE_GOTO
305 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
309 /* Search the CFG for any computed gotos. If found, factor them to a
310 common computed goto site. Also record the location of that site so
311 that we can un-factor the gotos after we have converted back to
312 normal form. */
314 static void
315 factor_computed_gotos (void)
317 basic_block bb;
/* All of these are created lazily on the first computed goto seen.  */
318 tree factored_label_decl = NULL;
319 tree var = NULL;
320 gimple factored_computed_goto_label = NULL;
321 gimple factored_computed_goto = NULL;
323 /* We know there are one or more computed gotos in this function.
324 Examine the last statement in each basic block to see if the block
325 ends with a computed goto. */
327 FOR_EACH_BB (bb)
329 gimple_stmt_iterator gsi = gsi_last_bb (bb);
330 gimple last;
332 if (gsi_end_p (gsi))
333 continue;
335 last = gsi_stmt (gsi);
337 /* Ignore the computed goto we create when we factor the original
338 computed gotos. */
339 if (last == factored_computed_goto)
340 continue;
342 /* If the last statement is a computed goto, factor it. */
343 if (computed_goto_p (last))
345 gimple assignment;
347 /* The first time we find a computed goto we need to create
348 the factored goto block and the variable each original
349 computed goto will use for their goto destination. */
350 if (!factored_computed_goto)
352 basic_block new_bb = create_empty_bb (bb);
353 gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);
355 /* Create the destination of the factored goto. Each original
356 computed goto will put its desired destination into this
357 variable and jump to the label we create immediately
358 below. */
359 var = create_tmp_var (ptr_type_node, "gotovar");
361 /* Build a label for the new block which will contain the
362 factored computed goto. */
363 factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
364 factored_computed_goto_label
365 = gimple_build_label (factored_label_decl);
366 gsi_insert_after (&new_gsi, factored_computed_goto_label,
367 GSI_NEW_STMT);
369 /* Build our new computed goto. */
370 factored_computed_goto = gimple_build_goto (var);
371 gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
374 /* Copy the original computed goto's destination into VAR. */
375 assignment = gimple_build_assign (var, gimple_goto_dest (last));
376 gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
/* The original goto becomes a simple goto to the factored block.  */
378 /* And re-vector the computed goto to the new destination. */
379 gimple_goto_set_dest (last, factored_label_decl);
385 /* Build a flowgraph for the sequence of stmts SEQ. */
387 static void
388 make_blocks (gimple_seq seq)
390 gimple_stmt_iterator i = gsi_start (seq);
391 gimple stmt = NULL;
392 bool start_new_block = true;
393 bool first_stmt_of_seq = true;
394 basic_block bb = ENTRY_BLOCK_PTR;
396 while (!gsi_end_p (i))
398 gimple prev_stmt;
400 prev_stmt = stmt;
401 stmt = gsi_stmt (i);
403 /* If the statement starts a new basic block or if we have determined
404 in a previous pass that we need to create a new block for STMT, do
405 so now. */
406 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
408 if (!first_stmt_of_seq)
409 gsi_split_seq_before (&i, &seq);
410 bb = create_basic_block (seq, NULL, bb);
411 start_new_block = false;
414 /* Now add STMT to BB and create the subgraphs for special statement
415 codes. */
416 gimple_set_bb (stmt, bb);
/* Remember for build_gimple_cfg, which factors computed gotos.  */
418 if (computed_goto_p (stmt))
419 found_computed_goto = true;
421 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
422 next iteration. */
423 if (stmt_ends_bb_p (stmt))
425 /* If the stmt can make abnormal goto use a new temporary
426 for the assignment to the LHS. This makes sure the old value
427 of the LHS is available on the abnormal edge. Otherwise
428 we will end up with overlapping life-ranges for abnormal
429 SSA names. */
430 if (gimple_has_lhs (stmt)
431 && stmt_can_make_abnormal_goto (stmt)
432 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
434 tree lhs = gimple_get_lhs (stmt);
435 tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
/* Insert "lhs = tmp" after the statement, which itself now
   assigns into TMP instead of LHS.  */
436 gimple s = gimple_build_assign (lhs, tmp);
437 gimple_set_location (s, gimple_location (stmt));
438 gimple_set_block (s, gimple_block (stmt));
439 gimple_set_lhs (stmt, tmp);
440 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
441 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
442 DECL_GIMPLE_REG_P (tmp) = 1;
443 gsi_insert_after (&i, s, GSI_SAME_STMT);
445 start_new_block = true;
448 gsi_next (&i);
449 first_stmt_of_seq = false;
454 /* Create and return a new empty basic block after bb AFTER. */
/* H is the gimple_seq for the block (may be NULL); E must be NULL --
   this is the gimple CFG hook signature shared with the RTL hook.  */
456 static basic_block
457 create_bb (void *h, void *e, basic_block after)
459 basic_block bb;
461 gcc_assert (!e);
463 /* Create and initialize a new basic block. Since alloc_block uses
464 GC allocation that clears memory to allocate a basic block, we do
465 not have to clear the newly allocated basic block here. */
466 bb = alloc_block ();
468 bb->index = last_basic_block;
469 bb->flags = BB_NEW;
470 set_bb_seq (bb, h ? (gimple_seq) h : NULL);
472 /* Add the new block to the linked list of blocks. */
473 link_block (bb, after);
/* Grow by ~25% to amortize repeated insertions.  */
475 /* Grow the basic block array if needed. */
476 if ((size_t) last_basic_block == basic_block_info->length ())
478 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
479 vec_safe_grow_cleared (basic_block_info, new_size);
482 /* Add the newly created block to the array. */
483 SET_BASIC_BLOCK (last_basic_block, bb);
485 n_basic_blocks++;
486 last_basic_block++;
488 return bb;
492 /*---------------------------------------------------------------------------
493 Edge creation
494 ---------------------------------------------------------------------------*/
496 /* Fold COND_EXPR_COND of each COND_EXPR. */
/* For every block ending in a GIMPLE_COND, try to fold the condition;
   if it folds to constant false/true, rewrite the statement so later
   CFG cleanup can remove the dead arm.  Overflow warnings are deferred
   and only emitted when the fold actually decided the condition.  */
498 void
499 fold_cond_expr_cond (void)
501 basic_block bb;
503 FOR_EACH_BB (bb)
505 gimple stmt = last_stmt (bb);
507 if (stmt && gimple_code (stmt) == GIMPLE_COND)
509 location_t loc = gimple_location (stmt);
510 tree cond;
511 bool zerop, onep;
513 fold_defer_overflow_warnings ();
514 cond = fold_binary_loc (loc, gimple_cond_code (stmt), boolean_type_node,
515 gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
516 if (cond)
518 zerop = integer_zerop (cond);
519 onep = integer_onep (cond);
521 else
522 zerop = onep = false;
524 fold_undefer_overflow_warnings (zerop || onep,
525 stmt,
526 WARN_STRICT_OVERFLOW_CONDITIONAL);
527 if (zerop)
528 gimple_cond_make_false (stmt);
529 else if (onep)
530 gimple_cond_make_true (stmt);
535 /* Join all the blocks in the flowgraph. */
/* Walk every basic block and add outgoing edges according to the kind
   of its last statement; FALLTHRU controls whether an EDGE_FALLTHRU to
   the next block is added afterwards.  */
537 static void
538 make_edges (void)
540 basic_block bb;
541 struct omp_region *cur_region = NULL;
543 /* Create an edge from entry to the first block with executable
544 statements in it. */
545 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);
547 /* Traverse the basic block array placing edges. */
548 FOR_EACH_BB (bb)
550 gimple last = last_stmt (bb);
551 bool fallthru;
553 if (last)
555 enum gimple_code code = gimple_code (last);
556 switch (code)
558 case GIMPLE_GOTO:
559 make_goto_expr_edges (bb);
560 fallthru = false;
561 break;
562 case GIMPLE_RETURN:
563 make_edge (bb, EXIT_BLOCK_PTR, 0);
564 fallthru = false;
565 break;
566 case GIMPLE_COND:
567 make_cond_expr_edges (bb);
568 fallthru = false;
569 break;
570 case GIMPLE_SWITCH:
571 make_gimple_switch_edges (bb);
572 fallthru = false;
573 break;
574 case GIMPLE_RESX:
575 make_eh_edges (last);
576 fallthru = false;
577 break;
578 case GIMPLE_EH_DISPATCH:
579 fallthru = make_eh_dispatch_edges (last);
580 break;
582 case GIMPLE_CALL:
583 /* If this function receives a nonlocal goto, then we need to
584 make edges from this call site to all the nonlocal goto
585 handlers. */
586 if (stmt_can_make_abnormal_goto (last))
587 make_abnormal_goto_edges (bb, true);
589 /* If this statement has reachable exception handlers, then
590 create abnormal edges to them. */
591 make_eh_edges (last);
593 /* BUILTIN_RETURN is really a return statement. */
594 if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
595 make_edge (bb, EXIT_BLOCK_PTR, 0), fallthru = false;
596 /* Some calls are known not to return. */
597 else
598 fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
599 break;
601 case GIMPLE_ASSIGN:
602 /* A GIMPLE_ASSIGN may throw internally and thus be considered
603 control-altering. */
604 if (is_ctrl_altering_stmt (last))
605 make_eh_edges (last);
606 fallthru = true;
607 break;
609 case GIMPLE_ASM:
610 make_gimple_asm_edges (bb);
611 fallthru = true;
612 break;
614 CASE_GIMPLE_OMP:
615 fallthru = make_gimple_omp_edges (bb, &cur_region);
616 break;
618 case GIMPLE_TRANSACTION:
620 tree abort_label = gimple_transaction_label (last);
621 if (abort_label)
622 make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
623 fallthru = true;
625 break;
627 default:
628 gcc_assert (!stmt_ends_bb_p (last));
629 fallthru = true;
632 else
633 fallthru = true;
635 if (fallthru)
636 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
639 free_omp_regions ();
641 /* Fold COND_EXPR_COND of each COND_EXPR. */
642 fold_cond_expr_cond ();
645 /* Find the next available discriminator value for LOCUS. The
646 discriminator distinguishes among several basic blocks that
647 share a common locus, allowing for more accurate sample-based
648 profiling. */
/* Returns values starting at 1; a fresh locus gets discriminator 1.  */
650 static int
651 next_discriminator_for_locus (location_t locus)
653 struct locus_discrim_map item;
654 struct locus_discrim_map **slot;
656 item.locus = locus;
657 item.discriminator = 0;
/* Hash on the line number only -- see locus_discrim_hasher.  */
658 slot = discriminator_per_locus.find_slot_with_hash (
659 &item, LOCATION_LINE (locus), INSERT);
660 gcc_assert (slot);
661 if (*slot == HTAB_EMPTY_ENTRY)
/* First request for this line: allocate and zero-init the entry;
   it is freed by the hasher's typed_free_remove on dispose.  */
663 *slot = XNEW (struct locus_discrim_map);
664 gcc_assert (*slot);
665 (*slot)->locus = locus;
666 (*slot)->discriminator = 0;
668 (*slot)->discriminator++;
669 return (*slot)->discriminator;
672 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
674 static bool
675 same_line_p (location_t locus1, location_t locus2)
677 expanded_location from, to;
679 if (locus1 == locus2)
680 return true;
682 from = expand_location (locus1);
683 to = expand_location (locus2);
685 if (from.line != to.line)
686 return false;
687 if (from.file == to.file)
688 return true;
689 return (from.file != NULL
690 && to.file != NULL
691 && filename_cmp (from.file, to.file) == 0);
694 /* Assign discriminators to each basic block. */
/* For every block with a located last statement, give each successor
   block that shares the same source line a distinct discriminator, so
   sample-based profilers can tell the copies apart.  */
696 static void
697 assign_discriminators (void)
699 basic_block bb;
701 FOR_EACH_BB (bb)
703 edge e;
704 edge_iterator ei;
705 gimple last = last_stmt (bb);
706 location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;
708 if (locus == UNKNOWN_LOCATION)
709 continue;
711 FOR_EACH_EDGE (e, ei, bb->succs)
/* NOTE: this inner LAST shadows the outer one intentionally; it is
   the destination block's last statement.  */
713 gimple first = first_non_label_stmt (e->dest);
714 gimple last = last_stmt (e->dest);
715 if ((first && same_line_p (locus, gimple_location (first)))
716 || (last && same_line_p (locus, gimple_location (last))))
/* If the successor already has a discriminator, give one to BB
   instead; otherwise mark the successor.  */
718 if (e->dest->discriminator != 0 && bb->discriminator == 0)
719 bb->discriminator = next_discriminator_for_locus (locus);
720 else
721 e->dest->discriminator = next_discriminator_for_locus (locus);
727 /* Create the edges for a GIMPLE_COND starting at block BB. */
729 static void
730 make_cond_expr_edges (basic_block bb)
732 gimple entry = last_stmt (bb);
733 gimple then_stmt, else_stmt;
734 basic_block then_bb, else_bb;
735 tree then_label, else_label;
736 edge e;
738 gcc_assert (entry);
739 gcc_assert (gimple_code (entry) == GIMPLE_COND);
741 /* Entry basic blocks for each component. */
742 then_label = gimple_cond_true_label (entry);
743 else_label = gimple_cond_false_label (entry);
744 then_bb = label_to_block (then_label);
745 else_bb = label_to_block (else_label);
746 then_stmt = first_stmt (then_bb);
747 else_stmt = first_stmt (else_bb);
749 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
750 e->goto_locus = gimple_location (then_stmt);
751 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
752 if (e)
753 e->goto_locus = gimple_location (else_stmt);
755 /* We do not need the labels anymore. */
756 gimple_cond_set_true_label (entry, NULL_TREE);
757 gimple_cond_set_false_label (entry, NULL_TREE);
761 /* Called for each element in the hash table (P) as we delete the
762 edge to cases hash table.
764 Clear all the TREE_CHAINs to prevent problems with copying of
765 SWITCH_EXPRs and structure sharing rules, then free the hash table
766 element. */
768 static bool
769 edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
770 void *data ATTRIBUTE_UNUSED)
772 tree t, next;
774 for (t = (tree) *value; t; t = next)
776 next = CASE_CHAIN (t);
777 CASE_CHAIN (t) = NULL;
780 *value = NULL;
781 return true;
784 /* Start recording information mapping edges to case labels. */
786 void
787 start_recording_case_labels (void)
/* Recording must not already be active; edge_to_cases doubles as the
   "are we recording" flag (see recording_case_labels_p).  */
789 gcc_assert (edge_to_cases == NULL);
790 edge_to_cases = pointer_map_create ();
791 touched_switch_bbs = BITMAP_ALLOC (NULL);
794 /* Return nonzero if we are recording information for case labels. */
796 static bool
797 recording_case_labels_p (void)
/* The map only exists between start_ and end_recording_case_labels.  */
799 return (edge_to_cases != NULL);
802 /* Stop recording information mapping edges to case labels and
803 remove any information we have recorded. */
/* Also re-groups the case labels of every GIMPLE_SWITCH whose block was
   touched by edge manipulation while recording was active.  */
804 void
805 end_recording_case_labels (void)
807 bitmap_iterator bi;
808 unsigned i;
809 pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
810 pointer_map_destroy (edge_to_cases);
811 edge_to_cases = NULL;
812 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
814 basic_block bb = BASIC_BLOCK (i);
/* The block may have been removed in the meantime.  */
815 if (bb)
817 gimple stmt = last_stmt (bb);
818 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
819 group_case_labels_stmt (stmt);
822 BITMAP_FREE (touched_switch_bbs);
825 /* If we are inside a {start,end}_recording_cases block, then return
826 a chain of CASE_LABEL_EXPRs from T which reference E.
828 Otherwise return NULL. */
830 static tree
831 get_cases_for_edge (edge e, gimple t)
833 void **slot;
834 size_t i, n;
836 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
837 chains available. Return NULL so the caller can detect this case. */
838 if (!recording_case_labels_p ())
839 return NULL;
841 slot = pointer_map_contains (edge_to_cases, e);
842 if (slot)
843 return (tree) *slot;
845 /* If we did not find E in the hash table, then this must be the first
846 time we have been queried for information about E & T. Add all the
847 elements from T to the hash table then perform the query again. */
849 n = gimple_switch_num_labels (t);
850 for (i = 0; i < n; i++)
852 tree elt = gimple_switch_label (t, i);
853 tree lab = CASE_LABEL (elt);
854 basic_block label_bb = label_to_block (lab);
855 edge this_edge = find_edge (e->src, label_bb);
857 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
858 a new chain. */
/* Prepend ELT: its CASE_CHAIN points at the old head of the chain.  */
859 slot = pointer_map_insert (edge_to_cases, this_edge);
860 CASE_CHAIN (elt) = (tree) *slot;
861 *slot = elt;
/* After populating the map above, E is guaranteed to have a slot.  */
864 return (tree) *pointer_map_contains (edge_to_cases, e);
867 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
869 static void
870 make_gimple_switch_edges (basic_block bb)
872 gimple entry = last_stmt (bb);
873 size_t i, n;
875 n = gimple_switch_num_labels (entry);
877 for (i = 0; i < n; ++i)
879 tree lab = CASE_LABEL (gimple_switch_label (entry, i));
880 basic_block label_bb = label_to_block (lab);
881 make_edge (bb, label_bb, 0);
886 /* Return the basic block holding label DEST. */
/* IFUN is the function whose label map to consult.  Returns NULL when
   DEST's uid is out of range of the map.  */
888 basic_block
889 label_to_block_fn (struct function *ifun, tree dest)
891 int uid = LABEL_DECL_UID (dest);
893 /* We would die hard when faced by an undefined label. Emit a label to
894 the very first basic block. This will hopefully make even the dataflow
895 and undefined variable warnings quite right. */
/* Only do this error-recovery dance after a diagnostic was issued.  */
896 if (seen_error () && uid < 0)
898 gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
899 gimple stmt;
901 stmt = gimple_build_label (dest);
902 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
/* gsi_insert_before assigned DEST a uid via the label map.  */
903 uid = LABEL_DECL_UID (dest);
905 if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
906 return NULL;
907 return (*ifun->cfg->x_label_to_block_map)[uid];
910 /* Create edges for an abnormal goto statement at block BB. If FOR_CALL
911 is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */
/* Scans every block: an abnormal edge is added to each block starting
   with a suitable label, and to each block starting with a
   returns-twice (setjmp-like) call.  */
913 void
914 make_abnormal_goto_edges (basic_block bb, bool for_call)
916 basic_block target_bb;
917 gimple_stmt_iterator gsi;
919 FOR_EACH_BB (target_bb)
/* Labels can only appear at the start of a block; stop at the first
   non-label statement.  */
921 for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
923 gimple label_stmt = gsi_stmt (gsi);
924 tree target;
926 if (gimple_code (label_stmt) != GIMPLE_LABEL)
927 break;
929 target = gimple_label_label (label_stmt);
931 /* Make an edge to every label block that has been marked as a
932 potential target for a computed goto or a non-local goto. */
933 if ((FORCED_LABEL (target) && !for_call)
934 || (DECL_NONLOCAL (target) && for_call))
936 make_edge (bb, target_bb, EDGE_ABNORMAL);
937 break;
/* GSI now rests on the first non-label statement (possibly a debug
   stmt, which we skip) -- check for a setjmp-like call there.  */
940 if (!gsi_end_p (gsi)
941 && is_gimple_debug (gsi_stmt (gsi)))
942 gsi_next_nondebug (&gsi);
943 if (!gsi_end_p (gsi))
945 /* Make an edge to every setjmp-like call. */
946 gimple call_stmt = gsi_stmt (gsi);
947 if (is_gimple_call (call_stmt)
948 && (gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE))
949 make_edge (bb, target_bb, EDGE_ABNORMAL);
954 /* Create edges for a goto statement at block BB. */
956 static void
957 make_goto_expr_edges (basic_block bb)
959 gimple_stmt_iterator last = gsi_last_bb (bb);
960 gimple goto_t = gsi_stmt (last);
962 /* A simple GOTO creates normal edges. */
963 if (simple_goto_p (goto_t))
965 tree dest = gimple_goto_dest (goto_t);
966 basic_block label_bb = label_to_block (dest);
967 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
968 e->goto_locus = gimple_location (goto_t);
/* The edge now carries the control flow, so the goto statement
   itself is redundant and is removed.  */
969 gsi_remove (&last, true);
970 return;
973 /* A computed GOTO creates abnormal edges. */
974 make_abnormal_goto_edges (bb, false);
977 /* Create edges for an asm statement with labels at block BB. */
979 static void
980 make_gimple_asm_edges (basic_block bb)
982 gimple stmt = last_stmt (bb);
983 int i, n = gimple_asm_nlabels (stmt);
985 for (i = 0; i < n; ++i)
987 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
988 basic_block label_bb = label_to_block (label);
989 make_edge (bb, label_bb, 0);
993 /*---------------------------------------------------------------------------
994 Flowgraph analysis
995 ---------------------------------------------------------------------------*/
997 /* Cleanup useless labels in basic blocks. This is something we wish
998 to do early because it allows us to group case labels before creating
999 the edges for the CFG, and it speeds up block statement iterators in
1000 all passes later on.
1001 We rerun this pass after CFG is created, to get rid of the labels that
1002 are no longer referenced. After then we do not run it any more, since
1003 (almost) no new labels should be created. */
1005 /* A map from basic block index to the leading label of that block. */
1006 static struct label_record
1008 /* The label. */
1009 tree label;
1011 /* True if the label is referenced from somewhere. */
1012 bool used;
1013 } *label_for_bb;
1015 /* Given LABEL return the first label in the same basic block. */
/* Also marks that block's leading label as used; callers redirect all
   references to the returned label so duplicates can be purged.  */
1017 static tree
1018 main_block_label (tree label)
1020 basic_block bb = label_to_block (label);
1021 tree main_label = label_for_bb[bb->index].label;
1023 /* label_to_block possibly inserted undefined label into the chain. */
1024 if (!main_label)
1026 label_for_bb[bb->index].label = label;
1027 main_label = label;
1030 label_for_bb[bb->index].used = true;
1031 return main_label;
1034 /* Clean up redundant labels within the exception tree. */
/* Redirects every landing-pad, catch and allowed-exceptions label to
   the leading label of its block (see main_block_label).  */
1036 static void
1037 cleanup_dead_labels_eh (void)
1039 eh_landing_pad lp;
1040 eh_region r;
1041 tree lab;
1042 int i;
1044 if (cfun->eh == NULL)
1045 return;
/* lp_array slot 0 is unused, hence starting at 1.  */
1047 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1048 if (lp && lp->post_landing_pad)
1050 lab = main_block_label (lp->post_landing_pad);
1051 if (lab != lp->post_landing_pad)
/* Transfer the landing-pad number to the surviving label.  */
1053 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1054 EH_LANDING_PAD_NR (lab) = lp->index;
1058 FOR_ALL_EH_REGION (r)
1059 switch (r->type)
1061 case ERT_CLEANUP:
1062 case ERT_MUST_NOT_THROW:
1063 break;
1065 case ERT_TRY:
1067 eh_catch c;
1068 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
1070 lab = c->label;
1071 if (lab)
1072 c->label = main_block_label (lab);
1075 break;
1077 case ERT_ALLOWED_EXCEPTIONS:
1078 lab = r->u.allowed.label;
1079 if (lab)
1080 r->u.allowed.label = main_block_label (lab);
1081 break;
1086 /* Cleanup redundant labels. This is a three-step process:
1087 1) Find the leading label for each block.
1088 2) Redirect all references to labels to the leading labels.
1089 3) Cleanup all useless labels. */
1091 void
1092 cleanup_dead_labels (void)
1094 basic_block bb;
/* Per-block record of the chosen leading label, indexed by bb->index.
   NOTE(review): the record's "used" flag is presumably set by
   main_block_label (defined earlier in this file, outside this
   excerpt) -- confirm.  */
1095 label_for_bb = XCNEWVEC (struct label_record, last_basic_block);
1097 /* Find a suitable label for each block. We use the first user-defined
1098 label if there is one, or otherwise just the first label we see. */
1099 FOR_EACH_BB (bb)
1101 gimple_stmt_iterator i;
1103 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1105 tree label;
1106 gimple stmt = gsi_stmt (i);
/* Labels only appear at the start of a block; stop at the first
   non-label statement.  */
1108 if (gimple_code (stmt) != GIMPLE_LABEL)
1109 break;
1111 label = gimple_label_label (stmt);
1113 /* If we have not yet seen a label for the current block,
1114 remember this one and see if there are more labels. */
1115 if (!label_for_bb[bb->index].label)
1117 label_for_bb[bb->index].label = label;
1118 continue;
1121 /* If we did see a label for the current block already, but it
1122 is an artificially created label, replace it if the current
1123 label is a user defined label. */
1124 if (!DECL_ARTIFICIAL (label)
1125 && DECL_ARTIFICIAL (label_for_bb[bb->index].label)
1127 label_for_bb[bb->index].label = label;
1128 break;
1133 /* Now redirect all jumps/branches to the selected label.
1134 First do so for each block ending in a control statement. */
1135 FOR_EACH_BB (bb)
1137 gimple stmt = last_stmt (bb);
1138 tree label, new_label;
1140 if (!stmt)
1141 continue;
1143 switch (gimple_code (stmt))
1145 case GIMPLE_COND:
1146 label = gimple_cond_true_label (stmt);
1147 if (label)
1149 new_label = main_block_label (label);
1150 if (new_label != label)
1151 gimple_cond_set_true_label (stmt, new_label);
1154 label = gimple_cond_false_label (stmt);
1155 if (label)
1157 new_label = main_block_label (label);
1158 if (new_label != label)
1159 gimple_cond_set_false_label (stmt, new_label);
1161 break;
1163 case GIMPLE_SWITCH:
1165 size_t i, n = gimple_switch_num_labels (stmt);
1167 /* Replace all destination labels. */
1168 for (i = 0; i < n; ++i)
1170 tree case_label = gimple_switch_label (stmt, i);
1171 label = CASE_LABEL (case_label);
1172 new_label = main_block_label (label);
1173 if (new_label != label)
1174 CASE_LABEL (case_label) = new_label;
1176 break;
/* Redirect the destination labels of "asm goto" statements too.  */
1179 case GIMPLE_ASM:
1181 int i, n = gimple_asm_nlabels (stmt);
1183 for (i = 0; i < n; ++i)
1185 tree cons = gimple_asm_label_op (stmt, i);
1186 tree label = main_block_label (TREE_VALUE (cons));
1187 TREE_VALUE (cons) = label;
1189 break;
1192 /* We have to handle gotos until they're removed, and we don't
1193 remove them until after we've created the CFG edges. */
1194 case GIMPLE_GOTO:
1195 if (!computed_goto_p (stmt))
1197 label = gimple_goto_dest (stmt);
1198 new_label = main_block_label (label);
1199 if (new_label != label)
1200 gimple_goto_set_dest (stmt, new_label);
1202 break;
1204 case GIMPLE_TRANSACTION:
1206 tree label = gimple_transaction_label (stmt);
1207 if (label)
1209 tree new_label = main_block_label (label);
1210 if (new_label != label)
1211 gimple_transaction_set_label (stmt, new_label);
1214 break;
1216 default:
1217 break;
1221 /* Do the same for the exception region tree labels. */
1222 cleanup_dead_labels_eh ();
1224 /* Finally, purge dead labels. All user-defined labels and labels that
1225 can be the target of non-local gotos and labels which have their
1226 address taken are preserved. */
1227 FOR_EACH_BB (bb)
1229 gimple_stmt_iterator i;
1230 tree label_for_this_bb = label_for_bb[bb->index].label;
1232 if (!label_for_this_bb)
1233 continue;
1235 /* If the main label of the block is unused, we may still remove it. */
1236 if (!label_for_bb[bb->index].used)
1237 label_for_this_bb = NULL;
1239 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
1241 tree label;
1242 gimple stmt = gsi_stmt (i);
1244 if (gimple_code (stmt) != GIMPLE_LABEL)
1245 break;
1247 label = gimple_label_label (stmt);
/* Keep the leading label, user-defined labels, and labels whose
   address is needed (non-local goto targets, forced labels).  */
1249 if (label == label_for_this_bb
1250 || !DECL_ARTIFICIAL (label)
1251 || DECL_NONLOCAL (label)
1252 || FORCED_LABEL (label))
1253 gsi_next (&i);
1254 else
1255 gsi_remove (&i, true);
1259 free (label_for_bb);
1262 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1263 the ones jumping to the same label.
1264 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1266 void
1267 group_case_labels_stmt (gimple stmt)
1269 int old_size = gimple_switch_num_labels (stmt);
1270 int i, j, new_size = old_size;
1271 basic_block default_bb = NULL;
1273 default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));
1275 /* Look for possible opportunities to merge cases. */
/* Start at index 1; index 0 is presumably the default label (see
   gimple_switch_default_label above) -- confirm.  */
1276 i = 1;
1277 while (i < old_size)
1279 tree base_case, base_high;
1280 basic_block base_bb;
1282 base_case = gimple_switch_label (stmt, i);
1284 gcc_assert (base_case);
1285 base_bb = label_to_block (CASE_LABEL (base_case));
1287 /* Discard cases that have the same destination as the
1288 default case. */
1289 if (base_bb == default_bb)
/* Dropped entries leave NULL_TREE holes; the compression loop
   below squeezes them out.  */
1291 gimple_switch_set_label (stmt, i, NULL_TREE);
1292 i++;
1293 new_size--;
1294 continue;
1297 base_high = CASE_HIGH (base_case)
1298 ? CASE_HIGH (base_case)
1299 : CASE_LOW (base_case);
1300 i++;
1302 /* Try to merge case labels. Break out when we reach the end
1303 of the label vector or when we cannot merge the next case
1304 label with the current one. */
1305 while (i < old_size)
1307 tree merge_case = gimple_switch_label (stmt, i);
1308 basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
/* bhp1 = base_high + 1, the first value just above the range
   accumulated so far.  */
1309 wide_int bhp1 = wi::add (base_high, 1);
1311 /* Merge the cases if they jump to the same place,
1312 and their ranges are consecutive. */
1313 if (merge_bb == base_bb
1314 && wi::eq_p (CASE_LOW (merge_case), bhp1))
1316 base_high = CASE_HIGH (merge_case) ?
1317 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1318 CASE_HIGH (base_case) = base_high;
1319 gimple_switch_set_label (stmt, i, NULL_TREE);
1320 new_size--;
1321 i++;
1323 else
1324 break;
1328 /* Compress the case labels in the label vector, and adjust the
1329 length of the vector. */
1330 for (i = 0, j = 0; i < new_size; i++)
1332 while (! gimple_switch_label (stmt, j))
1333 j++;
1334 gimple_switch_set_label (stmt, i,
1335 gimple_switch_label (stmt, j++));
1338 gcc_assert (new_size <= old_size);
1339 gimple_switch_set_num_labels (stmt, new_size);
1342 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1343 and scan the sorted vector of cases. Combine the ones jumping to the
1344 same label. */
1346 void
1347 group_case_labels (void)
1349 basic_block bb;
1351 FOR_EACH_BB (bb)
1353 gimple stmt = last_stmt (bb);
1354 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1355 group_case_labels_stmt (stmt);
1359 /* Checks whether we can merge block B into block A. */
1361 static bool
1362 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1364 gimple stmt;
1365 gimple_stmt_iterator gsi;
/* A must have exactly one successor, B, reached over a normal
   (non-complex) edge, and B must have A as its only predecessor.  */
1367 if (!single_succ_p (a))
1368 return false;
1370 if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1371 return false;
1373 if (single_succ (a) != b)
1374 return false;
1376 if (!single_pred_p (b))
1377 return false;
1379 if (b == EXIT_BLOCK_PTR)
1380 return false;
1382 /* If A ends by a statement causing exceptions or something similar, we
1383 cannot merge the blocks. */
1384 stmt = last_stmt (a);
1385 if (stmt && stmt_ends_bb_p (stmt))
1386 return false;
1388 /* Do not allow a block with only a non-local label to be merged. */
1389 if (stmt
1390 && gimple_code (stmt) == GIMPLE_LABEL
1391 && DECL_NONLOCAL (gimple_label_label (stmt)))
1392 return false;
1394 /* Examine the labels at the beginning of B. */
1395 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
1397 tree lab;
1398 stmt = gsi_stmt (gsi);
1399 if (gimple_code (stmt) != GIMPLE_LABEL)
1400 break;
1401 lab = gimple_label_label (stmt);
1403 /* Do not remove user forced labels or for -O0 any user labels. */
1404 if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1405 return false;
1408 /* Protect the loop latches. */
1409 if (current_loops && b->loop_father->latch == b)
1410 return false;
1412 /* It must be possible to eliminate all phi nodes in B. If ssa form
1413 is not up-to-date and a name-mapping is registered, we cannot eliminate
1414 any phis. Symbols marked for renaming are never a problem though. */
1415 for (gsi = gsi_start_phis (b); !gsi_end_p (gsi); gsi_next (&gsi))
1417 gimple phi = gsi_stmt (gsi);
1418 /* Technically only new names matter. */
1419 if (name_registered_for_update_p (PHI_RESULT (phi)))
1420 return false;
1423 /* When not optimizing, don't merge if we'd lose goto_locus. */
1424 if (!optimize
1425 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1427 location_t goto_locus = single_succ_edge (a)->goto_locus;
1428 gimple_stmt_iterator prev, next;
1429 prev = gsi_last_nondebug_bb (a);
1430 next = gsi_after_labels (b);
1431 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1432 gsi_next_nondebug (&next);
/* The edge's location survives only if the last real statement of A
   or the first real statement of B already carries it.  */
1433 if ((gsi_end_p (prev)
1434 || gimple_location (gsi_stmt (prev)) != goto_locus)
1435 && (gsi_end_p (next)
1436 || gimple_location (gsi_stmt (next)) != goto_locus))
1437 return false;
1440 return true;
1443 /* Replaces all uses of NAME by VAL. */
1445 void
1446 replace_uses_by (tree name, tree val)
1448 imm_use_iterator imm_iter;
1449 use_operand_p use;
1450 gimple stmt;
1451 edge e;
1453 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1455 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1457 replace_exp (use, val);
1459 if (gimple_code (stmt) == GIMPLE_PHI)
1461 e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
1462 if (e->flags & EDGE_ABNORMAL)
1464 /* This can only occur for virtual operands, since
1465 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1466 would prevent replacement. */
1467 gcc_checking_assert (virtual_operand_p (name));
1468 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
/* For non-PHI statements the replacement may enable folding and may
   change EH behavior; PHIs need no such post-processing.  */
1473 if (gimple_code (stmt) != GIMPLE_PHI)
1475 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1476 gimple orig_stmt = stmt;
1477 size_t i;
1479 /* Mark the block if we changed the last stmt in it. */
1480 if (cfgcleanup_altered_bbs
1481 && stmt_ends_bb_p (stmt))
1482 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1484 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1485 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1486 only change sth from non-invariant to invariant, and only
1487 when propagating constants. */
1488 if (is_gimple_min_invariant (val))
1489 for (i = 0; i < gimple_num_ops (stmt); i++)
1491 tree op = gimple_op (stmt, i);
1492 /* Operands may be empty here. For example, the labels
1493 of a GIMPLE_COND are nulled out following the creation
1494 of the corresponding CFG edges. */
1495 if (op && TREE_CODE (op) == ADDR_EXPR)
1496 recompute_tree_invariant_for_addr_expr (op);
/* Folding may replace the statement; refetch it from the iterator.  */
1499 if (fold_stmt (&gsi))
1500 stmt = gsi_stmt (gsi);
1502 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
1503 gimple_purge_dead_eh_edges (gimple_bb (stmt));
1505 update_stmt (stmt);
1509 gcc_checking_assert (has_zero_uses (name));
1511 /* Also update the trees stored in loop structures. */
1512 if (current_loops)
1514 struct loop *loop;
1515 loop_iterator li;
1517 FOR_EACH_LOOP (li, loop, 0)
1519 substitute_in_loop_info (loop, name, val);
1524 /* Merge block B into block A. */
1526 static void
1527 gimple_merge_blocks (basic_block a, basic_block b)
1529 gimple_stmt_iterator last, gsi, psi;
1531 if (dump_file)
1532 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1534 /* Remove all single-valued PHI nodes from block B of the form
1535 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1536 gsi = gsi_last_bb (a);
1537 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
1539 gimple phi = gsi_stmt (psi);
1540 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1541 gimple copy;
1542 bool may_replace_uses = (virtual_operand_p (def)
1543 || may_propagate_copy (def, use));
1545 /* In case we maintain loop closed ssa form, do not propagate arguments
1546 of loop exit phi nodes. */
1547 if (current_loops
1548 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1549 && !virtual_operand_p (def)
1550 && TREE_CODE (use) == SSA_NAME
1551 && a->loop_father != b->loop_father)
1552 may_replace_uses = false;
/* If uses cannot be replaced, materialize the PHI as an explicit
   copy appended at the end of A instead.  */
1554 if (!may_replace_uses)
1556 gcc_assert (!virtual_operand_p (def));
1558 /* Note that just emitting the copies is fine -- there is no problem
1559 with ordering of phi nodes. This is because A is the single
1560 predecessor of B, therefore results of the phi nodes cannot
1561 appear as arguments of the phi nodes. */
1562 copy = gimple_build_assign (def, use);
1563 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1564 remove_phi_node (&psi, false);
1566 else
1568 /* If we deal with a PHI for virtual operands, we can simply
1569 propagate these without fussing with folding or updating
1570 the stmt. */
1571 if (virtual_operand_p (def))
1573 imm_use_iterator iter;
1574 use_operand_p use_p;
1575 gimple stmt;
1577 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1578 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1579 SET_USE (use_p, use);
1581 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
1582 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
1584 else
1585 replace_uses_by (def, use);
1587 remove_phi_node (&psi, true);
1591 /* Ensure that B follows A. */
1592 move_block_after (b, a);
1594 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1595 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1597 /* Remove labels from B and set gimple_bb to A for other statements. */
1598 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
1600 gimple stmt = gsi_stmt (gsi);
1601 if (gimple_code (stmt) == GIMPLE_LABEL)
1603 tree label = gimple_label_label (stmt);
1604 int lp_nr;
1606 gsi_remove (&gsi, false);
1608 /* Now that we can thread computed gotos, we might have
1609 a situation where we have a forced label in block B
1610 However, the label at the start of block B might still be
1611 used in other ways (think about the runtime checking for
1612 Fortran assigned gotos). So we can not just delete the
1613 label. Instead we move the label to the start of block A. */
1614 if (FORCED_LABEL (label))
1616 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
1617 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
1619 /* Other user labels keep around in a form of a debug stmt. */
1620 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
1622 gimple dbg = gimple_build_debug_bind (label,
1623 integer_zero_node,
1624 stmt);
1625 gimple_debug_bind_reset_value (dbg);
1626 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
/* A removed landing-pad label leaves its EH landing pad without a
   post-landing-pad label; clear the back-pointer.  */
1629 lp_nr = EH_LANDING_PAD_NR (label);
1630 if (lp_nr)
1632 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
1633 lp->post_landing_pad = NULL;
1636 else
1638 gimple_set_bb (stmt, a);
1639 gsi_next (&gsi);
1643 /* Merge the sequences. */
1644 last = gsi_last_bb (a);
1645 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
1646 set_bb_seq (b, NULL);
1648 if (cfgcleanup_altered_bbs)
1649 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
1653 /* Return the one of two successors of BB that is not reachable by a
1654 complex edge, if there is one. Else, return BB. We use
1655 this in optimizations that use post-dominators for their heuristics,
1656 to catch the cases in C++ where function calls are involved. */
1658 basic_block
1659 single_noncomplex_succ (basic_block bb)
1661 edge e0, e1;
1662 if (EDGE_COUNT (bb->succs) != 2)
1663 return bb;
1665 e0 = EDGE_SUCC (bb, 0);
1666 e1 = EDGE_SUCC (bb, 1);
1667 if (e0->flags & EDGE_COMPLEX)
1668 return e1->dest;
1669 if (e1->flags & EDGE_COMPLEX)
1670 return e0->dest;
1672 return bb;
1675 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1677 void
1678 notice_special_calls (gimple call)
1680 int flags = gimple_call_flags (call);
1682 if (flags & ECF_MAY_BE_ALLOCA)
1683 cfun->calls_alloca = true;
1684 if (flags & ECF_RETURNS_TWICE)
1685 cfun->calls_setjmp = true;
1689 /* Clear flags set by notice_special_calls. Used by dead code removal
1690 to update the flags. */
1692 void
1693 clear_special_calls (void)
/* Reset both per-function call-property flags on CFUN; they are
   re-derived by notice_special_calls as calls are re-scanned.  */
1695 cfun->calls_alloca = false;
1696 cfun->calls_setjmp = false;
1699 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1701 static void
1702 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1704 /* Since this block is no longer reachable, we can just delete all
1705 of its PHI nodes. */
1706 remove_phi_nodes (bb);
1708 /* Remove edges to BB's successors. */
1709 while (EDGE_COUNT (bb->succs) > 0)
1710 remove_edge (EDGE_SUCC (bb, 0));
1714 /* Remove statements of basic block BB. */
1716 static void
1717 remove_bb (basic_block bb)
1719 gimple_stmt_iterator i;
1721 if (dump_file)
1723 fprintf (dump_file, "Removing basic block %d\n", bb->index);
1724 if (dump_flags & TDF_DETAILS)
1726 dump_bb (dump_file, bb, 0, dump_flags);
1727 fprintf (dump_file, "\n");
1731 if (current_loops)
1733 struct loop *loop = bb->loop_father;
1735 /* If a loop gets removed, clean up the information associated
1736 with it. */
1737 if (loop->latch == bb
1738 || loop->header == bb)
1739 free_numbers_of_iterations_estimates_loop (loop);
1742 /* Remove all the instructions in the block. */
1743 if (bb_seq (bb) != NULL)
1745 /* Walk backwards so as to get a chance to substitute all
1746 released DEFs into debug stmts. See
1747 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
1748 details. */
1749 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
1751 gimple stmt = gsi_stmt (i);
1752 if (gimple_code (stmt) == GIMPLE_LABEL
1753 && (FORCED_LABEL (gimple_label_label (stmt))
1754 || DECL_NONLOCAL (gimple_label_label (stmt))))
1756 basic_block new_bb;
1757 gimple_stmt_iterator new_gsi;
1759 /* A non-reachable non-local label may still be referenced.
1760 But it no longer needs to carry the extra semantics of
1761 non-locality. */
1762 if (DECL_NONLOCAL (gimple_label_label (stmt)))
1764 DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
1765 FORCED_LABEL (gimple_label_label (stmt)) = 1;
/* The referenced label must survive: move it to the start of the
   preceding block rather than deleting it.  */
1768 new_bb = bb->prev_bb;
1769 new_gsi = gsi_start_bb (new_bb);
1770 gsi_remove (&i, false);
1771 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
1773 else
1775 /* Release SSA definitions if we are in SSA. Note that we
1776 may be called when not in SSA. For example,
1777 final_cleanup calls this function via
1778 cleanup_tree_cfg. */
1779 if (gimple_in_ssa_p (cfun))
1780 release_defs (stmt);
1782 gsi_remove (&i, true);
/* gsi_remove invalidated the iterator's position; restart from the
   (new) last statement, or step backwards if still valid.  */
1785 if (gsi_end_p (i))
1786 i = gsi_last_bb (bb);
1787 else
1788 gsi_prev (&i);
1792 remove_phi_nodes_and_edges_for_unreachable_block (bb);
1793 bb->il.gimple.seq = NULL;
1794 bb->il.gimple.phi_nodes = NULL;
1798 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
1799 predicate VAL, return the edge that will be taken out of the block.
1800 If VAL does not match a unique edge, NULL is returned. */
1802 edge
1803 find_taken_edge (basic_block bb, tree val)
1805 gimple stmt;
1807 stmt = last_stmt (bb);
1809 gcc_assert (stmt);
1810 gcc_assert (is_ctrl_stmt (stmt));
1812 if (val == NULL)
1813 return NULL;
/* Only an invariant (constant) value can statically determine the
   taken edge.  */
1815 if (!is_gimple_min_invariant (val))
1816 return NULL;
1818 if (gimple_code (stmt) == GIMPLE_COND)
1819 return find_taken_edge_cond_expr (bb, val);
1821 if (gimple_code (stmt) == GIMPLE_SWITCH)
1822 return find_taken_edge_switch_expr (bb, val);
1824 if (computed_goto_p (stmt))
1826 /* Only optimize if the argument is a label, if the argument is
1827 not a label then we can not construct a proper CFG.
1829 It may be the case that we only need to allow the LABEL_REF to
1830 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
1831 appear inside a LABEL_EXPR just to be safe. */
1832 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
1833 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
1834 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
1835 return NULL;
/* Any other control statement kind is unexpected here.  */
1838 gcc_unreachable ();
1841 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
1842 statement, determine which of the outgoing edges will be taken out of the
1843 block. Return NULL if either edge may be taken. */
1845 static edge
1846 find_taken_edge_computed_goto (basic_block bb, tree val)
1848 basic_block dest;
1849 edge e = NULL;
1851 dest = label_to_block (val);
1852 if (dest)
1854 e = find_edge (bb, dest);
1855 gcc_assert (e != NULL);
1858 return e;
1861 /* Given a constant value VAL and the entry block BB to a COND_EXPR
1862 statement, determine which of the two edges will be taken out of the
1863 block. Return NULL if either edge may be taken. */
1865 static edge
1866 find_taken_edge_cond_expr (basic_block bb, tree val)
1868 edge true_edge, false_edge;
1870 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1872 gcc_assert (TREE_CODE (val) == INTEGER_CST);
1873 return (integer_zerop (val) ? false_edge : true_edge);
1876 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
1877 statement, determine which edge will be taken out of the block. Return
1878 NULL if any edge may be taken. */
1880 static edge
1881 find_taken_edge_switch_expr (basic_block bb, tree val)
1883 basic_block dest_bb;
1884 edge e;
1885 gimple switch_stmt;
1886 tree taken_case;
1888 switch_stmt = last_stmt (bb);
1889 taken_case = find_case_label_for_value (switch_stmt, val);
1890 dest_bb = label_to_block (CASE_LABEL (taken_case));
1892 e = find_edge (bb, dest_bb);
1893 gcc_assert (e);
1894 return e;
1898 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
1899 We can make optimal use here of the fact that the case labels are
1900 sorted: We can do a binary search for a case matching VAL. */
1902 static tree
1903 find_case_label_for_value (gimple switch_stmt, tree val)
1905 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
1906 tree default_case = gimple_switch_default_label (switch_stmt);
/* Binary search over the sorted case labels.  NOTE(review): low starts
   at 0, where the default label presumably lives and acts as a
   sentinel below all case values -- confirm.  */
1908 for (low = 0, high = n; high - low > 1; )
1910 size_t i = (high + low) / 2;
1911 tree t = gimple_switch_label (switch_stmt, i);
1912 int cmp;
1914 /* Cache the result of comparing CASE_LOW and val. */
1915 cmp = tree_int_cst_compare (CASE_LOW (t), val);
1917 if (cmp > 0)
1918 high = i;
1919 else
1920 low = i;
1922 if (CASE_HIGH (t) == NULL)
1924 /* A single-valued case label. */
1925 if (cmp == 0)
1926 return t;
1928 else
1930 /* A case range. We can only handle integer ranges. */
1931 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
1932 return t;
1936 return default_case;
1940 /* Dump a basic block on stderr. */
1942 void
1943 gimple_debug_bb (basic_block bb)
/* Include virtual operands, memory symbols and block boundaries.  */
1945 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
1949 /* Dump basic block with index N on stderr. */
1951 basic_block
1952 gimple_debug_bb_n (int n)
1954 gimple_debug_bb (BASIC_BLOCK (n));
1955 return BASIC_BLOCK (n);
1959 /* Dump the CFG on stderr.
1961 FLAGS are the same used by the tree dumping functions
1962 (see TDF_* in dumpfile.h). */
/* Convenience wrapper around gimple_dump_cfg for use from a debugger.  */
1964 void
1965 gimple_debug_cfg (int flags)
1967 gimple_dump_cfg (stderr, flags);
1971 /* Dump the program showing basic block boundaries on the given FILE.
1973 FLAGS are the same used by the tree dumping functions (see TDF_* in
1974 tree.h). */
1976 void
1977 gimple_dump_cfg (FILE *file, int flags)
/* With TDF_DETAILS, prefix the dump with a function header, block/edge
   counts and a brief CFG sketch.  */
1979 if (flags & TDF_DETAILS)
1981 dump_function_header (file, current_function_decl, flags);
1982 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
1983 n_basic_blocks, n_edges, last_basic_block);
1985 brief_dump_cfg (file, flags | TDF_COMMENT);
1986 fprintf (file, "\n");
1989 if (flags & TDF_STATS)
1990 dump_cfg_stats (file);
1992 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
1996 /* Dump CFG statistics on FILE. */
1998 void
1999 dump_cfg_stats (FILE *file)
/* max_num_merged_labels is static so it persists across calls and
   tracks the high-water mark over all functions dumped so far.  */
2001 static long max_num_merged_labels = 0;
2002 unsigned long size, total = 0;
2003 long num_edges;
2004 basic_block bb;
2005 const char * const fmt_str = "%-30s%-13s%12s\n";
2006 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2007 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2008 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2009 const char *funcname = current_function_name ();
2011 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2013 fprintf (file, "---------------------------------------------------------\n");
2014 fprintf (file, fmt_str, "", " Number of ", "Memory");
2015 fprintf (file, fmt_str, "", " instances ", "used ");
2016 fprintf (file, "---------------------------------------------------------\n");
/* NOTE(review): SCALE and LABEL are presumably size-formatting helper
   macros defined elsewhere in this file -- confirm.  */
2018 size = n_basic_blocks * sizeof (struct basic_block_def);
2019 total += size;
2020 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2021 SCALE (size), LABEL (size));
2023 num_edges = 0;
2024 FOR_EACH_BB (bb)
2025 num_edges += EDGE_COUNT (bb->succs);
2026 size = num_edges * sizeof (struct edge_def);
2027 total += size;
2028 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2030 fprintf (file, "---------------------------------------------------------\n");
2031 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2032 LABEL (total));
2033 fprintf (file, "---------------------------------------------------------\n");
2034 fprintf (file, "\n");
2036 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2037 max_num_merged_labels = cfg_stats.num_merged_labels;
2039 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2040 cfg_stats.num_merged_labels, max_num_merged_labels);
2042 fprintf (file, "\n");
2046 /* Dump CFG statistics on stderr. Keep extern so that it's always
2047 linked in the final executable. */
/* Debugger entry point; DEBUG_FUNCTION prevents it from being
   optimized away even though nothing in GCC calls it.  */
2049 DEBUG_FUNCTION void
2050 debug_cfg_stats (void)
2052 dump_cfg_stats (stderr);
2055 /*---------------------------------------------------------------------------
2056 Miscellaneous helpers
2057 ---------------------------------------------------------------------------*/
2059 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2060 flow. Transfers of control flow associated with EH are excluded. */
2062 static bool
2063 call_can_make_abnormal_goto (gimple t)
2065 /* If the function has no non-local labels, then a call cannot make an
2066 abnormal transfer of control. */
2067 if (!cfun->has_nonlocal_label
2068 && !cfun->calls_setjmp)
2069 return false;
2071 /* Likewise if the call has no side effects. */
2072 if (!gimple_has_side_effects (t))
2073 return false;
2075 /* Likewise if the called function is leaf. */
2076 if (gimple_call_flags (t) & ECF_LEAF)
2077 return false;
2079 return true;
2083 /* Return true if T can make an abnormal transfer of control flow.
2084 Transfers of control flow associated with EH are excluded. */
2086 bool
2087 stmt_can_make_abnormal_goto (gimple t)
2089 if (computed_goto_p (t))
2090 return true;
2091 if (is_gimple_call (t))
2092 return call_can_make_abnormal_goto (t);
2093 return false;
2097 /* Return true if T represents a stmt that always transfers control. */
2099 bool
2100 is_ctrl_stmt (gimple t)
2102 switch (gimple_code (t))
2104 case GIMPLE_COND:
2105 case GIMPLE_SWITCH:
2106 case GIMPLE_GOTO:
2107 case GIMPLE_RETURN:
2108 case GIMPLE_RESX:
2109 return true;
2110 default:
2111 return false;
2116 /* Return true if T is a statement that may alter the flow of control
2117 (e.g., a call to a non-returning function). */
2119 bool
2120 is_ctrl_altering_stmt (gimple t)
2122 gcc_assert (t);
2124 switch (gimple_code (t))
2126 case GIMPLE_CALL:
2128 int flags = gimple_call_flags (t);
2130 /* A call alters control flow if it can make an abnormal goto. */
2131 if (call_can_make_abnormal_goto (t))
2132 return true;
2134 /* A call also alters control flow if it does not return. */
2135 if (flags & ECF_NORETURN)
2136 return true;
2138 /* TM ending statements have backedges out of the transaction.
2139 Return true so we split the basic block containing them.
2140 Note that the TM_BUILTIN test is merely an optimization. */
2141 if ((flags & ECF_TM_BUILTIN)
2142 && is_tm_ending_fndecl (gimple_call_fndecl (t)))
2143 return true;
2145 /* BUILT_IN_RETURN call is same as return statement. */
2146 if (gimple_call_builtin_p (t, BUILT_IN_RETURN))
2147 return true;
/* An ordinary call may still alter control flow by throwing; that is
   handled by the stmt_can_throw_internal check at the end.  */
2149 break;
2151 case GIMPLE_EH_DISPATCH:
2152 /* EH_DISPATCH branches to the individual catch handlers at
2153 this level of a try or allowed-exceptions region. It can
2154 fallthru to the next statement as well. */
2155 return true;
2157 case GIMPLE_ASM:
2158 if (gimple_asm_nlabels (t) > 0)
2159 return true;
2160 break;
2162 CASE_GIMPLE_OMP:
2163 /* OpenMP directives alter control flow. */
2164 return true;
2166 case GIMPLE_TRANSACTION:
2167 /* A transaction start alters control flow. */
2168 return true;
2170 default:
2171 break;
2174 /* If a statement can throw, it alters control flow. */
2175 return stmt_can_throw_internal (t);
2179 /* Return true if T is a simple local goto. */
2181 bool
2182 simple_goto_p (gimple t)
/* A simple goto targets a LABEL_DECL directly; a computed goto's
   destination is not a LABEL_DECL and is therefore excluded.  */
2184 return (gimple_code (t) == GIMPLE_GOTO
2185 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2189 /* Return true if STMT should start a new basic block. PREV_STMT is
2190 the statement preceding STMT. It is used when STMT is a label or a
2191 case label. Labels should only start a new basic block if their
2192 previous statement wasn't a label. Otherwise, sequence of labels
2193 would generate unnecessary basic blocks that only contain a single
2194 label. */
2196 static inline bool
2197 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2199 if (stmt == NULL)
2200 return false;
2202 /* Labels start a new basic block only if the preceding statement
2203 wasn't a label of the same type. This prevents the creation of
2204 consecutive blocks that have nothing but a single label. */
2205 if (gimple_code (stmt) == GIMPLE_LABEL)
2207 /* Nonlocal and computed GOTO targets always start a new block. */
2208 if (DECL_NONLOCAL (gimple_label_label (stmt))
2209 || FORCED_LABEL (gimple_label_label (stmt)))
2210 return true;
2212 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2214 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
2215 return true;
/* Coalescing this label into the previous label's block; count it
   for the statistics reported by dump_cfg_stats.  */
2217 cfg_stats.num_merged_labels++;
2218 return false;
2220 else
2221 return true;
2223 else if (gimple_code (stmt) == GIMPLE_CALL
2224 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2225 /* setjmp acts similar to a nonlocal GOTO target and thus should
2226 start a new block. */
2227 return true;
2229 return false;
2233 /* Return true if T should end a basic block. */
2235 bool
2236 stmt_ends_bb_p (gimple t)
/* A block ends at any statement that always transfers control or that
   may alter control flow (noreturn call, throw, asm goto, ...).  */
2238 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2241 /* Remove block annotations and other data structures. */
2243 void
2244 delete_tree_cfg_annotations (void)
/* Release the label -> block map; it is rebuilt when the CFG is.  */
2246 vec_free (label_to_block_map);
2250 /* Return the first statement in basic block BB. */
2252 gimple
2253 first_stmt (basic_block bb)
2255 gimple_stmt_iterator i = gsi_start_bb (bb);
2256 gimple stmt = NULL;
2258 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2260 gsi_next (&i);
2261 stmt = NULL;
2263 return stmt;
2266 /* Return the first non-label statement in basic block BB. */
2268 static gimple
2269 first_non_label_stmt (basic_block bb)
2271 gimple_stmt_iterator i = gsi_start_bb (bb);
2272 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2273 gsi_next (&i);
2274 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2277 /* Return the last statement in basic block BB. */
2279 gimple
2280 last_stmt (basic_block bb)
2282 gimple_stmt_iterator i = gsi_last_bb (bb);
2283 gimple stmt = NULL;
2285 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2287 gsi_prev (&i);
2288 stmt = NULL;
2290 return stmt;
2293 /* Return the last statement of an otherwise empty block. Return NULL
2294 if the block is totally empty, or if it contains more than one
2295 statement. */
2297 gimple
2298 last_and_only_stmt (basic_block bb)
2300 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2301 gimple last, prev;
2303 if (gsi_end_p (i))
2304 return NULL;
2306 last = gsi_stmt (i);
2307 gsi_prev_nondebug (&i);
2308 if (gsi_end_p (i))
2309 return last;
2311 /* Empty statements should no longer appear in the instruction stream.
2312 Everything that might have appeared before should be deleted by
2313 remove_useless_stmts, and the optimizers should just gsi_remove
2314 instead of smashing with build_empty_stmt.
2316 Thus the only thing that should appear here in a block containing
2317 one executable statement is a label. */
2318 prev = gsi_stmt (i);
/* A leading label does not count as a statement for this purpose.  */
2319 if (gimple_code (prev) == GIMPLE_LABEL)
2320 return last;
2321 else
2322 return NULL;
2325 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2327 static void
2328 reinstall_phi_args (edge new_edge, edge old_edge)
2330 edge_var_map_vector *v;
2331 edge_var_map *vm;
2332 int i;
2333 gimple_stmt_iterator phis;
/* The queued mappings were recorded when OLD_EDGE was redirected;
   nothing to do if none were saved.  */
2335 v = redirect_edge_var_map_vector (old_edge);
2336 if (!v)
2337 return;
/* Walk the saved mappings and the destination's PHIs in lockstep; the
   assert below checks they stay in sync.  */
2339 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2340 v->iterate (i, &vm) && !gsi_end_p (phis);
2341 i++, gsi_next (&phis))
2343 gimple phi = gsi_stmt (phis);
2344 tree result = redirect_edge_var_map_result (vm);
2345 tree arg = redirect_edge_var_map_def (vm);
2347 gcc_assert (result == gimple_phi_result (phi));
2349 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2352 redirect_edge_var_map_clear (old_edge);
2355 /* Returns the basic block after which the new basic block created
2356 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2357 near its "logical" location. This is of most help to humans looking
2358 at debugging dumps. */
2360 static basic_block
2361 split_edge_bb_loc (edge edge_in)
2363 basic_block dest = edge_in->dest;
2364 basic_block dest_prev = dest->prev_bb;
2366 if (dest_prev)
2368 edge e = find_edge (dest_prev, dest);
2369 if (e && !(e->flags & EDGE_COMPLEX))
2370 return edge_in->src;
2372 return dest_prev;
2375 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2376 Abort on abnormal edges. */
2378 static basic_block
2379 gimple_split_edge (edge edge_in)
2381 basic_block new_bb, after_bb, dest;
2382 edge new_edge, e;
2384 /* Abnormal edges cannot be split. */
2385 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2387 dest = edge_in->dest;
2389 after_bb = split_edge_bb_loc (edge_in);
2391 new_bb = create_empty_bb (after_bb);
2392 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2393 new_bb->count = edge_in->count;
2394 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2395 new_edge->probability = REG_BR_PROB_BASE;
2396 new_edge->count = edge_in->count;
2398 e = redirect_edge_and_branch (edge_in, new_bb);
2399 gcc_assert (e == edge_in);
2400 reinstall_phi_args (new_edge, e);
2402 return new_bb;
2406 /* Verify properties of the address expression T with base object BASE. */
2408 static tree
2409 verify_address (tree t, tree base)
2411 bool old_constant;
2412 bool old_side_effects;
2413 bool new_constant;
2414 bool new_side_effects;
2416 old_constant = TREE_CONSTANT (t);
2417 old_side_effects = TREE_SIDE_EFFECTS (t);
2419 recompute_tree_invariant_for_addr_expr (t);
2420 new_side_effects = TREE_SIDE_EFFECTS (t);
2421 new_constant = TREE_CONSTANT (t);
2423 if (old_constant != new_constant)
2425 error ("constant not recomputed when ADDR_EXPR changed");
2426 return t;
2428 if (old_side_effects != new_side_effects)
2430 error ("side effects not recomputed when ADDR_EXPR changed");
2431 return t;
2434 if (!(TREE_CODE (base) == VAR_DECL
2435 || TREE_CODE (base) == PARM_DECL
2436 || TREE_CODE (base) == RESULT_DECL))
2437 return NULL_TREE;
2439 if (DECL_GIMPLE_REG_P (base))
2441 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2442 return base;
2445 return NULL_TREE;
2448 /* Callback for walk_tree, check that all elements with address taken are
2449 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2450 inside a PHI node. */
2452 static tree
2453 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2455 tree t = *tp, x;
2457 if (TYPE_P (t))
2458 *walk_subtrees = 0;
2460 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2461 #define CHECK_OP(N, MSG) \
2462 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2463 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2465 switch (TREE_CODE (t))
2467 case SSA_NAME:
2468 if (SSA_NAME_IN_FREE_LIST (t))
2470 error ("SSA name in freelist but still referenced");
2471 return *tp;
2473 break;
2475 case INDIRECT_REF:
2476 error ("INDIRECT_REF in gimple IL");
2477 return t;
2479 case MEM_REF:
2480 x = TREE_OPERAND (t, 0);
2481 if (!POINTER_TYPE_P (TREE_TYPE (x))
2482 || !is_gimple_mem_ref_addr (x))
2484 error ("invalid first operand of MEM_REF");
2485 return x;
2487 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2488 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2490 error ("invalid offset operand of MEM_REF");
2491 return TREE_OPERAND (t, 1);
2493 if (TREE_CODE (x) == ADDR_EXPR
2494 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2495 return x;
2496 *walk_subtrees = 0;
2497 break;
2499 case ASSERT_EXPR:
2500 x = fold (ASSERT_EXPR_COND (t));
2501 if (x == boolean_false_node)
2503 error ("ASSERT_EXPR with an always-false condition");
2504 return *tp;
2506 break;
2508 case MODIFY_EXPR:
2509 error ("MODIFY_EXPR not expected while having tuples");
2510 return *tp;
2512 case ADDR_EXPR:
2514 tree tem;
2516 gcc_assert (is_gimple_address (t));
2518 /* Skip any references (they will be checked when we recurse down the
2519 tree) and ensure that any variable used as a prefix is marked
2520 addressable. */
2521 for (x = TREE_OPERAND (t, 0);
2522 handled_component_p (x);
2523 x = TREE_OPERAND (x, 0))
2526 if ((tem = verify_address (t, x)))
2527 return tem;
2529 if (!(TREE_CODE (x) == VAR_DECL
2530 || TREE_CODE (x) == PARM_DECL
2531 || TREE_CODE (x) == RESULT_DECL))
2532 return NULL;
2534 if (!TREE_ADDRESSABLE (x))
2536 error ("address taken, but ADDRESSABLE bit not set");
2537 return x;
2540 break;
2543 case COND_EXPR:
2544 x = COND_EXPR_COND (t);
2545 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2547 error ("non-integral used in condition");
2548 return x;
2550 if (!is_gimple_condexpr (x))
2552 error ("invalid conditional operand");
2553 return x;
2555 break;
2557 case NON_LVALUE_EXPR:
2558 case TRUTH_NOT_EXPR:
2559 gcc_unreachable ();
2561 CASE_CONVERT:
2562 case FIX_TRUNC_EXPR:
2563 case FLOAT_EXPR:
2564 case NEGATE_EXPR:
2565 case ABS_EXPR:
2566 case BIT_NOT_EXPR:
2567 CHECK_OP (0, "invalid operand to unary operator");
2568 break;
2570 case REALPART_EXPR:
2571 case IMAGPART_EXPR:
2572 case BIT_FIELD_REF:
2573 if (!is_gimple_reg_type (TREE_TYPE (t)))
2575 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2576 return t;
2579 if (TREE_CODE (t) == BIT_FIELD_REF)
2581 if (!tree_fits_uhwi_p (TREE_OPERAND (t, 1))
2582 || !tree_fits_uhwi_p (TREE_OPERAND (t, 2)))
2584 error ("invalid position or size operand to BIT_FIELD_REF");
2585 return t;
2587 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2588 && (TYPE_PRECISION (TREE_TYPE (t))
2589 != tree_to_uhwi (TREE_OPERAND (t, 1))))
2591 error ("integral result type precision does not match "
2592 "field size of BIT_FIELD_REF");
2593 return t;
2595 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2596 && !AGGREGATE_TYPE_P (TREE_TYPE (t))
2597 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2598 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2599 != tree_to_uhwi (TREE_OPERAND (t, 1))))
2601 error ("mode precision of non-integral result does not "
2602 "match field size of BIT_FIELD_REF");
2603 return t;
2606 t = TREE_OPERAND (t, 0);
2608 /* Fall-through. */
2609 case COMPONENT_REF:
2610 case ARRAY_REF:
2611 case ARRAY_RANGE_REF:
2612 case VIEW_CONVERT_EXPR:
2613 /* We have a nest of references. Verify that each of the operands
2614 that determine where to reference is either a constant or a variable,
2615 verify that the base is valid, and then show we've already checked
2616 the subtrees. */
2617 while (handled_component_p (t))
2619 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2620 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2621 else if (TREE_CODE (t) == ARRAY_REF
2622 || TREE_CODE (t) == ARRAY_RANGE_REF)
2624 CHECK_OP (1, "invalid array index");
2625 if (TREE_OPERAND (t, 2))
2626 CHECK_OP (2, "invalid array lower bound");
2627 if (TREE_OPERAND (t, 3))
2628 CHECK_OP (3, "invalid array stride");
2630 else if (TREE_CODE (t) == BIT_FIELD_REF
2631 || TREE_CODE (t) == REALPART_EXPR
2632 || TREE_CODE (t) == IMAGPART_EXPR)
2634 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
2635 "REALPART_EXPR");
2636 return t;
2639 t = TREE_OPERAND (t, 0);
2642 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2644 error ("invalid reference prefix");
2645 return t;
2647 *walk_subtrees = 0;
2648 break;
2649 case PLUS_EXPR:
2650 case MINUS_EXPR:
2651 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
2652 POINTER_PLUS_EXPR. */
2653 if (POINTER_TYPE_P (TREE_TYPE (t)))
2655 error ("invalid operand to plus/minus, type is a pointer");
2656 return t;
2658 CHECK_OP (0, "invalid operand to binary operator");
2659 CHECK_OP (1, "invalid operand to binary operator");
2660 break;
2662 case POINTER_PLUS_EXPR:
2663 /* Check to make sure the first operand is a pointer or reference type. */
2664 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
2666 error ("invalid operand to pointer plus, first operand is not a pointer");
2667 return t;
2669 /* Check to make sure the second operand is a ptrofftype. */
2670 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
2672 error ("invalid operand to pointer plus, second operand is not an "
2673 "integer type of appropriate width");
2674 return t;
2676 /* FALLTHROUGH */
2677 case LT_EXPR:
2678 case LE_EXPR:
2679 case GT_EXPR:
2680 case GE_EXPR:
2681 case EQ_EXPR:
2682 case NE_EXPR:
2683 case UNORDERED_EXPR:
2684 case ORDERED_EXPR:
2685 case UNLT_EXPR:
2686 case UNLE_EXPR:
2687 case UNGT_EXPR:
2688 case UNGE_EXPR:
2689 case UNEQ_EXPR:
2690 case LTGT_EXPR:
2691 case MULT_EXPR:
2692 case TRUNC_DIV_EXPR:
2693 case CEIL_DIV_EXPR:
2694 case FLOOR_DIV_EXPR:
2695 case ROUND_DIV_EXPR:
2696 case TRUNC_MOD_EXPR:
2697 case CEIL_MOD_EXPR:
2698 case FLOOR_MOD_EXPR:
2699 case ROUND_MOD_EXPR:
2700 case RDIV_EXPR:
2701 case EXACT_DIV_EXPR:
2702 case MIN_EXPR:
2703 case MAX_EXPR:
2704 case LSHIFT_EXPR:
2705 case RSHIFT_EXPR:
2706 case LROTATE_EXPR:
2707 case RROTATE_EXPR:
2708 case BIT_IOR_EXPR:
2709 case BIT_XOR_EXPR:
2710 case BIT_AND_EXPR:
2711 CHECK_OP (0, "invalid operand to binary operator");
2712 CHECK_OP (1, "invalid operand to binary operator");
2713 break;
2715 case CONSTRUCTOR:
2716 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2717 *walk_subtrees = 0;
2718 break;
2720 case CASE_LABEL_EXPR:
2721 if (CASE_CHAIN (t))
2723 error ("invalid CASE_CHAIN");
2724 return t;
2726 break;
2728 default:
2729 break;
2731 return NULL;
2733 #undef CHECK_OP
2737 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2738 Returns true if there is an error, otherwise false. */
2740 static bool
2741 verify_types_in_gimple_min_lval (tree expr)
2743 tree op;
2745 if (is_gimple_id (expr))
2746 return false;
2748 if (TREE_CODE (expr) != TARGET_MEM_REF
2749 && TREE_CODE (expr) != MEM_REF)
2751 error ("invalid expression for min lvalue");
2752 return true;
2755 /* TARGET_MEM_REFs are strange beasts. */
2756 if (TREE_CODE (expr) == TARGET_MEM_REF)
2757 return false;
2759 op = TREE_OPERAND (expr, 0);
2760 if (!is_gimple_val (op))
2762 error ("invalid operand in indirect reference");
2763 debug_generic_stmt (op);
2764 return true;
2766 /* Memory references now generally can involve a value conversion. */
2768 return false;
2771 /* Verify if EXPR is a valid GIMPLE reference expression. If
2772 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
2773 if there is an error, otherwise false. */
2775 static bool
2776 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
2778 while (handled_component_p (expr))
2780 tree op = TREE_OPERAND (expr, 0);
2782 if (TREE_CODE (expr) == ARRAY_REF
2783 || TREE_CODE (expr) == ARRAY_RANGE_REF)
2785 if (!is_gimple_val (TREE_OPERAND (expr, 1))
2786 || (TREE_OPERAND (expr, 2)
2787 && !is_gimple_val (TREE_OPERAND (expr, 2)))
2788 || (TREE_OPERAND (expr, 3)
2789 && !is_gimple_val (TREE_OPERAND (expr, 3))))
2791 error ("invalid operands to array reference");
2792 debug_generic_stmt (expr);
2793 return true;
2797 /* Verify if the reference array element types are compatible. */
2798 if (TREE_CODE (expr) == ARRAY_REF
2799 && !useless_type_conversion_p (TREE_TYPE (expr),
2800 TREE_TYPE (TREE_TYPE (op))))
2802 error ("type mismatch in array reference");
2803 debug_generic_stmt (TREE_TYPE (expr));
2804 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2805 return true;
2807 if (TREE_CODE (expr) == ARRAY_RANGE_REF
2808 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
2809 TREE_TYPE (TREE_TYPE (op))))
2811 error ("type mismatch in array range reference");
2812 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
2813 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2814 return true;
2817 if ((TREE_CODE (expr) == REALPART_EXPR
2818 || TREE_CODE (expr) == IMAGPART_EXPR)
2819 && !useless_type_conversion_p (TREE_TYPE (expr),
2820 TREE_TYPE (TREE_TYPE (op))))
2822 error ("type mismatch in real/imagpart reference");
2823 debug_generic_stmt (TREE_TYPE (expr));
2824 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2825 return true;
2828 if (TREE_CODE (expr) == COMPONENT_REF
2829 && !useless_type_conversion_p (TREE_TYPE (expr),
2830 TREE_TYPE (TREE_OPERAND (expr, 1))))
2832 error ("type mismatch in component reference");
2833 debug_generic_stmt (TREE_TYPE (expr));
2834 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
2835 return true;
2838 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2840 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
2841 that their operand is not an SSA name or an invariant when
2842 requiring an lvalue (this usually means there is a SRA or IPA-SRA
2843 bug). Otherwise there is nothing to verify, gross mismatches at
2844 most invoke undefined behavior. */
2845 if (require_lvalue
2846 && (TREE_CODE (op) == SSA_NAME
2847 || is_gimple_min_invariant (op)))
2849 error ("conversion of an SSA_NAME on the left hand side");
2850 debug_generic_stmt (expr);
2851 return true;
2853 else if (TREE_CODE (op) == SSA_NAME
2854 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
2856 error ("conversion of register to a different size");
2857 debug_generic_stmt (expr);
2858 return true;
2860 else if (!handled_component_p (op))
2861 return false;
2864 expr = op;
2867 if (TREE_CODE (expr) == MEM_REF)
2869 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
2871 error ("invalid address operand in MEM_REF");
2872 debug_generic_stmt (expr);
2873 return true;
2875 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
2876 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
2878 error ("invalid offset operand in MEM_REF");
2879 debug_generic_stmt (expr);
2880 return true;
2883 else if (TREE_CODE (expr) == TARGET_MEM_REF)
2885 if (!TMR_BASE (expr)
2886 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
2888 error ("invalid address operand in TARGET_MEM_REF");
2889 return true;
2891 if (!TMR_OFFSET (expr)
2892 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
2893 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
2895 error ("invalid offset operand in TARGET_MEM_REF");
2896 debug_generic_stmt (expr);
2897 return true;
2901 return ((require_lvalue || !is_gimple_min_invariant (expr))
2902 && verify_types_in_gimple_min_lval (expr));
2905 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
2906 list of pointer-to types that is trivially convertible to DEST. */
2908 static bool
2909 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
2911 tree src;
2913 if (!TYPE_POINTER_TO (src_obj))
2914 return true;
2916 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
2917 if (useless_type_conversion_p (dest, src))
2918 return true;
2920 return false;
2923 /* Return true if TYPE1 is a fixed-point type and if conversions to and
2924 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
2926 static bool
2927 valid_fixed_convert_types_p (tree type1, tree type2)
2929 return (FIXED_POINT_TYPE_P (type1)
2930 && (INTEGRAL_TYPE_P (type2)
2931 || SCALAR_FLOAT_TYPE_P (type2)
2932 || FIXED_POINT_TYPE_P (type2)));
2935 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
2936 is a problem, otherwise false. */
2938 static bool
2939 verify_gimple_call (gimple stmt)
2941 tree fn = gimple_call_fn (stmt);
2942 tree fntype, fndecl;
2943 unsigned i;
2945 if (gimple_call_internal_p (stmt))
2947 if (fn)
2949 error ("gimple call has two targets");
2950 debug_generic_stmt (fn);
2951 return true;
2954 else
2956 if (!fn)
2958 error ("gimple call has no target");
2959 return true;
2963 if (fn && !is_gimple_call_addr (fn))
2965 error ("invalid function in gimple call");
2966 debug_generic_stmt (fn);
2967 return true;
2970 if (fn
2971 && (!POINTER_TYPE_P (TREE_TYPE (fn))
2972 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
2973 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
2975 error ("non-function in gimple call");
2976 return true;
2979 fndecl = gimple_call_fndecl (stmt);
2980 if (fndecl
2981 && TREE_CODE (fndecl) == FUNCTION_DECL
2982 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
2983 && !DECL_PURE_P (fndecl)
2984 && !TREE_READONLY (fndecl))
2986 error ("invalid pure const state for function");
2987 return true;
2990 if (gimple_call_lhs (stmt)
2991 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
2992 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
2994 error ("invalid LHS in gimple call");
2995 return true;
2998 if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
3000 error ("LHS in noreturn call");
3001 return true;
3004 fntype = gimple_call_fntype (stmt);
3005 if (fntype
3006 && gimple_call_lhs (stmt)
3007 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3008 TREE_TYPE (fntype))
3009 /* ??? At least C++ misses conversions at assignments from
3010 void * call results.
3011 ??? Java is completely off. Especially with functions
3012 returning java.lang.Object.
3013 For now simply allow arbitrary pointer type conversions. */
3014 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3015 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3017 error ("invalid conversion in gimple call");
3018 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3019 debug_generic_stmt (TREE_TYPE (fntype));
3020 return true;
3023 if (gimple_call_chain (stmt)
3024 && !is_gimple_val (gimple_call_chain (stmt)))
3026 error ("invalid static chain in gimple call");
3027 debug_generic_stmt (gimple_call_chain (stmt));
3028 return true;
3031 /* If there is a static chain argument, this should not be an indirect
3032 call, and the decl should have DECL_STATIC_CHAIN set. */
3033 if (gimple_call_chain (stmt))
3035 if (!gimple_call_fndecl (stmt))
3037 error ("static chain in indirect gimple call");
3038 return true;
3040 fn = TREE_OPERAND (fn, 0);
3042 if (!DECL_STATIC_CHAIN (fn))
3044 error ("static chain with function that doesn%'t use one");
3045 return true;
3049 /* ??? The C frontend passes unpromoted arguments in case it
3050 didn't see a function declaration before the call. So for now
3051 leave the call arguments mostly unverified. Once we gimplify
3052 unit-at-a-time we have a chance to fix this. */
3054 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3056 tree arg = gimple_call_arg (stmt, i);
3057 if ((is_gimple_reg_type (TREE_TYPE (arg))
3058 && !is_gimple_val (arg))
3059 || (!is_gimple_reg_type (TREE_TYPE (arg))
3060 && !is_gimple_lvalue (arg)))
3062 error ("invalid argument to gimple call");
3063 debug_generic_expr (arg);
3064 return true;
3068 return false;
3071 /* Verifies the gimple comparison with the result type TYPE and
3072 the operands OP0 and OP1. */
3074 static bool
3075 verify_gimple_comparison (tree type, tree op0, tree op1)
3077 tree op0_type = TREE_TYPE (op0);
3078 tree op1_type = TREE_TYPE (op1);
3080 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3082 error ("invalid operands in gimple comparison");
3083 return true;
3086 /* For comparisons we do not have the operations type as the
3087 effective type the comparison is carried out in. Instead
3088 we require that either the first operand is trivially
3089 convertible into the second, or the other way around.
3090 Because we special-case pointers to void we allow
3091 comparisons of pointers with the same mode as well. */
3092 if (!useless_type_conversion_p (op0_type, op1_type)
3093 && !useless_type_conversion_p (op1_type, op0_type)
3094 && (!POINTER_TYPE_P (op0_type)
3095 || !POINTER_TYPE_P (op1_type)
3096 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3098 error ("mismatching comparison operand types");
3099 debug_generic_expr (op0_type);
3100 debug_generic_expr (op1_type);
3101 return true;
3104 /* The resulting type of a comparison may be an effective boolean type. */
3105 if (INTEGRAL_TYPE_P (type)
3106 && (TREE_CODE (type) == BOOLEAN_TYPE
3107 || TYPE_PRECISION (type) == 1))
3109 if (TREE_CODE (op0_type) == VECTOR_TYPE
3110 || TREE_CODE (op1_type) == VECTOR_TYPE)
3112 error ("vector comparison returning a boolean");
3113 debug_generic_expr (op0_type);
3114 debug_generic_expr (op1_type);
3115 return true;
3118 /* Or an integer vector type with the same size and element count
3119 as the comparison operand types. */
3120 else if (TREE_CODE (type) == VECTOR_TYPE
3121 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3123 if (TREE_CODE (op0_type) != VECTOR_TYPE
3124 || TREE_CODE (op1_type) != VECTOR_TYPE)
3126 error ("non-vector operands in vector comparison");
3127 debug_generic_expr (op0_type);
3128 debug_generic_expr (op1_type);
3129 return true;
3132 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3133 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3134 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
3135 /* The result of a vector comparison is of signed
3136 integral type. */
3137 || TYPE_UNSIGNED (TREE_TYPE (type)))
3139 error ("invalid vector comparison resulting type");
3140 debug_generic_expr (type);
3141 return true;
3144 else
3146 error ("bogus comparison result type");
3147 debug_generic_expr (type);
3148 return true;
3151 return false;
3154 /* Verify a gimple assignment statement STMT with an unary rhs.
3155 Returns true if anything is wrong. */
3157 static bool
3158 verify_gimple_assign_unary (gimple stmt)
3160 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3161 tree lhs = gimple_assign_lhs (stmt);
3162 tree lhs_type = TREE_TYPE (lhs);
3163 tree rhs1 = gimple_assign_rhs1 (stmt);
3164 tree rhs1_type = TREE_TYPE (rhs1);
3166 if (!is_gimple_reg (lhs))
3168 error ("non-register as LHS of unary operation");
3169 return true;
3172 if (!is_gimple_val (rhs1))
3174 error ("invalid operand in unary operation");
3175 return true;
3178 /* First handle conversions. */
3179 switch (rhs_code)
3181 CASE_CONVERT:
3183 /* Allow conversions from pointer type to integral type only if
3184 there is no sign or zero extension involved.
3185 For targets were the precision of ptrofftype doesn't match that
3186 of pointers we need to allow arbitrary conversions to ptrofftype. */
3187 if ((POINTER_TYPE_P (lhs_type)
3188 && INTEGRAL_TYPE_P (rhs1_type))
3189 || (POINTER_TYPE_P (rhs1_type)
3190 && INTEGRAL_TYPE_P (lhs_type)
3191 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3192 || ptrofftype_p (sizetype))))
3193 return false;
3195 /* Allow conversion from integral to offset type and vice versa. */
3196 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3197 && INTEGRAL_TYPE_P (rhs1_type))
3198 || (INTEGRAL_TYPE_P (lhs_type)
3199 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3200 return false;
3202 /* Otherwise assert we are converting between types of the
3203 same kind. */
3204 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3206 error ("invalid types in nop conversion");
3207 debug_generic_expr (lhs_type);
3208 debug_generic_expr (rhs1_type);
3209 return true;
3212 return false;
3215 case ADDR_SPACE_CONVERT_EXPR:
3217 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3218 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3219 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3221 error ("invalid types in address space conversion");
3222 debug_generic_expr (lhs_type);
3223 debug_generic_expr (rhs1_type);
3224 return true;
3227 return false;
3230 case FIXED_CONVERT_EXPR:
3232 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3233 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3235 error ("invalid types in fixed-point conversion");
3236 debug_generic_expr (lhs_type);
3237 debug_generic_expr (rhs1_type);
3238 return true;
3241 return false;
3244 case FLOAT_EXPR:
3246 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3247 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3248 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3250 error ("invalid types in conversion to floating point");
3251 debug_generic_expr (lhs_type);
3252 debug_generic_expr (rhs1_type);
3253 return true;
3256 return false;
3259 case FIX_TRUNC_EXPR:
3261 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3262 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3263 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3265 error ("invalid types in conversion to integer");
3266 debug_generic_expr (lhs_type);
3267 debug_generic_expr (rhs1_type);
3268 return true;
3271 return false;
3274 case VEC_UNPACK_HI_EXPR:
3275 case VEC_UNPACK_LO_EXPR:
3276 case REDUC_MAX_EXPR:
3277 case REDUC_MIN_EXPR:
3278 case REDUC_PLUS_EXPR:
3279 case VEC_UNPACK_FLOAT_HI_EXPR:
3280 case VEC_UNPACK_FLOAT_LO_EXPR:
3281 /* FIXME. */
3282 return false;
3284 case NEGATE_EXPR:
3285 case ABS_EXPR:
3286 case BIT_NOT_EXPR:
3287 case PAREN_EXPR:
3288 case NON_LVALUE_EXPR:
3289 case CONJ_EXPR:
3290 break;
3292 default:
3293 gcc_unreachable ();
3296 /* For the remaining codes assert there is no conversion involved. */
3297 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3299 error ("non-trivial conversion in unary operation");
3300 debug_generic_expr (lhs_type);
3301 debug_generic_expr (rhs1_type);
3302 return true;
3305 return false;
3308 /* Verify a gimple assignment statement STMT with a binary rhs.
3309 Returns true if anything is wrong. */
3311 static bool
3312 verify_gimple_assign_binary (gimple stmt)
3314 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3315 tree lhs = gimple_assign_lhs (stmt);
3316 tree lhs_type = TREE_TYPE (lhs);
3317 tree rhs1 = gimple_assign_rhs1 (stmt);
3318 tree rhs1_type = TREE_TYPE (rhs1);
3319 tree rhs2 = gimple_assign_rhs2 (stmt);
3320 tree rhs2_type = TREE_TYPE (rhs2);
3322 if (!is_gimple_reg (lhs))
3324 error ("non-register as LHS of binary operation");
3325 return true;
3328 if (!is_gimple_val (rhs1)
3329 || !is_gimple_val (rhs2))
3331 error ("invalid operands in binary operation");
3332 return true;
3335 /* First handle operations that involve different types. */
3336 switch (rhs_code)
3338 case COMPLEX_EXPR:
3340 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3341 || !(INTEGRAL_TYPE_P (rhs1_type)
3342 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3343 || !(INTEGRAL_TYPE_P (rhs2_type)
3344 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3346 error ("type mismatch in complex expression");
3347 debug_generic_expr (lhs_type);
3348 debug_generic_expr (rhs1_type);
3349 debug_generic_expr (rhs2_type);
3350 return true;
3353 return false;
3356 case LSHIFT_EXPR:
3357 case RSHIFT_EXPR:
3358 case LROTATE_EXPR:
3359 case RROTATE_EXPR:
3361 /* Shifts and rotates are ok on integral types, fixed point
3362 types and integer vector types. */
3363 if ((!INTEGRAL_TYPE_P (rhs1_type)
3364 && !FIXED_POINT_TYPE_P (rhs1_type)
3365 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3366 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3367 || (!INTEGRAL_TYPE_P (rhs2_type)
3368 /* Vector shifts of vectors are also ok. */
3369 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3370 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3371 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3372 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3373 || !useless_type_conversion_p (lhs_type, rhs1_type))
3375 error ("type mismatch in shift expression");
3376 debug_generic_expr (lhs_type);
3377 debug_generic_expr (rhs1_type);
3378 debug_generic_expr (rhs2_type);
3379 return true;
3382 return false;
3385 case VEC_LSHIFT_EXPR:
3386 case VEC_RSHIFT_EXPR:
3388 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3389 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3390 || POINTER_TYPE_P (TREE_TYPE (rhs1_type))
3391 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3392 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3393 || (!INTEGRAL_TYPE_P (rhs2_type)
3394 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3395 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3396 || !useless_type_conversion_p (lhs_type, rhs1_type))
3398 error ("type mismatch in vector shift expression");
3399 debug_generic_expr (lhs_type);
3400 debug_generic_expr (rhs1_type);
3401 debug_generic_expr (rhs2_type);
3402 return true;
3404 /* For shifting a vector of non-integral components we
3405 only allow shifting by a constant multiple of the element size. */
3406 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3407 && (TREE_CODE (rhs2) != INTEGER_CST
3408 || !div_if_zero_remainder (rhs2,
3409 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3411 error ("non-element sized vector shift of floating point vector");
3412 return true;
3415 return false;
3418 case WIDEN_LSHIFT_EXPR:
3420 if (!INTEGRAL_TYPE_P (lhs_type)
3421 || !INTEGRAL_TYPE_P (rhs1_type)
3422 || TREE_CODE (rhs2) != INTEGER_CST
3423 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3425 error ("type mismatch in widening vector shift expression");
3426 debug_generic_expr (lhs_type);
3427 debug_generic_expr (rhs1_type);
3428 debug_generic_expr (rhs2_type);
3429 return true;
3432 return false;
3435 case VEC_WIDEN_LSHIFT_HI_EXPR:
3436 case VEC_WIDEN_LSHIFT_LO_EXPR:
3438 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3439 || TREE_CODE (lhs_type) != VECTOR_TYPE
3440 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3441 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3442 || TREE_CODE (rhs2) != INTEGER_CST
3443 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3444 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3446 error ("type mismatch in widening vector shift expression");
3447 debug_generic_expr (lhs_type);
3448 debug_generic_expr (rhs1_type);
3449 debug_generic_expr (rhs2_type);
3450 return true;
3453 return false;
3456 case PLUS_EXPR:
3457 case MINUS_EXPR:
3459 tree lhs_etype = lhs_type;
3460 tree rhs1_etype = rhs1_type;
3461 tree rhs2_etype = rhs2_type;
3462 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3464 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3465 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3467 error ("invalid non-vector operands to vector valued plus");
3468 return true;
3470 lhs_etype = TREE_TYPE (lhs_type);
3471 rhs1_etype = TREE_TYPE (rhs1_type);
3472 rhs2_etype = TREE_TYPE (rhs2_type);
3474 if (POINTER_TYPE_P (lhs_etype)
3475 || POINTER_TYPE_P (rhs1_etype)
3476 || POINTER_TYPE_P (rhs2_etype))
3478 error ("invalid (pointer) operands to plus/minus");
3479 return true;
3482 /* Continue with generic binary expression handling. */
3483 break;
3486 case POINTER_PLUS_EXPR:
3488 if (!POINTER_TYPE_P (rhs1_type)
3489 || !useless_type_conversion_p (lhs_type, rhs1_type)
3490 || !ptrofftype_p (rhs2_type))
3492 error ("type mismatch in pointer plus expression");
3493 debug_generic_stmt (lhs_type);
3494 debug_generic_stmt (rhs1_type);
3495 debug_generic_stmt (rhs2_type);
3496 return true;
3499 return false;
3502 case TRUTH_ANDIF_EXPR:
3503 case TRUTH_ORIF_EXPR:
3504 case TRUTH_AND_EXPR:
3505 case TRUTH_OR_EXPR:
3506 case TRUTH_XOR_EXPR:
3508 gcc_unreachable ();
3510 case LT_EXPR:
3511 case LE_EXPR:
3512 case GT_EXPR:
3513 case GE_EXPR:
3514 case EQ_EXPR:
3515 case NE_EXPR:
3516 case UNORDERED_EXPR:
3517 case ORDERED_EXPR:
3518 case UNLT_EXPR:
3519 case UNLE_EXPR:
3520 case UNGT_EXPR:
3521 case UNGE_EXPR:
3522 case UNEQ_EXPR:
3523 case LTGT_EXPR:
3524 /* Comparisons are also binary, but the result type is not
3525 connected to the operand types. */
3526 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3528 case WIDEN_MULT_EXPR:
3529 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3530 return true;
3531 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3532 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3534 case WIDEN_SUM_EXPR:
3535 case VEC_WIDEN_MULT_HI_EXPR:
3536 case VEC_WIDEN_MULT_LO_EXPR:
3537 case VEC_WIDEN_MULT_EVEN_EXPR:
3538 case VEC_WIDEN_MULT_ODD_EXPR:
3539 case VEC_PACK_TRUNC_EXPR:
3540 case VEC_PACK_SAT_EXPR:
3541 case VEC_PACK_FIX_TRUNC_EXPR:
3542 /* FIXME. */
3543 return false;
3545 case MULT_EXPR:
3546 case MULT_HIGHPART_EXPR:
3547 case TRUNC_DIV_EXPR:
3548 case CEIL_DIV_EXPR:
3549 case FLOOR_DIV_EXPR:
3550 case ROUND_DIV_EXPR:
3551 case TRUNC_MOD_EXPR:
3552 case CEIL_MOD_EXPR:
3553 case FLOOR_MOD_EXPR:
3554 case ROUND_MOD_EXPR:
3555 case RDIV_EXPR:
3556 case EXACT_DIV_EXPR:
3557 case MIN_EXPR:
3558 case MAX_EXPR:
3559 case BIT_IOR_EXPR:
3560 case BIT_XOR_EXPR:
3561 case BIT_AND_EXPR:
3562 /* Continue with generic binary expression handling. */
3563 break;
3565 default:
3566 gcc_unreachable ();
3569 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3570 || !useless_type_conversion_p (lhs_type, rhs2_type))
3572 error ("type mismatch in binary expression");
3573 debug_generic_stmt (lhs_type);
3574 debug_generic_stmt (rhs1_type);
3575 debug_generic_stmt (rhs2_type);
3576 return true;
3579 return false;
3582 /* Verify a gimple assignment statement STMT with a ternary rhs.
3583 Returns true if anything is wrong. */
/* Verify a GIMPLE assignment STMT whose RHS class is ternary
   (WIDEN_MULT_PLUS/MINUS, FMA, COND/VEC_COND, VEC_PERM, ...).
   Returns true if anything is wrong, false if the statement is OK.
   NOTE: the blob extraction dropped brace-only lines below; the
   code lines themselves are unchanged.  */
3585 static bool
3586 verify_gimple_assign_ternary (gimple stmt)
3588 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3589 tree lhs = gimple_assign_lhs (stmt);
3590 tree lhs_type = TREE_TYPE (lhs);
3591 tree rhs1 = gimple_assign_rhs1 (stmt);
3592 tree rhs1_type = TREE_TYPE (rhs1);
3593 tree rhs2 = gimple_assign_rhs2 (stmt);
3594 tree rhs2_type = TREE_TYPE (rhs2);
3595 tree rhs3 = gimple_assign_rhs3 (stmt);
3596 tree rhs3_type = TREE_TYPE (rhs3);
/* A ternary operation always produces a register value.  */
3598 if (!is_gimple_reg (lhs))
3600 error ("non-register as LHS of ternary operation");
3601 return true;
/* For (VEC_)COND_EXPR the first operand may be a condition expression;
   all other operands must be gimple values.  */
3604 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3605 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3606 || !is_gimple_val (rhs2)
3607 || !is_gimple_val (rhs3))
3609 error ("invalid operands in ternary operation");
3610 return true;
3613 /* First handle operations that involve different types. */
3614 switch (rhs_code)
3616 case WIDEN_MULT_PLUS_EXPR:
3617 case WIDEN_MULT_MINUS_EXPR:
/* rhs1/rhs2 are the narrow multiplicands (integral or fixed-point,
   same precision); rhs3 and the LHS carry the wide accumulator type,
   at least twice the multiplicand precision.  */
3618 if ((!INTEGRAL_TYPE_P (rhs1_type)
3619 && !FIXED_POINT_TYPE_P (rhs1_type))
3620 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3621 || !useless_type_conversion_p (lhs_type, rhs3_type)
3622 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3623 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3625 error ("type mismatch in widening multiply-accumulate expression");
3626 debug_generic_expr (lhs_type);
3627 debug_generic_expr (rhs1_type);
3628 debug_generic_expr (rhs2_type);
3629 debug_generic_expr (rhs3_type);
3630 return true;
3632 break;
3634 case FMA_EXPR:
/* Fused multiply-add: all three operands and the result share a type.  */
3635 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3636 || !useless_type_conversion_p (lhs_type, rhs2_type)
3637 || !useless_type_conversion_p (lhs_type, rhs3_type))
3639 error ("type mismatch in fused multiply-add expression");
3640 debug_generic_expr (lhs_type);
3641 debug_generic_expr (rhs1_type);
3642 debug_generic_expr (rhs2_type);
3643 debug_generic_expr (rhs3_type);
3644 return true;
3646 break;
3648 case COND_EXPR:
3649 case VEC_COND_EXPR:
/* Both selected values must match the result type; the condition
   (rhs1) was already validated as a condexpr above.  */
3650 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3651 || !useless_type_conversion_p (lhs_type, rhs3_type))
3653 error ("type mismatch in conditional expression");
3654 debug_generic_expr (lhs_type);
3655 debug_generic_expr (rhs2_type);
3656 debug_generic_expr (rhs3_type);
3657 return true;
3659 break;
3661 case VEC_PERM_EXPR:
/* Data inputs must have the result's vector type...  */
3662 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3663 || !useless_type_conversion_p (lhs_type, rhs2_type))
3665 error ("type mismatch in vector permute expression");
3666 debug_generic_expr (lhs_type);
3667 debug_generic_expr (rhs1_type);
3668 debug_generic_expr (rhs2_type);
3669 debug_generic_expr (rhs3_type);
3670 return true;
/* ...all three operands must actually be vectors...  */
3673 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3674 || TREE_CODE (rhs2_type) != VECTOR_TYPE
3675 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
3677 error ("vector types expected in vector permute expression");
3678 debug_generic_expr (lhs_type);
3679 debug_generic_expr (rhs1_type);
3680 debug_generic_expr (rhs2_type);
3681 debug_generic_expr (rhs3_type);
3682 return true;
/* ...with the same number of elements everywhere...  */
3685 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
3686 || TYPE_VECTOR_SUBPARTS (rhs2_type)
3687 != TYPE_VECTOR_SUBPARTS (rhs3_type)
3688 || TYPE_VECTOR_SUBPARTS (rhs3_type)
3689 != TYPE_VECTOR_SUBPARTS (lhs_type))
3691 error ("vectors with different element number found "
3692 "in vector permute expression");
3693 debug_generic_expr (lhs_type);
3694 debug_generic_expr (rhs1_type);
3695 debug_generic_expr (rhs2_type);
3696 debug_generic_expr (rhs3_type);
3697 return true;
/* ...and the mask (rhs3) must be an integer vector whose element
   width matches the data element width.  */
3700 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
3701 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
3702 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
3704 error ("invalid mask type in vector permute expression");
3705 debug_generic_expr (lhs_type);
3706 debug_generic_expr (rhs1_type);
3707 debug_generic_expr (rhs2_type);
3708 debug_generic_expr (rhs3_type);
3709 return true;
3712 return false;
3714 case DOT_PROD_EXPR:
3715 case REALIGN_LOAD_EXPR:
3716 /* FIXME. */
3717 return false;
3719 default:
3720 gcc_unreachable ();
3722 return false;
3725 /* Verify a gimple assignment statement STMT with a single rhs.
3726 Returns true if anything is wrong. */
/* Verify a GIMPLE assignment STMT with a single RHS (plain copies,
   loads/stores through references, constants, ADDR_EXPRs and vector
   CONSTRUCTORs).  Returns true if anything is wrong.  */
3728 static bool
3729 verify_gimple_assign_single (gimple stmt)
3731 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3732 tree lhs = gimple_assign_lhs (stmt);
3733 tree lhs_type = TREE_TYPE (lhs);
3734 tree rhs1 = gimple_assign_rhs1 (stmt);
3735 tree rhs1_type = TREE_TYPE (rhs1);
3736 bool res = false;
/* A single-RHS assignment is a plain copy: both sides must have
   trivially-convertible types.  */
3738 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3740 error ("non-trivial conversion at assignment");
3741 debug_generic_expr (lhs_type);
3742 debug_generic_expr (rhs1_type);
3743 return true;
/* A clobber may only target a declaration or a MEM_REF.  */
3746 if (gimple_clobber_p (stmt)
3747 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
3749 error ("non-decl/MEM_REF LHS in clobber statement");
3750 debug_generic_expr (lhs);
3751 return true;
3754 if (handled_component_p (lhs))
3755 res |= verify_types_in_gimple_reference (lhs, true);
3757 /* Special codes we cannot handle via their class. */
3758 switch (rhs_code)
3760 case ADDR_EXPR:
3762 tree op = TREE_OPERAND (rhs1, 0);
3763 if (!is_gimple_addressable (op))
3765 error ("invalid operand in unary expression");
3766 return true;
3769 /* Technically there is no longer a need for matching types, but
3770 gimple hygiene asks for this check. In LTO we can end up
3771 combining incompatible units and thus end up with addresses
3772 of globals that change their type to a common one. */
3773 if (!in_lto_p
3774 && !types_compatible_p (TREE_TYPE (op),
3775 TREE_TYPE (TREE_TYPE (rhs1)))
3776 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
3777 TREE_TYPE (op)))
3779 error ("type mismatch in address expression");
3780 debug_generic_stmt (TREE_TYPE (rhs1));
3781 debug_generic_stmt (TREE_TYPE (op));
3782 return true;
3785 return verify_types_in_gimple_reference (op, true);
3788 /* tcc_reference */
3789 case INDIRECT_REF:
/* INDIRECT_REF was replaced by MEM_REF and must never survive
   into the GIMPLE IL.  */
3790 error ("INDIRECT_REF in gimple IL");
3791 return true;
3793 case COMPONENT_REF:
3794 case BIT_FIELD_REF:
3795 case ARRAY_REF:
3796 case ARRAY_RANGE_REF:
3797 case VIEW_CONVERT_EXPR:
3798 case REALPART_EXPR:
3799 case IMAGPART_EXPR:
3800 case TARGET_MEM_REF:
3801 case MEM_REF:
/* A memory load on the RHS; if the LHS is memory too, the value
   would need a temporary register, so reject register-typed
   memory-to-memory copies.  */
3802 if (!is_gimple_reg (lhs)
3803 && is_gimple_reg_type (TREE_TYPE (lhs)))
3805 error ("invalid rhs for gimple memory store");
3806 debug_generic_stmt (lhs);
3807 debug_generic_stmt (rhs1);
3808 return true;
3810 return res || verify_types_in_gimple_reference (rhs1, false);
3812 /* tcc_constant */
3813 case SSA_NAME:
3814 case INTEGER_CST:
3815 case REAL_CST:
3816 case FIXED_CST:
3817 case COMPLEX_CST:
3818 case VECTOR_CST:
3819 case STRING_CST:
3820 return res;
3822 /* tcc_declaration */
3823 case CONST_DECL:
3824 return res;
3825 case VAR_DECL:
3826 case PARM_DECL:
/* Storing a non-register decl into memory would again be a
   memory-to-memory copy of a register type - invalid in GIMPLE.  */
3827 if (!is_gimple_reg (lhs)
3828 && !is_gimple_reg (rhs1)
3829 && is_gimple_reg_type (TREE_TYPE (lhs)))
3831 error ("invalid rhs for gimple memory store");
3832 debug_generic_stmt (lhs);
3833 debug_generic_stmt (rhs1);
3834 return true;
3836 return res;
3838 case CONSTRUCTOR:
3839 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
3841 unsigned int i;
3842 tree elt_i, elt_v, elt_t = NULL_TREE;
3844 if (CONSTRUCTOR_NELTS (rhs1) == 0)
3845 return res;
3846 /* For vector CONSTRUCTORs we require that either it is empty
3847 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
3848 (then the element count must be correct to cover the whole
3849 outer vector and index must be NULL on all elements, or it is
3850 a CONSTRUCTOR of scalar elements, where we as an exception allow
3851 smaller number of elements (assuming zero filling) and
3852 consecutive indexes as compared to NULL indexes (such
3853 CONSTRUCTORs can appear in the IL from FEs). */
3854 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
/* elt_t records the element type seen on the first iteration and
   is used to check all later elements against it.  */
3856 if (elt_t == NULL_TREE)
3858 elt_t = TREE_TYPE (elt_v);
3859 if (TREE_CODE (elt_t) == VECTOR_TYPE)
/* NOTE(review): this inner declaration shadows the outer elt_t;
   appears intentional but worth confirming against upstream.  */
3861 tree elt_t = TREE_TYPE (elt_v);
3862 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
3863 TREE_TYPE (elt_t)))
3865 error ("incorrect type of vector CONSTRUCTOR"
3866 " elements");
3867 debug_generic_stmt (rhs1);
3868 return true;
3870 else if (CONSTRUCTOR_NELTS (rhs1)
3871 * TYPE_VECTOR_SUBPARTS (elt_t)
3872 != TYPE_VECTOR_SUBPARTS (rhs1_type))
3874 error ("incorrect number of vector CONSTRUCTOR"
3875 " elements");
3876 debug_generic_stmt (rhs1);
3877 return true;
3880 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
3881 elt_t))
3883 error ("incorrect type of vector CONSTRUCTOR elements");
3884 debug_generic_stmt (rhs1);
3885 return true;
3887 else if (CONSTRUCTOR_NELTS (rhs1)
3888 > TYPE_VECTOR_SUBPARTS (rhs1_type))
3890 error ("incorrect number of vector CONSTRUCTOR elements");
3891 debug_generic_stmt (rhs1);
3892 return true;
/* Subsequent elements must match the first element's type.  */
3895 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
3897 error ("incorrect type of vector CONSTRUCTOR elements");
3898 debug_generic_stmt (rhs1);
3899 return true;
/* Indexes, when present, must be consecutive integer constants
   starting at 0 and are disallowed for vector elements.  */
3901 if (elt_i != NULL_TREE
3902 && (TREE_CODE (elt_t) == VECTOR_TYPE
3903 || TREE_CODE (elt_i) != INTEGER_CST
3904 || compare_tree_int (elt_i, i) != 0))
3906 error ("vector CONSTRUCTOR with non-NULL element index");
3907 debug_generic_stmt (rhs1);
3908 return true;
3912 return res;
3913 case OBJ_TYPE_REF:
3914 case ASSERT_EXPR:
3915 case WITH_SIZE_EXPR:
3916 /* FIXME. */
3917 return res;
3919 default:;
3922 return res;
3925 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
3926 is a problem, otherwise false. */
/* Verify the contents of a GIMPLE_ASSIGN STMT by dispatching on the
   RHS operand class (single / unary / binary / ternary).  Returns
   true when there is a problem, otherwise false.  */
3928 static bool
3929 verify_gimple_assign (gimple stmt)
3931 switch (gimple_assign_rhs_class (stmt))
3933 case GIMPLE_SINGLE_RHS:
3934 return verify_gimple_assign_single (stmt);
3936 case GIMPLE_UNARY_RHS:
3937 return verify_gimple_assign_unary (stmt);
3939 case GIMPLE_BINARY_RHS:
3940 return verify_gimple_assign_binary (stmt);
3942 case GIMPLE_TERNARY_RHS:
3943 return verify_gimple_assign_ternary (stmt);
3945 default:
/* Every assignment has one of the classes above.  */
3946 gcc_unreachable ();
3950 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
3951 is a problem, otherwise false. */
/* Verify the contents of a GIMPLE_RETURN STMT against the current
   function's declared return type.  Returns true when there is a
   problem, otherwise false.  */
3953 static bool
3954 verify_gimple_return (gimple stmt)
3956 tree op = gimple_return_retval (stmt);
3957 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
3959 /* We cannot test for present return values as we do not fix up missing
3960 return values from the original source. */
3961 if (op == NULL)
3962 return false;
3964 if (!is_gimple_val (op)
3965 && TREE_CODE (op) != RESULT_DECL)
3967 error ("invalid operand in return statement");
3968 debug_generic_stmt (op);
3969 return true;
/* For DECL_BY_REFERENCE results the RESULT_DECL (or its SSA name) has
   pointer type; compare against its pointed-to type instead.  */
3972 if ((TREE_CODE (op) == RESULT_DECL
3973 && DECL_BY_REFERENCE (op))
3974 || (TREE_CODE (op) == SSA_NAME
3975 && SSA_NAME_VAR (op)
3976 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
3977 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
3978 op = TREE_TYPE (op);
3980 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
3982 error ("invalid conversion in return statement");
3983 debug_generic_stmt (restype);
3984 debug_generic_stmt (TREE_TYPE (op));
3985 return true;
3988 return false;
3992 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
3993 is a problem, otherwise false. */
/* Verify the contents of a GIMPLE_GOTO STMT.  The destination must be
   either a bare LABEL_DECL (direct goto) or a pointer-valued gimple
   value (computed goto).  Returns true when there is a problem,
   otherwise false.  */
3995 static bool
3996 verify_gimple_goto (gimple stmt)
3998 tree dest = gimple_goto_dest (stmt);
4000 /* ??? We have two canonical forms of direct goto destinations, a
4001 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4002 if (TREE_CODE (dest) != LABEL_DECL
4003 && (!is_gimple_val (dest)
4004 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4006 error ("goto destination is neither a label nor a pointer");
4007 return true;
4010 return false;
4013 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4014 is a problem, otherwise false. */
/* Verify the contents of a GIMPLE_SWITCH STMT: the index is an
   integral gimple value, label 0 is the default case, and the
   remaining case labels are well-formed, share one type, and are
   sorted with non-overlapping ranges.  Returns true when there is a
   problem, otherwise false.  */
4016 static bool
4017 verify_gimple_switch (gimple stmt)
4019 unsigned int i, n;
4020 tree elt, prev_upper_bound = NULL_TREE;
4021 tree index_type, elt_type = NULL_TREE;
4023 if (!is_gimple_val (gimple_switch_index (stmt)))
4025 error ("invalid operand to switch statement");
4026 debug_generic_stmt (gimple_switch_index (stmt));
4027 return true;
4030 index_type = TREE_TYPE (gimple_switch_index (stmt));
4031 if (! INTEGRAL_TYPE_P (index_type))
4033 error ("non-integral type switch statement");
4034 debug_generic_expr (index_type);
4035 return true;
/* Label 0 is the default case and must have neither LOW nor HIGH.  */
4038 elt = gimple_switch_label (stmt, 0);
4039 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4041 error ("invalid default case label in switch statement");
4042 debug_generic_expr (elt);
4043 return true;
4046 n = gimple_switch_num_labels (stmt);
4047 for (i = 1; i < n; i++)
4049 elt = gimple_switch_label (stmt, i);
4051 if (! CASE_LOW (elt))
4053 error ("invalid case label in switch statement");
4054 debug_generic_expr (elt);
4055 return true;
/* A range case must have LOW strictly below HIGH.  */
4057 if (CASE_HIGH (elt)
4058 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4060 error ("invalid case range in switch statement");
4061 debug_generic_expr (elt);
4062 return true;
/* All case labels must share the exact type recorded from the
   first non-default label.  */
4065 if (elt_type)
4067 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4068 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4070 error ("type mismatch for case label in switch statement");
4071 debug_generic_expr (elt);
4072 return true;
4075 else
4077 elt_type = TREE_TYPE (CASE_LOW (elt));
4078 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4080 error ("type precision mismatch in switch statement");
4081 return true;
/* Labels must be sorted: each LOW must exceed the previous
   label's upper bound.  */
4085 if (prev_upper_bound)
4087 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4089 error ("case labels not sorted in switch statement");
4090 return true;
4094 prev_upper_bound = CASE_HIGH (elt);
4095 if (! prev_upper_bound)
4096 prev_upper_bound = CASE_LOW (elt);
4099 return false;
4102 /* Verify a gimple debug statement STMT.
4103 Returns true if anything is wrong. */
/* Verify a gimple debug statement STMT.  Debug statements are
   deliberately unconstrained, so this always accepts.
   Returns true if anything is wrong (never, currently).  */
4105 static bool
4106 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4108 /* There isn't much that could be wrong in a gimple debug stmt. A
4109 gimple debug bind stmt, for example, maps a tree, that's usually
4110 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4111 component or member of an aggregate type, to another tree, that
4112 can be an arbitrary expression. These stmts expand into debug
4113 insns, and are converted to debug notes by var-tracking.c. */
4114 return false;
4117 /* Verify a gimple label statement STMT.
4118 Returns true if anything is wrong. */
/* Verify a gimple label statement STMT: the label decl belongs to the
   current function, maps back to this basic block in
   label_to_block_map, and any EH landing-pad number is consistent.
   Returns true if anything is wrong.  */
4120 static bool
4121 verify_gimple_label (gimple stmt)
4123 tree decl = gimple_label_label (stmt);
4124 int uid;
4125 bool err = false;
4127 if (TREE_CODE (decl) != LABEL_DECL
4128 return true;
/* Non-local and forced labels may legitimately belong to another
   function; all others must be in the current one.  */
4129 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4130 && DECL_CONTEXT (decl) != current_function_decl)
4132 error ("label's context is not the current function decl");
4133 err |= true;
4136 uid = LABEL_DECL_UID (decl);
4137 if (cfun->cfg
4138 && (uid == -1 || (*label_to_block_map)[uid] != gimple_bb (stmt)))
4140 error ("incorrect entry in label_to_block_map");
4141 err |= true;
/* If the label is an EH landing pad, it must be the pad's recorded
   post-landing-pad label.  */
4144 uid = EH_LANDING_PAD_NR (decl);
4145 if (uid)
4147 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4148 if (decl != lp->post_landing_pad)
4150 error ("incorrect setting of landing pad number");
4151 err |= true;
4155 return err;
4158 /* Verify the GIMPLE statement STMT. Returns true if there is an
4159 error, otherwise false. */
/* Verify the GIMPLE statement STMT by dispatching on its code to the
   per-statement checkers above.  Returns true if there is an error,
   otherwise false.  */
4161 static bool
4162 verify_gimple_stmt (gimple stmt)
4164 switch (gimple_code (stmt))
4166 case GIMPLE_ASSIGN:
4167 return verify_gimple_assign (stmt);
4169 case GIMPLE_LABEL:
4170 return verify_gimple_label (stmt);
4172 case GIMPLE_CALL:
4173 return verify_gimple_call (stmt);
4175 case GIMPLE_COND:
/* Conditions are checked inline: a comparison code, optional
   LABEL_DECL targets, and a valid boolean comparison.  */
4176 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4178 error ("invalid comparison code in gimple cond");
4179 return true;
4181 if (!(!gimple_cond_true_label (stmt)
4182 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4183 || !(!gimple_cond_false_label (stmt)
4184 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4186 error ("invalid labels in gimple cond");
4187 return true;
4190 return verify_gimple_comparison (boolean_type_node,
4191 gimple_cond_lhs (stmt),
4192 gimple_cond_rhs (stmt));
4194 case GIMPLE_GOTO:
4195 return verify_gimple_goto (stmt);
4197 case GIMPLE_SWITCH:
4198 return verify_gimple_switch (stmt);
4200 case GIMPLE_RETURN:
4201 return verify_gimple_return (stmt);
4203 case GIMPLE_ASM:
4204 return false;
4206 case GIMPLE_TRANSACTION:
4207 return verify_gimple_transaction (stmt);
4209 /* Tuples that do not have tree operands. */
4210 case GIMPLE_NOP:
4211 case GIMPLE_PREDICT:
4212 case GIMPLE_RESX:
4213 case GIMPLE_EH_DISPATCH:
4214 case GIMPLE_EH_MUST_NOT_THROW:
4215 return false;
4217 CASE_GIMPLE_OMP:
4218 /* OpenMP directives are validated by the FE and never operated
4219 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4220 non-gimple expressions when the main index variable has had
4221 its address taken. This does not affect the loop itself
4222 because the header of an GIMPLE_OMP_FOR is merely used to determine
4223 how to setup the parallel iteration. */
4224 return false;
4226 case GIMPLE_DEBUG:
4227 return verify_gimple_debug (stmt);
4229 default:
4230 gcc_unreachable ();
4234 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4235 and false otherwise. */
/* Verify the contents of a GIMPLE_PHI node PHI: the result is an SSA
   name, virtual/real operands are not mixed, and every argument is a
   gimple value (with matching types under ENABLE_TYPES_CHECKING).
   Returns true if there is a problem, and false otherwise.  */
4237 static bool
4238 verify_gimple_phi (gimple phi)
4240 bool err = false;
4241 unsigned i;
4242 tree phi_result = gimple_phi_result (phi);
4243 bool virtual_p;
4245 if (!phi_result)
4247 error ("invalid PHI result");
4248 return true;
/* A virtual PHI result must be the function's single virtual
   operand (the VOP).  */
4251 virtual_p = virtual_operand_p (phi_result);
4252 if (TREE_CODE (phi_result) != SSA_NAME
4253 || (virtual_p
4254 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4256 error ("invalid PHI result");
4257 err = true;
4260 for (i = 0; i < gimple_phi_num_args (phi); i++)
4262 tree t = gimple_phi_arg_def (phi, i);
4264 if (!t)
4266 error ("missing PHI def");
4267 err |= true;
4268 continue;
4270 /* Addressable variables do have SSA_NAMEs but they
4271 are not considered gimple values. */
4272 else if ((TREE_CODE (t) == SSA_NAME
4273 && virtual_p != virtual_operand_p (t))
4274 || (virtual_p
4275 && (TREE_CODE (t) != SSA_NAME
4276 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4277 || (!virtual_p
4278 && !is_gimple_val (t)))
4280 error ("invalid PHI argument");
4281 debug_generic_expr (t);
4282 err |= true;
4284 #ifdef ENABLE_TYPES_CHECKING
4285 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4287 error ("incompatible types in PHI argument %u", i);
4288 debug_generic_stmt (TREE_TYPE (phi_result));
4289 debug_generic_stmt (TREE_TYPE (t));
4290 err |= true;
4292 #endif
4295 return err;
4298 /* Verify the GIMPLE statements inside the sequence STMTS. */
/* Verify the GIMPLE statements inside the sequence STMTS, recursing
   into the bodies of structured statements (BIND, TRY, EH handlers,
   TRANSACTION).  Returns true if any statement is invalid.  */
4300 static bool
4301 verify_gimple_in_seq_2 (gimple_seq stmts)
4303 gimple_stmt_iterator ittr;
4304 bool err = false;
4306 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4308 gimple stmt = gsi_stmt (ittr);
4310 switch (gimple_code (stmt))
4312 case GIMPLE_BIND:
4313 err |= verify_gimple_in_seq_2 (gimple_bind_body (stmt));
4314 break;
4316 case GIMPLE_TRY:
4317 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4318 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4319 break;
4321 case GIMPLE_EH_FILTER:
4322 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4323 break;
4325 case GIMPLE_EH_ELSE:
4326 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt));
4327 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt));
4328 break;
4330 case GIMPLE_CATCH:
4331 err |= verify_gimple_in_seq_2 (gimple_catch_handler (stmt));
4332 break;
4334 case GIMPLE_TRANSACTION:
4335 err |= verify_gimple_transaction (stmt);
4336 break;
4338 default:
/* A leaf statement: verify it and dump it on failure.  */
4340 bool err2 = verify_gimple_stmt (stmt);
4341 if (err2)
4342 debug_gimple_stmt (stmt);
4343 err |= err2;
4348 return err;
4351 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4352 is a problem, otherwise false. */
/* Verify the contents of a GIMPLE_TRANSACTION STMT: an optional label
   (which must be a LABEL_DECL) and a valid body sequence.  Returns
   true if there is a problem, otherwise false.  */
4354 static bool
4355 verify_gimple_transaction (gimple stmt)
4357 tree lab = gimple_transaction_label (stmt);
4358 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4359 return true;
4360 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4364 /* Verify the GIMPLE statements inside the statement list STMTS. */
/* Verify the GIMPLE statements inside the statement list STMTS;
   aborts via internal_error on failure.  Debug entry point, timed
   under TV_TREE_STMT_VERIFY.  */
4366 DEBUG_FUNCTION void
4367 verify_gimple_in_seq (gimple_seq stmts)
4369 timevar_push (TV_TREE_STMT_VERIFY);
4370 if (verify_gimple_in_seq_2 (stmts))
4371 internal_error ("verify_gimple failed");
4372 timevar_pop (TV_TREE_STMT_VERIFY);
4375 /* Return true when the T can be shared. */
/* Return true when the tree node T may legitimately be shared between
   multiple statements (types, decls, constants, SSA names, labels);
   used by the sharing verifier below.  */
4378 static bool
4379 tree_node_can_be_shared (tree t)
4380 if (IS_TYPE_OR_DECL_P (t)
4381 || is_gimple_min_invariant (t)
4382 || TREE_CODE (t) == SSA_NAME
4383 || t == error_mark_node
4384 || TREE_CODE (t) == IDENTIFIER_NODE)
4385 return true;
4387 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4388 return true;
/* NOTE(review): DECL_P is already covered by IS_TYPE_OR_DECL_P above,
   so this test looks redundant - confirm against upstream history.  */
4390 if (DECL_P (t))
4391 return true;
4393 return false;
4396 /* Called via walk_tree. Verify tree sharing. */
/* Called via walk_tree.  Verify tree sharing: DATA is a pointer set
   of already-seen nodes; returns the offending node if *TP was seen
   before and may not be shared, NULL otherwise.  */
4398 static tree
4399 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4401 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4403 if (tree_node_can_be_shared (*tp))
/* Shareable nodes (and everything below them) are exempt.  */
4405 *walk_subtrees = false;
4406 return NULL;
/* pointer_set_insert returns nonzero if the node was already there.  */
4409 if (pointer_set_insert (visited, *tp))
4410 return *tp;
4412 return NULL;
4415 /* Called via walk_gimple_stmt. Verify tree sharing. */
/* Called via walk_gimple_stmt.  Thin adapter that unwraps the
   walk_stmt_info and forwards to verify_node_sharing_1.  */
4417 static tree
4418 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4420 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4421 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
/* Set when a stale statement is found in the EH throw table; checked
   by verify_gimple_in_cfg after the table traversal.  */
4424 static bool eh_error_found;
/* htab_traverse callback: SLOT is a throw_stmt_node, DATA the set of
   statements actually visited in the CFG.  Flags table entries whose
   statement no longer exists.  Always returns 1 to keep traversing.  */
4425 static int
4426 verify_eh_throw_stmt_node (void **slot, void *data)
4428 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4429 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4431 if (!pointer_set_contains (visited, node->stmt))
4433 error ("dead STMT in EH table");
4434 debug_gimple_stmt (node->stmt);
4435 eh_error_found = true;
4437 return 1;
4440 /* Verify if the location LOCs block is in BLOCKS. */
/* Verify that the block of location LOC is in the set BLOCKS, walking
   up through the block's own source locations recursively.  Returns
   true on error.  */
4442 static bool
4443 verify_location (pointer_set_t *blocks, location_t loc)
4445 tree block = LOCATION_BLOCK (loc);
4446 if (block != NULL_TREE
4447 && !pointer_set_contains (blocks, block))
4449 error ("location references block not in block tree");
4450 return true;
/* Also check the block's own source location, recursively.  */
4452 if (block != NULL_TREE)
4453 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4454 return false;
4457 /* Called via walk_tree. Verify that expressions have no blocks. */
/* Called via walk_tree.  Verify that expressions have no location
   blocks attached; returns the offending expression if one does,
   NULL otherwise.  Used for debug/value expressions of decls.  */
4459 static tree
4460 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4462 if (!EXPR_P (*tp))
4464 *walk_subtrees = false;
4465 return NULL;
4468 location_t loc = EXPR_LOCATION (*tp);
4469 if (LOCATION_BLOCK (loc) != NULL)
4470 return *tp;
4472 return NULL;
4475 /* Called via walk_tree. Verify locations of expressions. */
/* Called via walk_tree.  Verify locations of expressions against the
   pointer set of valid blocks in DATA.  DEBUG_EXPRs and VALUE_EXPRs
   of decls must carry no block at all.  Returns the offending tree
   or NULL.  */
4477 static tree
4478 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4480 struct pointer_set_t *blocks = (struct pointer_set_t *) data;
4482 if (TREE_CODE (*tp) == VAR_DECL
4483 && DECL_HAS_DEBUG_EXPR_P (*tp))
4485 tree t = DECL_DEBUG_EXPR (*tp);
4486 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4487 if (addr)
4488 return addr;
4490 if ((TREE_CODE (*tp) == VAR_DECL
4491 || TREE_CODE (*tp) == PARM_DECL
4492 || TREE_CODE (*tp) == RESULT_DECL)
4493 && DECL_HAS_VALUE_EXPR_P (*tp))
4495 tree t = DECL_VALUE_EXPR (*tp);
4496 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4497 if (addr)
4498 return addr;
/* Non-expressions carry no location; skip their subtrees.  */
4501 if (!EXPR_P (*tp))
4503 *walk_subtrees = false;
4504 return NULL;
4507 location_t loc = EXPR_LOCATION (*tp);
4508 if (verify_location (blocks, loc))
4509 return *tp;
4511 return NULL;
4514 /* Called via walk_gimple_op. Verify locations of expressions. */
/* Called via walk_gimple_op.  Thin adapter that unwraps the
   walk_stmt_info and forwards to verify_expr_location_1.  */
4516 static tree
4517 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4519 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4520 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4523 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
/* Insert all subblocks of BLOCK into the pointer set BLOCKS and
   recurse into each of them.  BLOCK itself is not inserted here;
   the caller does that (see verify_gimple_in_cfg).  */
4525 static void
4526 collect_subblocks (pointer_set_t *blocks, tree block)
4528 tree t;
4529 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4531 pointer_set_insert (blocks, t);
4532 collect_subblocks (blocks, t);
4536 /* Verify the GIMPLE statements in the CFG of FN. */
/* Verify the GIMPLE statements in the CFG of FN: per-statement
   validity, tree-node sharing, location/block consistency, PHI node
   sanity, and EH table consistency.  Aborts via internal_error on
   failure.  Debug entry point, timed under TV_TREE_STMT_VERIFY.  */
4538 DEBUG_FUNCTION void
4539 verify_gimple_in_cfg (struct function *fn)
4541 basic_block bb;
4542 bool err = false;
4543 struct pointer_set_t *visited, *visited_stmts, *blocks;
4545 timevar_push (TV_TREE_STMT_VERIFY);
4546 visited = pointer_set_create ();
4547 visited_stmts = pointer_set_create ();
4549 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4550 blocks = pointer_set_create ();
4551 if (DECL_INITIAL (fn->decl))
4553 pointer_set_insert (blocks, DECL_INITIAL (fn->decl));
4554 collect_subblocks (blocks, DECL_INITIAL (fn->decl));
4557 FOR_EACH_BB_FN (bb, fn)
4559 gimple_stmt_iterator gsi;
/* First pass over this block: the PHI nodes.  */
4561 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4563 gimple phi = gsi_stmt (gsi);
4564 bool err2 = false;
4565 unsigned i;
4567 pointer_set_insert (visited_stmts, phi);
4569 if (gimple_bb (phi) != bb)
4571 error ("gimple_bb (phi) is set to a wrong basic block");
4572 err2 = true;
4575 err2 |= verify_gimple_phi (phi);
4577 /* Only PHI arguments have locations. */
4578 if (gimple_location (phi) != UNKNOWN_LOCATION)
4580 error ("PHI node with location");
4581 err2 = true;
4584 for (i = 0; i < gimple_phi_num_args (phi); i++)
4586 tree arg = gimple_phi_arg_def (phi, i);
4587 tree addr = walk_tree (&arg, verify_node_sharing_1,
4588 visited, NULL);
4589 if (addr)
4591 error ("incorrect sharing of tree nodes");
4592 debug_generic_expr (addr);
4593 err2 |= true;
4595 location_t loc = gimple_phi_arg_location (phi, i);
4596 if (virtual_operand_p (gimple_phi_result (phi))
4597 && loc != UNKNOWN_LOCATION)
4599 error ("virtual PHI with argument locations");
4600 err2 = true;
4602 addr = walk_tree (&arg, verify_expr_location_1, blocks, NULL);
4603 if (addr)
4605 debug_generic_expr (addr);
4606 err2 = true;
4608 err2 |= verify_location (blocks, loc);
/* Dump the whole PHI once if anything about it was wrong.  */
4611 if (err2)
4612 debug_gimple_stmt (phi);
4613 err |= err2;
/* Second pass: the ordinary statements of the block.  */
4616 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4618 gimple stmt = gsi_stmt (gsi);
4619 bool err2 = false;
4620 struct walk_stmt_info wi;
4621 tree addr;
4622 int lp_nr;
4624 pointer_set_insert (visited_stmts, stmt);
4626 if (gimple_bb (stmt) != bb)
4628 error ("gimple_bb (stmt) is set to a wrong basic block");
4629 err2 = true;
4632 err2 |= verify_gimple_stmt (stmt);
4633 err2 |= verify_location (blocks, gimple_location (stmt));
4635 memset (&wi, 0, sizeof (wi));
4636 wi.info = (void *) visited;
4637 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
4638 if (addr)
4640 error ("incorrect sharing of tree nodes");
4641 debug_generic_expr (addr);
4642 err2 |= true;
4645 memset (&wi, 0, sizeof (wi));
4646 wi.info = (void *) blocks;
4647 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
4648 if (addr)
4650 debug_generic_expr (addr);
4651 err2 |= true;
4654 /* ??? Instead of not checking these stmts at all the walker
4655 should know its context via wi. */
4656 if (!is_gimple_debug (stmt)
4657 && !is_gimple_omp (stmt))
4659 memset (&wi, 0, sizeof (wi));
4660 addr = walk_gimple_op (stmt, verify_expr, &wi);
4661 if (addr)
4663 debug_generic_expr (addr);
4664 inform (gimple_location (stmt), "in statement");
4665 err2 |= true;
4669 /* If the statement is marked as part of an EH region, then it is
4670 expected that the statement could throw. Verify that when we
4671 have optimizations that simplify statements such that we prove
4672 that they cannot throw, that we update other data structures
4673 to match. */
4674 lp_nr = lookup_stmt_eh_lp (stmt);
4675 if (lp_nr != 0)
4677 if (!stmt_could_throw_p (stmt))
4679 error ("statement marked for throw, but doesn%'t");
4680 err2 |= true;
4682 else if (lp_nr > 0
4683 && !gsi_one_before_end_p (gsi)
4684 && stmt_can_throw_internal (stmt))
4686 error ("statement marked for throw in middle of block");
4687 err2 |= true;
4691 if (err2)
4692 debug_gimple_stmt (stmt);
4693 err |= err2;
/* Finally cross-check the EH throw table against the statements we
   actually visited; verify_eh_throw_stmt_node sets eh_error_found.  */
4697 eh_error_found = false;
4698 if (get_eh_throw_stmt_table (cfun))
4699 htab_traverse (get_eh_throw_stmt_table (cfun),
4700 verify_eh_throw_stmt_node,
4701 visited_stmts);
4703 if (err || eh_error_found)
4704 internal_error ("verify_gimple failed");
4706 pointer_set_destroy (visited);
4707 pointer_set_destroy (visited_stmts);
4708 pointer_set_destroy (blocks);
4709 verify_histograms ();
4710 timevar_pop (TV_TREE_STMT_VERIFY);
4714 /* Verifies that the flow information is OK. */
4716 static int
4717 gimple_verify_flow_info (void)
4719 int err = 0;
4720 basic_block bb;
4721 gimple_stmt_iterator gsi;
4722 gimple stmt;
4723 edge e;
4724 edge_iterator ei;
4726 if (ENTRY_BLOCK_PTR->il.gimple.seq || ENTRY_BLOCK_PTR->il.gimple.phi_nodes)
4728 error ("ENTRY_BLOCK has IL associated with it");
4729 err = 1;
4732 if (EXIT_BLOCK_PTR->il.gimple.seq || EXIT_BLOCK_PTR->il.gimple.phi_nodes)
4734 error ("EXIT_BLOCK has IL associated with it");
4735 err = 1;
4738 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4739 if (e->flags & EDGE_FALLTHRU)
4741 error ("fallthru to exit from bb %d", e->src->index);
4742 err = 1;
4745 FOR_EACH_BB (bb)
4747 bool found_ctrl_stmt = false;
4749 stmt = NULL;
4751 /* Skip labels on the start of basic block. */
4752 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4754 tree label;
4755 gimple prev_stmt = stmt;
4757 stmt = gsi_stmt (gsi);
4759 if (gimple_code (stmt) != GIMPLE_LABEL)
4760 break;
4762 label = gimple_label_label (stmt);
4763 if (prev_stmt && DECL_NONLOCAL (label))
4765 error ("nonlocal label ");
4766 print_generic_expr (stderr, label, 0);
4767 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4768 bb->index);
4769 err = 1;
4772 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
4774 error ("EH landing pad label ");
4775 print_generic_expr (stderr, label, 0);
4776 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4777 bb->index);
4778 err = 1;
4781 if (label_to_block (label) != bb)
4783 error ("label ");
4784 print_generic_expr (stderr, label, 0);
4785 fprintf (stderr, " to block does not match in bb %d",
4786 bb->index);
4787 err = 1;
4790 if (decl_function_context (label) != current_function_decl)
4792 error ("label ");
4793 print_generic_expr (stderr, label, 0);
4794 fprintf (stderr, " has incorrect context in bb %d",
4795 bb->index);
4796 err = 1;
4800 /* Verify that body of basic block BB is free of control flow. */
4801 for (; !gsi_end_p (gsi); gsi_next (&gsi))
4803 gimple stmt = gsi_stmt (gsi);
4805 if (found_ctrl_stmt)
4807 error ("control flow in the middle of basic block %d",
4808 bb->index);
4809 err = 1;
4812 if (stmt_ends_bb_p (stmt))
4813 found_ctrl_stmt = true;
4815 if (gimple_code (stmt) == GIMPLE_LABEL)
4817 error ("label ");
4818 print_generic_expr (stderr, gimple_label_label (stmt), 0);
4819 fprintf (stderr, " in the middle of basic block %d", bb->index);
4820 err = 1;
4824 gsi = gsi_last_bb (bb);
4825 if (gsi_end_p (gsi))
4826 continue;
4828 stmt = gsi_stmt (gsi);
4830 if (gimple_code (stmt) == GIMPLE_LABEL)
4831 continue;
4833 err |= verify_eh_edges (stmt);
4835 if (is_ctrl_stmt (stmt))
4837 FOR_EACH_EDGE (e, ei, bb->succs)
4838 if (e->flags & EDGE_FALLTHRU)
4840 error ("fallthru edge after a control statement in bb %d",
4841 bb->index);
4842 err = 1;
4846 if (gimple_code (stmt) != GIMPLE_COND)
4848 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
4849 after anything else but if statement. */
4850 FOR_EACH_EDGE (e, ei, bb->succs)
4851 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4853 error ("true/false edge after a non-GIMPLE_COND in bb %d",
4854 bb->index);
4855 err = 1;
4859 switch (gimple_code (stmt))
4861 case GIMPLE_COND:
4863 edge true_edge;
4864 edge false_edge;
4866 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4868 if (!true_edge
4869 || !false_edge
4870 || !(true_edge->flags & EDGE_TRUE_VALUE)
4871 || !(false_edge->flags & EDGE_FALSE_VALUE)
4872 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4873 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4874 || EDGE_COUNT (bb->succs) >= 3)
4876 error ("wrong outgoing edge flags at end of bb %d",
4877 bb->index);
4878 err = 1;
4881 break;
4883 case GIMPLE_GOTO:
4884 if (simple_goto_p (stmt))
4886 error ("explicit goto at end of bb %d", bb->index);
4887 err = 1;
4889 else
4891 /* FIXME. We should double check that the labels in the
4892 destination blocks have their address taken. */
4893 FOR_EACH_EDGE (e, ei, bb->succs)
4894 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
4895 | EDGE_FALSE_VALUE))
4896 || !(e->flags & EDGE_ABNORMAL))
4898 error ("wrong outgoing edge flags at end of bb %d",
4899 bb->index);
4900 err = 1;
4903 break;
4905 case GIMPLE_CALL:
4906 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
4907 break;
4908 /* ... fallthru ... */
4909 case GIMPLE_RETURN:
4910 if (!single_succ_p (bb)
4911 || (single_succ_edge (bb)->flags
4912 & (EDGE_FALLTHRU | EDGE_ABNORMAL
4913 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4915 error ("wrong outgoing edge flags at end of bb %d", bb->index);
4916 err = 1;
4918 if (single_succ (bb) != EXIT_BLOCK_PTR)
4920 error ("return edge does not point to exit in bb %d",
4921 bb->index);
4922 err = 1;
4924 break;
4926 case GIMPLE_SWITCH:
4928 tree prev;
4929 edge e;
4930 size_t i, n;
4932 n = gimple_switch_num_labels (stmt);
4934 /* Mark all the destination basic blocks. */
4935 for (i = 0; i < n; ++i)
4937 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4938 basic_block label_bb = label_to_block (lab);
4939 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
4940 label_bb->aux = (void *)1;
4943 /* Verify that the case labels are sorted. */
4944 prev = gimple_switch_label (stmt, 0);
4945 for (i = 1; i < n; ++i)
4947 tree c = gimple_switch_label (stmt, i);
4948 if (!CASE_LOW (c))
4950 error ("found default case not at the start of "
4951 "case vector");
4952 err = 1;
4953 continue;
4955 if (CASE_LOW (prev)
4956 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
4958 error ("case labels not sorted: ");
4959 print_generic_expr (stderr, prev, 0);
4960 fprintf (stderr," is greater than ");
4961 print_generic_expr (stderr, c, 0);
4962 fprintf (stderr," but comes before it.\n");
4963 err = 1;
4965 prev = c;
4967 /* VRP will remove the default case if it can prove it will
4968 never be executed. So do not verify there always exists
4969 a default case here. */
4971 FOR_EACH_EDGE (e, ei, bb->succs)
4973 if (!e->dest->aux)
4975 error ("extra outgoing edge %d->%d",
4976 bb->index, e->dest->index);
4977 err = 1;
4980 e->dest->aux = (void *)2;
4981 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
4982 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4984 error ("wrong outgoing edge flags at end of bb %d",
4985 bb->index);
4986 err = 1;
4990 /* Check that we have all of them. */
4991 for (i = 0; i < n; ++i)
4993 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4994 basic_block label_bb = label_to_block (lab);
4996 if (label_bb->aux != (void *)2)
4998 error ("missing edge %i->%i", bb->index, label_bb->index);
4999 err = 1;
5003 FOR_EACH_EDGE (e, ei, bb->succs)
5004 e->dest->aux = (void *)0;
5006 break;
5008 case GIMPLE_EH_DISPATCH:
5009 err |= verify_eh_dispatch_edge (stmt);
5010 break;
5012 default:
5013 break;
5017 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5018 verify_dominators (CDI_DOMINATORS);
5020 return err;
5024 /* Updates phi nodes after creating a forwarder block joined
5025 by edge FALLTHRU. */
5027 static void
5028 gimple_make_forwarder_block (edge fallthru)
5030 edge e;
5031 edge_iterator ei;
5032 basic_block dummy, bb;
5033 tree var;
5034 gimple_stmt_iterator gsi;
5036 dummy = fallthru->src;
5037 bb = fallthru->dest;
5039 if (single_pred_p (bb))
5040 return;
5042 /* If we redirected a branch we must create new PHI nodes at the
5043 start of BB. */
5044 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5046 gimple phi, new_phi;
5048 phi = gsi_stmt (gsi);
5049 var = gimple_phi_result (phi);
5050 new_phi = create_phi_node (var, bb);
5051 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5052 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5053 UNKNOWN_LOCATION);
5056 /* Add the arguments we have stored on edges. */
5057 FOR_EACH_EDGE (e, ei, bb->preds)
5059 if (e == fallthru)
5060 continue;
5062 flush_pending_stmts (e);
5067 /* Return a non-special label in the head of basic block BLOCK.
5068 Create one if it doesn't exist. */
5070 tree
5071 gimple_block_label (basic_block bb)
5073 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5074 bool first = true;
5075 tree label;
5076 gimple stmt;
5078 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5080 stmt = gsi_stmt (i);
5081 if (gimple_code (stmt) != GIMPLE_LABEL)
5082 break;
5083 label = gimple_label_label (stmt);
5084 if (!DECL_NONLOCAL (label))
5086 if (!first)
5087 gsi_move_before (&i, &s);
5088 return label;
5092 label = create_artificial_label (UNKNOWN_LOCATION);
5093 stmt = gimple_build_label (label);
5094 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5095 return label;
5099 /* Attempt to perform edge redirection by replacing a possibly complex
5100 jump instruction by a goto or by removing the jump completely.
5101 This can apply only if all edges now point to the same block. The
5102 parameters and return values are equivalent to
5103 redirect_edge_and_branch. */
5105 static edge
5106 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5108 basic_block src = e->src;
5109 gimple_stmt_iterator i;
5110 gimple stmt;
5112 /* We can replace or remove a complex jump only when we have exactly
5113 two edges. */
5114 if (EDGE_COUNT (src->succs) != 2
5115 /* Verify that all targets will be TARGET. Specifically, the
5116 edge that is not E must also go to TARGET. */
5117 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5118 return NULL;
5120 i = gsi_last_bb (src);
5121 if (gsi_end_p (i))
5122 return NULL;
5124 stmt = gsi_stmt (i);
5126 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5128 gsi_remove (&i, true);
5129 e = ssa_redirect_edge (e, target);
5130 e->flags = EDGE_FALLTHRU;
5131 return e;
5134 return NULL;
5138 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5139 edge representing the redirected branch. */
5141 static edge
5142 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5144 basic_block bb = e->src;
5145 gimple_stmt_iterator gsi;
5146 edge ret;
5147 gimple stmt;
5149 if (e->flags & EDGE_ABNORMAL)
5150 return NULL;
5152 if (e->dest == dest)
5153 return NULL;
5155 if (e->flags & EDGE_EH)
5156 return redirect_eh_edge (e, dest);
5158 if (e->src != ENTRY_BLOCK_PTR)
5160 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5161 if (ret)
5162 return ret;
5165 gsi = gsi_last_bb (bb);
5166 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5168 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5170 case GIMPLE_COND:
5171 /* For COND_EXPR, we only need to redirect the edge. */
5172 break;
5174 case GIMPLE_GOTO:
5175 /* No non-abnormal edges should lead from a non-simple goto, and
5176 simple ones should be represented implicitly. */
5177 gcc_unreachable ();
5179 case GIMPLE_SWITCH:
5181 tree label = gimple_block_label (dest);
5182 tree cases = get_cases_for_edge (e, stmt);
5184 /* If we have a list of cases associated with E, then use it
5185 as it's a lot faster than walking the entire case vector. */
5186 if (cases)
5188 edge e2 = find_edge (e->src, dest);
5189 tree last, first;
5191 first = cases;
5192 while (cases)
5194 last = cases;
5195 CASE_LABEL (cases) = label;
5196 cases = CASE_CHAIN (cases);
5199 /* If there was already an edge in the CFG, then we need
5200 to move all the cases associated with E to E2. */
5201 if (e2)
5203 tree cases2 = get_cases_for_edge (e2, stmt);
5205 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5206 CASE_CHAIN (cases2) = first;
5208 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5210 else
5212 size_t i, n = gimple_switch_num_labels (stmt);
5214 for (i = 0; i < n; i++)
5216 tree elt = gimple_switch_label (stmt, i);
5217 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5218 CASE_LABEL (elt) = label;
5222 break;
5224 case GIMPLE_ASM:
5226 int i, n = gimple_asm_nlabels (stmt);
5227 tree label = NULL;
5229 for (i = 0; i < n; ++i)
5231 tree cons = gimple_asm_label_op (stmt, i);
5232 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5234 if (!label)
5235 label = gimple_block_label (dest);
5236 TREE_VALUE (cons) = label;
5240 /* If we didn't find any label matching the former edge in the
5241 asm labels, we must be redirecting the fallthrough
5242 edge. */
5243 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5245 break;
5247 case GIMPLE_RETURN:
5248 gsi_remove (&gsi, true);
5249 e->flags |= EDGE_FALLTHRU;
5250 break;
5252 case GIMPLE_OMP_RETURN:
5253 case GIMPLE_OMP_CONTINUE:
5254 case GIMPLE_OMP_SECTIONS_SWITCH:
5255 case GIMPLE_OMP_FOR:
5256 /* The edges from OMP constructs can be simply redirected. */
5257 break;
5259 case GIMPLE_EH_DISPATCH:
5260 if (!(e->flags & EDGE_FALLTHRU))
5261 redirect_eh_dispatch_edge (stmt, e, dest);
5262 break;
5264 case GIMPLE_TRANSACTION:
5265 /* The ABORT edge has a stored label associated with it, otherwise
5266 the edges are simply redirectable. */
5267 if (e->flags == 0)
5268 gimple_transaction_set_label (stmt, gimple_block_label (dest));
5269 break;
5271 default:
5272 /* Otherwise it must be a fallthru edge, and we don't need to
5273 do anything besides redirecting it. */
5274 gcc_assert (e->flags & EDGE_FALLTHRU);
5275 break;
5278 /* Update/insert PHI nodes as necessary. */
5280 /* Now update the edges in the CFG. */
5281 e = ssa_redirect_edge (e, dest);
5283 return e;
5286 /* Returns true if it is possible to remove edge E by redirecting
5287 it to the destination of the other edge from E->src. */
5289 static bool
5290 gimple_can_remove_branch_p (const_edge e)
5292 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5293 return false;
5295 return true;
5298 /* Simple wrapper, as we can always redirect fallthru edges. */
5300 static basic_block
5301 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5303 e = gimple_redirect_edge_and_branch (e, dest);
5304 gcc_assert (e);
5306 return NULL;
5310 /* Splits basic block BB after statement STMT (but at least after the
5311 labels). If STMT is NULL, BB is split just after the labels. */
5313 static basic_block
5314 gimple_split_block (basic_block bb, void *stmt)
5316 gimple_stmt_iterator gsi;
5317 gimple_stmt_iterator gsi_tgt;
5318 gimple act;
5319 gimple_seq list;
5320 basic_block new_bb;
5321 edge e;
5322 edge_iterator ei;
5324 new_bb = create_empty_bb (bb);
5326 /* Redirect the outgoing edges. */
5327 new_bb->succs = bb->succs;
5328 bb->succs = NULL;
5329 FOR_EACH_EDGE (e, ei, new_bb->succs)
5330 e->src = new_bb;
5332 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5333 stmt = NULL;
5335 /* Move everything from GSI to the new basic block. */
5336 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5338 act = gsi_stmt (gsi);
5339 if (gimple_code (act) == GIMPLE_LABEL)
5340 continue;
5342 if (!stmt)
5343 break;
5345 if (stmt == act)
5347 gsi_next (&gsi);
5348 break;
5352 if (gsi_end_p (gsi))
5353 return new_bb;
5355 /* Split the statement list - avoid re-creating new containers as this
5356 brings ugly quadratic memory consumption in the inliner.
5357 (We are still quadratic since we need to update stmt BB pointers,
5358 sadly.) */
5359 gsi_split_seq_before (&gsi, &list);
5360 set_bb_seq (new_bb, list);
5361 for (gsi_tgt = gsi_start (list);
5362 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5363 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5365 return new_bb;
5369 /* Moves basic block BB after block AFTER. */
5371 static bool
5372 gimple_move_block_after (basic_block bb, basic_block after)
5374 if (bb->prev_bb == after)
5375 return true;
5377 unlink_block (bb);
5378 link_block (bb, after);
5380 return true;
5384 /* Return TRUE if block BB has no executable statements, otherwise return
5385 FALSE. */
5387 static bool
5388 gimple_empty_block_p (basic_block bb)
5390 /* BB must have no executable statements. */
5391 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5392 if (phi_nodes (bb))
5393 return false;
5394 if (gsi_end_p (gsi))
5395 return true;
5396 if (is_gimple_debug (gsi_stmt (gsi)))
5397 gsi_next_nondebug (&gsi);
5398 return gsi_end_p (gsi);
5402 /* Split a basic block if it ends with a conditional branch and if the
5403 other part of the block is not empty. */
5405 static basic_block
5406 gimple_split_block_before_cond_jump (basic_block bb)
5408 gimple last, split_point;
5409 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5410 if (gsi_end_p (gsi))
5411 return NULL;
5412 last = gsi_stmt (gsi);
5413 if (gimple_code (last) != GIMPLE_COND
5414 && gimple_code (last) != GIMPLE_SWITCH)
5415 return NULL;
5416 gsi_prev_nondebug (&gsi);
5417 split_point = gsi_stmt (gsi);
5418 return split_block (bb, split_point)->dest;
5422 /* Return true if basic_block can be duplicated. */
5424 static bool
5425 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5427 return true;
5430 /* Create a duplicate of the basic block BB. NOTE: This does not
5431 preserve SSA form. */
5433 static basic_block
5434 gimple_duplicate_bb (basic_block bb)
5436 basic_block new_bb;
5437 gimple_stmt_iterator gsi, gsi_tgt;
5438 gimple_seq phis = phi_nodes (bb);
5439 gimple phi, stmt, copy;
5441 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
5443 /* Copy the PHI nodes. We ignore PHI node arguments here because
5444 the incoming edges have not been setup yet. */
5445 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
5447 phi = gsi_stmt (gsi);
5448 copy = create_phi_node (NULL_TREE, new_bb);
5449 create_new_def_for (gimple_phi_result (phi), copy,
5450 gimple_phi_result_ptr (copy));
5451 gimple_set_uid (copy, gimple_uid (phi));
5454 gsi_tgt = gsi_start_bb (new_bb);
5455 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5457 def_operand_p def_p;
5458 ssa_op_iter op_iter;
5459 tree lhs;
5461 stmt = gsi_stmt (gsi);
5462 if (gimple_code (stmt) == GIMPLE_LABEL)
5463 continue;
5465 /* Don't duplicate label debug stmts. */
5466 if (gimple_debug_bind_p (stmt)
5467 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5468 == LABEL_DECL)
5469 continue;
5471 /* Create a new copy of STMT and duplicate STMT's virtual
5472 operands. */
5473 copy = gimple_copy (stmt);
5474 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5476 maybe_duplicate_eh_stmt (copy, stmt);
5477 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5479 /* When copying around a stmt writing into a local non-user
5480 aggregate, make sure it won't share stack slot with other
5481 vars. */
5482 lhs = gimple_get_lhs (stmt);
5483 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5485 tree base = get_base_address (lhs);
5486 if (base
5487 && (TREE_CODE (base) == VAR_DECL
5488 || TREE_CODE (base) == RESULT_DECL)
5489 && DECL_IGNORED_P (base)
5490 && !TREE_STATIC (base)
5491 && !DECL_EXTERNAL (base)
5492 && (TREE_CODE (base) != VAR_DECL
5493 || !DECL_HAS_VALUE_EXPR_P (base)))
5494 DECL_NONSHAREABLE (base) = 1;
5497 /* Create new names for all the definitions created by COPY and
5498 add replacement mappings for each new name. */
5499 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5500 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5503 return new_bb;
5506 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5508 static void
5509 add_phi_args_after_copy_edge (edge e_copy)
5511 basic_block bb, bb_copy = e_copy->src, dest;
5512 edge e;
5513 edge_iterator ei;
5514 gimple phi, phi_copy;
5515 tree def;
5516 gimple_stmt_iterator psi, psi_copy;
5518 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5519 return;
5521 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5523 if (e_copy->dest->flags & BB_DUPLICATED)
5524 dest = get_bb_original (e_copy->dest);
5525 else
5526 dest = e_copy->dest;
5528 e = find_edge (bb, dest);
5529 if (!e)
5531 /* During loop unrolling the target of the latch edge is copied.
5532 In this case we are not looking for edge to dest, but to
5533 duplicated block whose original was dest. */
5534 FOR_EACH_EDGE (e, ei, bb->succs)
5536 if ((e->dest->flags & BB_DUPLICATED)
5537 && get_bb_original (e->dest) == dest)
5538 break;
5541 gcc_assert (e != NULL);
5544 for (psi = gsi_start_phis (e->dest),
5545 psi_copy = gsi_start_phis (e_copy->dest);
5546 !gsi_end_p (psi);
5547 gsi_next (&psi), gsi_next (&psi_copy))
5549 phi = gsi_stmt (psi);
5550 phi_copy = gsi_stmt (psi_copy);
5551 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5552 add_phi_arg (phi_copy, def, e_copy,
5553 gimple_phi_arg_location_from_edge (phi, e));
5558 /* Basic block BB_COPY was created by code duplication. Add phi node
5559 arguments for edges going out of BB_COPY. The blocks that were
5560 duplicated have BB_DUPLICATED set. */
5562 void
5563 add_phi_args_after_copy_bb (basic_block bb_copy)
5565 edge e_copy;
5566 edge_iterator ei;
5568 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5570 add_phi_args_after_copy_edge (e_copy);
5574 /* Blocks in REGION_COPY array of length N_REGION were created by
5575 duplication of basic blocks. Add phi node arguments for edges
5576 going from these blocks. If E_COPY is not NULL, also add
5577 phi node arguments for its destination.*/
5579 void
5580 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5581 edge e_copy)
5583 unsigned i;
5585 for (i = 0; i < n_region; i++)
5586 region_copy[i]->flags |= BB_DUPLICATED;
5588 for (i = 0; i < n_region; i++)
5589 add_phi_args_after_copy_bb (region_copy[i]);
5590 if (e_copy)
5591 add_phi_args_after_copy_edge (e_copy);
5593 for (i = 0; i < n_region; i++)
5594 region_copy[i]->flags &= ~BB_DUPLICATED;
5597 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5598 important exit edge EXIT. By important we mean that no SSA name defined
5599 inside region is live over the other exit edges of the region. All entry
5600 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5601 to the duplicate of the region. Dominance and loop information is
5602 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
5603 UPDATE_DOMINANCE is false then we assume that the caller will update the
5604 dominance information after calling this function. The new basic
5605 blocks are stored to REGION_COPY in the same order as they had in REGION,
5606 provided that REGION_COPY is not NULL.
5607 The function returns false if it is unable to copy the region,
5608 true otherwise. */
5610 bool
5611 gimple_duplicate_sese_region (edge entry, edge exit,
5612 basic_block *region, unsigned n_region,
5613 basic_block *region_copy,
5614 bool update_dominance)
5616 unsigned i;
5617 bool free_region_copy = false, copying_header = false;
5618 struct loop *loop = entry->dest->loop_father;
5619 edge exit_copy;
5620 vec<basic_block> doms;
5621 edge redirected;
5622 int total_freq = 0, entry_freq = 0;
5623 gcov_type total_count = 0, entry_count = 0;
5625 if (!can_copy_bbs_p (region, n_region))
5626 return false;
5628 /* Some sanity checking. Note that we do not check for all possible
5629 missuses of the functions. I.e. if you ask to copy something weird,
5630 it will work, but the state of structures probably will not be
5631 correct. */
5632 for (i = 0; i < n_region; i++)
5634 /* We do not handle subloops, i.e. all the blocks must belong to the
5635 same loop. */
5636 if (region[i]->loop_father != loop)
5637 return false;
5639 if (region[i] != entry->dest
5640 && region[i] == loop->header)
5641 return false;
5644 set_loop_copy (loop, loop);
5646 /* In case the function is used for loop header copying (which is the primary
5647 use), ensure that EXIT and its copy will be new latch and entry edges. */
5648 if (loop->header == entry->dest)
5650 copying_header = true;
5651 set_loop_copy (loop, loop_outer (loop));
5653 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5654 return false;
5656 for (i = 0; i < n_region; i++)
5657 if (region[i] != exit->src
5658 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5659 return false;
5662 if (!region_copy)
5664 region_copy = XNEWVEC (basic_block, n_region);
5665 free_region_copy = true;
5668 initialize_original_copy_tables ();
5670 /* Record blocks outside the region that are dominated by something
5671 inside. */
5672 if (update_dominance)
5674 doms.create (0);
5675 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5678 if (entry->dest->count)
5680 total_count = entry->dest->count;
5681 entry_count = entry->count;
5682 /* Fix up corner cases, to avoid division by zero or creation of negative
5683 frequencies. */
5684 if (entry_count > total_count)
5685 entry_count = total_count;
5687 else
5689 total_freq = entry->dest->frequency;
5690 entry_freq = EDGE_FREQUENCY (entry);
5691 /* Fix up corner cases, to avoid division by zero or creation of negative
5692 frequencies. */
5693 if (total_freq == 0)
5694 total_freq = 1;
5695 else if (entry_freq > total_freq)
5696 entry_freq = total_freq;
5699 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5700 split_edge_bb_loc (entry), update_dominance);
5701 if (total_count)
5703 scale_bbs_frequencies_gcov_type (region, n_region,
5704 total_count - entry_count,
5705 total_count);
5706 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
5707 total_count);
5709 else
5711 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5712 total_freq);
5713 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
5716 if (copying_header)
5718 loop->header = exit->dest;
5719 loop->latch = exit->src;
5722 /* Redirect the entry and add the phi node arguments. */
5723 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
5724 gcc_assert (redirected != NULL);
5725 flush_pending_stmts (entry);
5727 /* Concerning updating of dominators: We must recount dominators
5728 for entry block and its copy. Anything that is outside of the
5729 region, but was dominated by something inside needs recounting as
5730 well. */
5731 if (update_dominance)
5733 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5734 doms.safe_push (get_bb_original (entry->dest));
5735 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5736 doms.release ();
5739 /* Add the other PHI node arguments. */
5740 add_phi_args_after_copy (region_copy, n_region, NULL);
5742 if (free_region_copy)
5743 free (region_copy);
5745 free_original_copy_tables ();
5746 return true;
5749 /* Checks if BB is part of the region defined by N_REGION BBS. */
5750 static bool
5751 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
5753 unsigned int n;
5755 for (n = 0; n < n_region; n++)
5757 if (bb == bbs[n])
5758 return true;
5760 return false;
5763 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5764 are stored to REGION_COPY in the same order in that they appear
5765 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5766 the region, EXIT an exit from it. The condition guarding EXIT
5767 is moved to ENTRY. Returns true if duplication succeeds, false
5768 otherwise.
5770 For example,
5772 some_code;
5773 if (cond)
5775 else
5778 is transformed to
5780 if (cond)
5782 some_code;
5785 else
5787 some_code;
5792 bool
5793 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
5794 basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
5795 basic_block *region_copy ATTRIBUTE_UNUSED)
5797 unsigned i;
5798 bool free_region_copy = false;
5799 struct loop *loop = exit->dest->loop_father;
5800 struct loop *orig_loop = entry->dest->loop_father;
5801 basic_block switch_bb, entry_bb, nentry_bb;
5802 vec<basic_block> doms;
5803 int total_freq = 0, exit_freq = 0;
5804 gcov_type total_count = 0, exit_count = 0;
5805 edge exits[2], nexits[2], e;
5806 gimple_stmt_iterator gsi;
5807 gimple cond_stmt;
5808 edge sorig, snew;
5809 basic_block exit_bb;
5810 gimple_stmt_iterator psi;
5811 gimple phi;
5812 tree def;
5813 struct loop *target, *aloop, *cloop;
5815 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5816 exits[0] = exit;
5817 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5819 if (!can_copy_bbs_p (region, n_region))
5820 return false;
5822 initialize_original_copy_tables ();
5823 set_loop_copy (orig_loop, loop);
5825 target= loop;
5826 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
5828 if (bb_part_of_region_p (aloop->header, region, n_region))
5830 cloop = duplicate_loop (aloop, target);
5831 duplicate_subloops (aloop, cloop);
5835 if (!region_copy)
5837 region_copy = XNEWVEC (basic_block, n_region);
5838 free_region_copy = true;
5841 gcc_assert (!need_ssa_update_p (cfun));
5843 /* Record blocks outside the region that are dominated by something
5844 inside. */
5845 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5847 if (exit->src->count)
5849 total_count = exit->src->count;
5850 exit_count = exit->count;
5851 /* Fix up corner cases, to avoid division by zero or creation of negative
5852 frequencies. */
5853 if (exit_count > total_count)
5854 exit_count = total_count;
5856 else
5858 total_freq = exit->src->frequency;
5859 exit_freq = EDGE_FREQUENCY (exit);
5860 /* Fix up corner cases, to avoid division by zero or creation of negative
5861 frequencies. */
5862 if (total_freq == 0)
5863 total_freq = 1;
5864 if (exit_freq > total_freq)
5865 exit_freq = total_freq;
5868 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5869 split_edge_bb_loc (exit), true);
5870 if (total_count)
5872 scale_bbs_frequencies_gcov_type (region, n_region,
5873 total_count - exit_count,
5874 total_count);
5875 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
5876 total_count);
5878 else
5880 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
5881 total_freq);
5882 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
5885 /* Create the switch block, and put the exit condition to it. */
5886 entry_bb = entry->dest;
5887 nentry_bb = get_bb_copy (entry_bb);
5888 if (!last_stmt (entry->src)
5889 || !stmt_ends_bb_p (last_stmt (entry->src)))
5890 switch_bb = entry->src;
5891 else
5892 switch_bb = split_edge (entry);
5893 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
5895 gsi = gsi_last_bb (switch_bb);
5896 cond_stmt = last_stmt (exit->src);
5897 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
5898 cond_stmt = gimple_copy (cond_stmt);
5900 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
5902 sorig = single_succ_edge (switch_bb);
5903 sorig->flags = exits[1]->flags;
5904 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
5906 /* Register the new edge from SWITCH_BB in loop exit lists. */
5907 rescan_loop_exit (snew, true, false);
5909 /* Add the PHI node arguments. */
5910 add_phi_args_after_copy (region_copy, n_region, snew);
5912 /* Get rid of now superfluous conditions and associated edges (and phi node
5913 arguments). */
5914 exit_bb = exit->dest;
5916 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
5917 PENDING_STMT (e) = NULL;
5919 /* The latch of ORIG_LOOP was copied, and so was the backedge
5920 to the original header. We redirect this backedge to EXIT_BB. */
5921 for (i = 0; i < n_region; i++)
5922 if (get_bb_original (region_copy[i]) == orig_loop->latch)
5924 gcc_assert (single_succ_edge (region_copy[i]));
5925 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
5926 PENDING_STMT (e) = NULL;
5927 for (psi = gsi_start_phis (exit_bb);
5928 !gsi_end_p (psi);
5929 gsi_next (&psi))
5931 phi = gsi_stmt (psi);
5932 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
5933 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
5936 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
5937 PENDING_STMT (e) = NULL;
5939 /* Anything that is outside of the region, but was dominated by something
5940 inside needs to update dominance info. */
5941 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5942 doms.release ();
5943 /* Update the SSA web. */
5944 update_ssa (TODO_update_ssa);
5946 if (free_region_copy)
5947 free (region_copy);
5949 free_original_copy_tables ();
5950 return true;
5953 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
5954 adding blocks when the dominator traversal reaches EXIT. This
5955 function silently assumes that ENTRY strictly dominates EXIT. */
5957 void
5958 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
5959 vec<basic_block> *bbs_p)
5961 basic_block son;
5963 for (son = first_dom_son (CDI_DOMINATORS, entry);
5964 son;
5965 son = next_dom_son (CDI_DOMINATORS, son))
5967 bbs_p->safe_push (son);
5968 if (son != exit)
5969 gather_blocks_in_sese_region (son, exit, bbs_p);
5973 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
5974 The duplicates are recorded in VARS_MAP. */
5976 static void
5977 replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
5978 tree to_context)
5980 tree t = *tp, new_t;
5981 struct function *f = DECL_STRUCT_FUNCTION (to_context);
5982 void **loc;
5984 if (DECL_CONTEXT (t) == to_context)
5985 return;
5987 loc = pointer_map_contains (vars_map, t);
5989 if (!loc)
5991 loc = pointer_map_insert (vars_map, t);
5993 if (SSA_VAR_P (t))
5995 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
5996 add_local_decl (f, new_t);
5998 else
6000 gcc_assert (TREE_CODE (t) == CONST_DECL);
6001 new_t = copy_node (t);
6003 DECL_CONTEXT (new_t) = to_context;
6005 *loc = new_t;
6007 else
6008 new_t = (tree) *loc;
6010 *tp = new_t;
6014 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6015 VARS_MAP maps old ssa names and var_decls to the new ones. */
6017 static tree
6018 replace_ssa_name (tree name, struct pointer_map_t *vars_map,
6019 tree to_context)
6021 void **loc;
6022 tree new_name;
6024 gcc_assert (!virtual_operand_p (name));
6026 loc = pointer_map_contains (vars_map, name);
6028 if (!loc)
6030 tree decl = SSA_NAME_VAR (name);
6031 if (decl)
6033 replace_by_duplicate_decl (&decl, vars_map, to_context);
6034 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6035 decl, SSA_NAME_DEF_STMT (name));
6036 if (SSA_NAME_IS_DEFAULT_DEF (name))
6037 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6038 decl, new_name);
6040 else
6041 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6042 name, SSA_NAME_DEF_STMT (name));
6044 loc = pointer_map_insert (vars_map, name);
6045 *loc = new_name;
6047 else
6048 new_name = (tree) *loc;
6050 return new_name;
6053 struct move_stmt_d
6055 tree orig_block;
6056 tree new_block;
6057 tree from_context;
6058 tree to_context;
6059 struct pointer_map_t *vars_map;
6060 htab_t new_label_map;
6061 struct pointer_map_t *eh_map;
6062 bool remap_decls_p;
6065 /* Helper for move_block_to_fn.  Set TREE_BLOCK in every expression
6066 contained in *TP if it has been ORIG_BLOCK previously and change the
6067 DECL_CONTEXT of every local variable referenced in *TP.  */
6069 static tree
6070 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6072 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6073 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6074 tree t = *tp;
/* Expressions carry a TREE_BLOCK; retarget it to the destination
   scope when it pointed at (or under) the moved block.  */
6076 if (EXPR_P (t))
6078 tree block = TREE_BLOCK (t);
6079 if (block == p->orig_block
6080 || (p->orig_block == NULL_TREE
6081 && block != NULL_TREE))
6082 TREE_SET_BLOCK (t, p->new_block);
/* With checking enabled, verify any other block really is nested
   inside the block being moved.  */
6083 #ifdef ENABLE_CHECKING
6084 else if (block != NULL_TREE)
6086 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6087 block = BLOCK_SUPERCONTEXT (block);
6088 gcc_assert (block == p->orig_block);
6090 #endif
/* Decls and SSA names are remapped to their duplicates in the
   destination function.  */
6092 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6094 if (TREE_CODE (t) == SSA_NAME)
6095 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6096 else if (TREE_CODE (t) == LABEL_DECL)
6098 if (p->new_label_map)
6100 struct tree_map in, *out;
6101 in.base.from = t;
6102 out = (struct tree_map *)
6103 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6104 if (out)
6105 *tp = t = out->to;
/* Labels (possibly replaced above) just change context.  */
6108 DECL_CONTEXT (t) = p->to_context;
6110 else if (p->remap_decls_p)
6112 /* Replace T with its duplicate.  T should no longer appear in the
6113 parent function, so this looks wasteful; however, it may appear
6114 in referenced_vars, and more importantly, as virtual operands of
6115 statements, and in alias lists of other variables.  It would be
6116 quite difficult to expunge it from all those places.  ??? It might
6117 suffice to do this for addressable variables.  */
6118 if ((TREE_CODE (t) == VAR_DECL
6119 && !is_global_var (t))
6120 || TREE_CODE (t) == CONST_DECL)
6121 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
/* The replacement needs no further walking.  */
6123 *walk_subtrees = 0;
/* Types contain nothing that has to be moved.  */
6125 else if (TYPE_P (t))
6126 *walk_subtrees = 0;
6128 return NULL_TREE;
6131 /* Helper for move_stmt_r. Given an EH region number for the source
6132 function, map that to the duplicate EH regio number in the dest. */
6134 static int
6135 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6137 eh_region old_r, new_r;
6138 void **slot;
6140 old_r = get_eh_region_from_number (old_nr);
6141 slot = pointer_map_contains (p->eh_map, old_r);
6142 new_r = (eh_region) *slot;
6144 return new_r->index;
6147 /* Similar, but operate on INTEGER_CSTs. */
6149 static tree
6150 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6152 int old_nr, new_nr;
6154 old_nr = tree_to_shwi (old_t_nr);
6155 new_nr = move_stmt_eh_region_nr (old_nr, p);
6157 return build_int_cst (integer_type_node, new_nr);
6160 /* Like move_stmt_op, but for gimple statements.
6162 Helper for move_block_to_fn.  Set GIMPLE_BLOCK in every expression
6163 contained in the current statement in *GSI_P and change the
6164 DECL_CONTEXT of every local variable referenced in the current
6165 statement.  */
6167 static tree
6168 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6169 struct walk_stmt_info *wi)
6171 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6172 gimple stmt = gsi_stmt (*gsi_p);
6173 tree block = gimple_block (stmt);
/* Retarget the statement's scope block, mirroring what move_stmt_op
   does for the operand trees.  */
6175 if (block == p->orig_block
6176 || (p->orig_block == NULL_TREE
6177 && block != NULL_TREE))
6178 gimple_set_block (stmt, p->new_block);
6180 switch (gimple_code (stmt))
6182 case GIMPLE_CALL:
6183 /* Remap the region numbers for __builtin_eh_{pointer,filter}.  */
6185 tree r, fndecl = gimple_call_fndecl (stmt);
6186 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6187 switch (DECL_FUNCTION_CODE (fndecl))
6189 case BUILT_IN_EH_COPY_VALUES:
6190 r = gimple_call_arg (stmt, 1);
6191 r = move_stmt_eh_region_tree_nr (r, p);
6192 gimple_call_set_arg (stmt, 1, r);
6193 /* FALLTHRU */
6195 case BUILT_IN_EH_POINTER:
6196 case BUILT_IN_EH_FILTER:
6197 r = gimple_call_arg (stmt, 0);
6198 r = move_stmt_eh_region_tree_nr (r, p);
6199 gimple_call_set_arg (stmt, 0, r);
6200 break;
6202 default:
6203 break;
6206 break;
/* RESX and EH_DISPATCH refer to EH regions by number; remap them to
   the duplicated regions of the destination function.  */
6208 case GIMPLE_RESX:
6210 int r = gimple_resx_region (stmt);
6211 r = move_stmt_eh_region_nr (r, p);
6212 gimple_resx_set_region (stmt, r);
6214 break;
6216 case GIMPLE_EH_DISPATCH:
6218 int r = gimple_eh_dispatch_region (stmt);
6219 r = move_stmt_eh_region_nr (r, p);
6220 gimple_eh_dispatch_set_region (stmt, r);
6222 break;
6224 case GIMPLE_OMP_RETURN:
6225 case GIMPLE_OMP_CONTINUE:
6226 break;
6227 default:
6228 if (is_gimple_omp (stmt))
6230 /* Do not remap variables inside OMP directives.  Variables
6231 referenced in clauses and directive header belong to the
6232 parent function and should not be moved into the child
6233 function.  */
6234 bool save_remap_decls_p = p->remap_decls_p;
6235 p->remap_decls_p = false;
6236 *handled_ops_p = true;
6238 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6239 move_stmt_op, wi);
6241 p->remap_decls_p = save_remap_decls_p;
6243 break;
6246 return NULL_TREE;
6249 /* Move basic block BB from function CFUN to function DEST_FN.  The
6250 block is moved out of the original linked list and placed after
6251 block AFTER in the new list.  Also, the block is removed from the
6252 original array of blocks and placed in DEST_FN's array of blocks.
6253 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
6254 updated to reflect the moved edges.
6256 The local variables are remapped to new instances, VARS_MAP is used
6257 to record the mapping.  */
6259 static void
6260 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6261 basic_block after, bool update_edge_count_p,
6262 struct move_stmt_d *d)
6264 struct control_flow_graph *cfg;
6265 edge_iterator ei;
6266 edge e;
6267 gimple_stmt_iterator si;
6268 unsigned old_len, new_len;
6270 /* Remove BB from dominance structures.  */
6271 delete_from_dominance_info (CDI_DOMINATORS, bb);
6273 /* Move BB from its current loop to the copy in the new function.  */
6274 if (current_loops)
6276 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6277 if (new_loop)
6278 bb->loop_father = new_loop;
6281 /* Link BB to the new linked list.  */
6282 move_block_after (bb, after);
6284 /* Update the edge count in the corresponding flowgraphs.  */
6285 if (update_edge_count_p)
6286 FOR_EACH_EDGE (e, ei, bb->succs)
6288 cfun->cfg->x_n_edges--;
6289 dest_cfun->cfg->x_n_edges++;
6292 /* Remove BB from the original basic block array.  */
6293 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6294 cfun->cfg->x_n_basic_blocks--;
6296 /* Grow DEST_CFUN's basic block array if needed.  */
6297 cfg = dest_cfun->cfg;
6298 cfg->x_n_basic_blocks++;
6299 if (bb->index >= cfg->x_last_basic_block)
6300 cfg->x_last_basic_block = bb->index + 1;
6302 old_len = vec_safe_length (cfg->x_basic_block_info);
6303 if ((unsigned) cfg->x_last_basic_block >= old_len)
6305 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6306 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6309 (*cfg->x_basic_block_info)[bb->index] = bb;
6311 /* Remap the variables in phi nodes.  */
6312 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
6314 gimple phi = gsi_stmt (si);
6315 use_operand_p use;
6316 tree op = PHI_RESULT (phi);
6317 ssa_op_iter oi;
6318 unsigned i;
6320 if (virtual_operand_p (op))
6322 /* Remove the phi nodes for virtual operands (alias analysis will be
6323 run for the new function, anyway).  */
6324 remove_phi_node (&si, true);
6325 continue;
6328 SET_PHI_RESULT (phi,
6329 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6330 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6332 op = USE_FROM_PTR (use);
6333 if (TREE_CODE (op) == SSA_NAME)
6334 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
/* Re-parent the scope blocks referenced by the phi argument
   locations as well.  */
6337 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6339 location_t locus = gimple_phi_arg_location (phi, i);
6340 tree block = LOCATION_BLOCK (locus);
6342 if (locus == UNKNOWN_LOCATION)
6343 continue;
6344 if (d->orig_block == NULL_TREE || block == d->orig_block)
6346 if (d->new_block == NULL_TREE)
6347 locus = LOCATION_LOCUS (locus);
6348 else
6349 locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
6350 gimple_phi_arg_set_location (phi, i, locus);
6354 gsi_next (&si);
/* Fix up every statement: remap operands and labels, transfer EH
   info and histograms to the destination function.  */
6357 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6359 gimple stmt = gsi_stmt (si);
6360 struct walk_stmt_info wi;
6362 memset (&wi, 0, sizeof (wi));
6363 wi.info = d;
6364 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6366 if (gimple_code (stmt) == GIMPLE_LABEL)
6368 tree label = gimple_label_label (stmt);
6369 int uid = LABEL_DECL_UID (label);
6371 gcc_assert (uid > -1);
6373 old_len = vec_safe_length (cfg->x_label_to_block_map);
6374 if (old_len <= (unsigned) uid)
6376 new_len = 3 * uid / 2 + 1;
6377 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6380 (*cfg->x_label_to_block_map)[uid] = bb;
6381 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6383 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6385 if (uid >= dest_cfun->cfg->last_label_uid)
6386 dest_cfun->cfg->last_label_uid = uid + 1;
6389 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6390 remove_stmt_from_eh_lp_fn (cfun, stmt);
6392 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6393 gimple_remove_stmt_histograms (cfun, stmt);
6395 /* We cannot leave any operands allocated from the operand caches of
6396 the current function.  */
6397 free_stmt_operands (stmt);
6398 push_cfun (dest_cfun);
6399 update_stmt (stmt);
6400 pop_cfun ();
/* Finally re-parent the goto_locus blocks of the outgoing edges.  */
6403 FOR_EACH_EDGE (e, ei, bb->succs)
6404 if (e->goto_locus != UNKNOWN_LOCATION)
6406 tree block = LOCATION_BLOCK (e->goto_locus);
6407 if (d->orig_block == NULL_TREE
6408 || block == d->orig_block)
6409 e->goto_locus = d->new_block ?
6410 COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
6411 LOCATION_LOCUS (e->goto_locus);
6415 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6416 the outermost EH region. Use REGION as the incoming base EH region. */
6418 static eh_region
6419 find_outermost_region_in_block (struct function *src_cfun,
6420 basic_block bb, eh_region region)
6422 gimple_stmt_iterator si;
6424 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6426 gimple stmt = gsi_stmt (si);
6427 eh_region stmt_region;
6428 int lp_nr;
6430 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6431 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6432 if (stmt_region)
6434 if (region == NULL)
6435 region = stmt_region;
6436 else if (stmt_region != region)
6438 region = eh_region_outermost (src_cfun, stmt_region, region);
6439 gcc_assert (region != NULL);
6444 return region;
6447 static tree
6448 new_label_mapper (tree decl, void *data)
6450 htab_t hash = (htab_t) data;
6451 struct tree_map *m;
6452 void **slot;
6454 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6456 m = XNEW (struct tree_map);
6457 m->hash = DECL_UID (decl);
6458 m->base.from = decl;
6459 m->to = create_artificial_label (UNKNOWN_LOCATION);
6460 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6461 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6462 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6464 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6465 gcc_assert (*slot == NULL);
6467 *slot = m;
6469 return m->to;
6472 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6473 subblocks.  */
6475 static void
6476 replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
6477 tree to_context)
6479 tree *tp, t;
/* Walk through pointers-to-chain so a replacement decl can be spliced
   in place of the original.  */
6481 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6483 t = *tp;
6484 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6485 continue;
6486 replace_by_duplicate_decl (&t, vars_map, to_context);
/* A duplicate was created: carry over any DECL_VALUE_EXPR and splice
   the duplicate into the chain where the original sat.  */
6487 if (t != *tp)
6489 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6491 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6492 DECL_HAS_VALUE_EXPR_P (t) = 1;
6494 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6495 *tp = t;
/* Recurse into nested scopes.  */
6499 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6500 replace_block_vars_by_duplicates (block, vars_map, to_context);
6503 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6504 from FN1 to FN2. */
6506 static void
6507 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6508 struct loop *loop)
6510 /* Discard it from the old loop array. */
6511 (*get_loops (fn1))[loop->num] = NULL;
6513 /* Place it in the new loop array, assigning it a new number. */
6514 loop->num = number_of_loops (fn2);
6515 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6517 /* Recurse to children. */
6518 for (loop = loop->inner; loop; loop = loop->next)
6519 fixup_loop_arrays_after_move (fn1, fn2, loop);
6522 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6523 EXIT_BB to function DEST_CFUN.  The whole region is replaced by a
6524 single basic block in the original CFG and the new basic block is
6525 returned.  DEST_CFUN must not have a CFG yet.
6527 Note that the region need not be a pure SESE region.  Blocks inside
6528 the region may contain calls to abort/exit.  The only restriction
6529 is that ENTRY_BB should be the only entry point and it must
6530 dominate EXIT_BB.
6532 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6533 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
6534 to the new function.
6536 All local variables referenced in the region are assumed to be in
6537 the corresponding BLOCK_VARS and unexpanded variable lists
6538 associated with DEST_CFUN.  */
6540 basic_block
6541 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6542 basic_block exit_bb, tree orig_block)
6544 vec<basic_block> bbs, dom_bbs;
6545 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6546 basic_block after, bb, *entry_pred, *exit_succ, abb;
6547 struct function *saved_cfun = cfun;
6548 int *entry_flag, *exit_flag;
6549 unsigned *entry_prob, *exit_prob;
6550 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
6551 edge e;
6552 edge_iterator ei;
6553 htab_t new_label_map;
6554 struct pointer_map_t *vars_map, *eh_map;
6555 struct loop *loop = entry_bb->loop_father;
6556 struct loop *loop0 = get_loop (saved_cfun, 0);
6557 struct move_stmt_d d;
6559 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6560 region.  */
6561 gcc_assert (entry_bb != exit_bb
6562 && (!exit_bb
6563 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6565 /* Collect all the blocks in the region.  Manually add ENTRY_BB
6566 because it won't be added by dfs_enumerate_from.  */
6567 bbs.create (0);
6568 bbs.safe_push (entry_bb);
6569 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6571 /* The blocks that used to be dominated by something in BBS will now be
6572 dominated by the new block.  */
6573 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6574 bbs.address (),
6575 bbs.length ());
6577 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG.  We need to remember
6578 the predecessor edges to ENTRY_BB and the successor edges to
6579 EXIT_BB so that we can re-attach them to the new basic block that
6580 will replace the region.  */
6581 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6582 entry_pred = XNEWVEC (basic_block, num_entry_edges);
6583 entry_flag = XNEWVEC (int, num_entry_edges);
6584 entry_prob = XNEWVEC (unsigned, num_entry_edges);
6585 i = 0;
6586 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6588 entry_prob[i] = e->probability;
6589 entry_flag[i] = e->flags;
6590 entry_pred[i++] = e->src;
6591 remove_edge (e);
6594 if (exit_bb)
6596 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6597 exit_succ = XNEWVEC (basic_block, num_exit_edges);
6598 exit_flag = XNEWVEC (int, num_exit_edges);
6599 exit_prob = XNEWVEC (unsigned, num_exit_edges);
6600 i = 0;
6601 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6603 exit_prob[i] = e->probability;
6604 exit_flag[i] = e->flags;
6605 exit_succ[i++] = e->dest;
6606 remove_edge (e);
6609 else
6611 num_exit_edges = 0;
6612 exit_succ = NULL;
6613 exit_flag = NULL;
6614 exit_prob = NULL;
6617 /* Switch context to the child function to initialize DEST_FN's CFG.  */
6618 gcc_assert (dest_cfun->cfg == NULL);
6619 push_cfun (dest_cfun);
6621 init_empty_tree_cfg ();
6623 /* Initialize EH information for the new function.  */
6624 eh_map = NULL;
6625 new_label_map = NULL;
6626 if (saved_cfun->eh)
6628 eh_region region = NULL;
6630 FOR_EACH_VEC_ELT (bbs, i, bb)
6631 region = find_outermost_region_in_block (saved_cfun, bb, region);
6633 init_eh_for_function ();
6634 if (region != NULL)
6636 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6637 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
6638 new_label_mapper, new_label_map);
6642 /* Initialize an empty loop tree.  */
6643 struct loops *loops = ggc_alloc_cleared_loops ();
6644 init_loops_structure (dest_cfun, loops, 1);
6645 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
6646 set_loops_for_fn (dest_cfun, loops);
6648 /* Move the outlined loop tree part.  */
6649 num_nodes = bbs.length ();
6650 FOR_EACH_VEC_ELT (bbs, i, bb)
6652 if (bb->loop_father->header == bb)
6654 struct loop *this_loop = bb->loop_father;
6655 struct loop *outer = loop_outer (this_loop);
6656 if (outer == loop
6657 /* If the SESE region contains some bbs ending with
6658 a noreturn call, those are considered to belong
6659 to the outermost loop in saved_cfun, rather than
6660 the entry_bb's loop_father.  */
6661 || outer == loop0)
6663 if (outer != loop)
6664 num_nodes -= this_loop->num_nodes;
6665 flow_loop_tree_node_remove (bb->loop_father);
6666 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
6667 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
6670 else if (bb->loop_father == loop0 && loop0 != loop)
6671 num_nodes--;
6673 /* Remove loop exits from the outlined region.  */
6674 if (loops_for_fn (saved_cfun)->exits)
6675 FOR_EACH_EDGE (e, ei, bb->succs)
6677 void **slot = htab_find_slot_with_hash
6678 (loops_for_fn (saved_cfun)->exits, e,
6679 htab_hash_pointer (e), NO_INSERT);
6680 if (slot)
6681 htab_clear_slot (loops_for_fn (saved_cfun)->exits, slot);
6686 /* Adjust the number of blocks in the tree root of the outlined part.  */
6687 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
6689 /* Setup a mapping to be used by move_block_to_fn.  */
6690 loop->aux = current_loops->tree_root;
6691 loop0->aux = current_loops->tree_root;
6693 pop_cfun ();
6695 /* Move blocks from BBS into DEST_CFUN.  */
6696 gcc_assert (bbs.length () >= 2);
6697 after = dest_cfun->cfg->x_entry_block_ptr;
6698 vars_map = pointer_map_create ();
6700 memset (&d, 0, sizeof (d));
6701 d.orig_block = orig_block;
6702 d.new_block = DECL_INITIAL (dest_cfun->decl);
6703 d.from_context = cfun->decl;
6704 d.to_context = dest_cfun->decl;
6705 d.vars_map = vars_map;
6706 d.new_label_map = new_label_map;
6707 d.eh_map = eh_map;
6708 d.remap_decls_p = true;
6710 FOR_EACH_VEC_ELT (bbs, i, bb)
6712 /* No need to update edge counts on the last block.  It has
6713 already been updated earlier when we detached the region from
6714 the original CFG.  */
6715 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
6716 after = bb;
6719 loop->aux = NULL;
6720 loop0->aux = NULL;
6721 /* Loop sizes are no longer correct, fix them up.  */
6722 loop->num_nodes -= num_nodes;
6723 for (struct loop *outer = loop_outer (loop);
6724 outer; outer = loop_outer (outer))
6725 outer->num_nodes -= num_nodes;
6726 loop0->num_nodes -= bbs.length () - num_nodes;
/* Propagate simduid/force_vect markers into the new function's loops
   so its loop flags stay consistent with the moved bodies.  */
6728 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vect_loops)
6730 struct loop *aloop;
6731 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
6732 if (aloop != NULL)
6734 if (aloop->simduid)
6736 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
6737 d.to_context);
6738 dest_cfun->has_simduid_loops = true;
6740 if (aloop->force_vect)
6741 dest_cfun->has_force_vect_loops = true;
6745 /* Rewire BLOCK_SUBBLOCKS of orig_block.  */
6746 if (orig_block)
6748 tree block;
6749 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6750 == NULL_TREE);
6751 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6752 = BLOCK_SUBBLOCKS (orig_block);
6753 for (block = BLOCK_SUBBLOCKS (orig_block);
6754 block; block = BLOCK_CHAIN (block))
6755 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
6756 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
6759 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
6760 vars_map, dest_cfun->decl);
/* Release the temporary maps.  */
6762 if (new_label_map)
6763 htab_delete (new_label_map);
6764 if (eh_map)
6765 pointer_map_destroy (eh_map);
6766 pointer_map_destroy (vars_map);
6768 /* Rewire the entry and exit blocks.  The successor to the entry
6769 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6770 the child function.  Similarly, the predecessor of DEST_FN's
6771 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR.  We
6772 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6773 various CFG manipulation function get to the right CFG.
6775 FIXME, this is silly.  The CFG ought to become a parameter to
6776 these helpers.  */
6777 push_cfun (dest_cfun);
6778 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
6779 if (exit_bb)
6780 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
6781 pop_cfun ();
6783 /* Back in the original function, the SESE region has disappeared,
6784 create a new basic block in its place.  */
6785 bb = create_empty_bb (entry_pred[0]);
6786 if (current_loops)
6787 add_bb_to_loop (bb, loop);
6788 for (i = 0; i < num_entry_edges; i++)
6790 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6791 e->probability = entry_prob[i];
6794 for (i = 0; i < num_exit_edges; i++)
6796 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6797 e->probability = exit_prob[i];
6800 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6801 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
6802 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6803 dom_bbs.release ();
6805 if (exit_bb)
6807 free (exit_prob);
6808 free (exit_flag);
6809 free (exit_succ);
6811 free (entry_prob);
6812 free (entry_flag);
6813 free (entry_pred);
6814 bbs.release ();
6816 return bb;
6820 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
   */
6823 void
6824 dump_function_to_file (tree fndecl, FILE *file, int flags)
6826 tree arg, var, old_current_fndecl = current_function_decl;
6827 struct function *dsf;
6828 bool ignore_topmost_bind = false, any_var = false;
6829 basic_block bb;
6830 tree chain;
6831 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
6832 && decl_is_tm_clone (fndecl));
6833 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
6835 current_function_decl = fndecl;
6836 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
/* Print the parameter list.  */
6838 arg = DECL_ARGUMENTS (fndecl);
6839 while (arg)
6841 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
6842 fprintf (file, " ");
6843 print_generic_expr (file, arg, dump_flags);
6844 if (flags & TDF_VERBOSE)
6845 print_node (file, "", arg, 4);
6846 if (DECL_CHAIN (arg))
6847 fprintf (file, ", ");
6848 arg = DECL_CHAIN (arg);
6850 fprintf (file, ")\n");
6852 if (flags & TDF_VERBOSE)
6853 print_node (file, "", fndecl, 2);
6855 dsf = DECL_STRUCT_FUNCTION (fndecl);
6856 if (dsf && (flags & TDF_EH))
6857 dump_eh_tree (file, dsf);
/* Raw dump of a function without a gimple body: print the tree and
   return early.  */
6859 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
6861 dump_node (fndecl, TDF_SLIM | flags, file);
6862 current_function_decl = old_current_fndecl;
6863 return;
6866 /* When GIMPLE is lowered, the variables are no longer available in
6867 BIND_EXPRs, so display them separately.  */
6868 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
6870 unsigned ix;
6871 ignore_topmost_bind = true;
6873 fprintf (file, "{\n");
6874 if (!vec_safe_is_empty (fun->local_decls))
6875 FOR_EACH_LOCAL_DECL (fun, ix, var)
6877 print_generic_decl (file, var, flags);
6878 if (flags & TDF_VERBOSE)
6879 print_node (file, "", var, 4);
6880 fprintf (file, "\n");
6882 any_var = true;
/* Anonymous SSA names have no underlying decl; list them too.  */
6884 if (gimple_in_ssa_p (cfun))
6885 for (ix = 1; ix < num_ssa_names; ++ix)
6887 tree name = ssa_name (ix);
6888 if (name && !SSA_NAME_VAR (name))
6890 fprintf (file, " ");
6891 print_generic_expr (file, TREE_TYPE (name), flags);
6892 fprintf (file, " ");
6893 print_generic_expr (file, name, flags);
6894 fprintf (file, ";\n");
6896 any_var = true;
/* Prefer a CFG-based dump once a CFG exists.  */
6901 if (fun && fun->decl == fndecl
6902 && fun->cfg
6903 && basic_block_info_for_function (fun))
6905 /* If the CFG has been built, emit a CFG-based dump.  */
6906 if (!ignore_topmost_bind)
6907 fprintf (file, "{\n");
6909 if (any_var && n_basic_blocks_for_function (fun))
6910 fprintf (file, "\n");
6912 FOR_EACH_BB_FN (bb, fun)
6913 dump_bb (file, bb, 2, flags | TDF_COMMENT);
6915 fprintf (file, "}\n");
6917 else if (DECL_SAVED_TREE (fndecl) == NULL)
6919 /* The function is now in GIMPLE form but the CFG has not been
6920 built yet.  Emit the single sequence of GIMPLE statements
6921 that make up its body.  */
6922 gimple_seq body = gimple_body (fndecl);
6924 if (gimple_seq_first_stmt (body)
6925 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
6926 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
6927 print_gimple_seq (file, body, 0, flags);
6928 else
6930 if (!ignore_topmost_bind)
6931 fprintf (file, "{\n");
6933 if (any_var)
6934 fprintf (file, "\n");
6936 print_gimple_seq (file, body, 2, flags);
6937 fprintf (file, "}\n");
6940 else
6942 int indent;
6944 /* Make a tree based dump.  */
6945 chain = DECL_SAVED_TREE (fndecl);
6946 if (chain && TREE_CODE (chain) == BIND_EXPR)
6948 if (ignore_topmost_bind)
6950 chain = BIND_EXPR_BODY (chain);
6951 indent = 2;
6953 else
6954 indent = 0;
6956 else
6958 if (!ignore_topmost_bind)
6959 fprintf (file, "{\n");
6960 indent = 2;
6963 if (any_var)
6964 fprintf (file, "\n");
6966 print_generic_stmt_indented (file, chain, flags, indent);
6967 if (ignore_topmost_bind)
6968 fprintf (file, "}\n");
6971 if (flags & TDF_ENUMERATE_LOCALS)
6972 dump_enumerated_decls (file, flags);
6973 fprintf (file, "\n\n");
6975 current_function_decl = old_current_fndecl;
6978 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
6980 DEBUG_FUNCTION void
6981 debug_function (tree fn, int flags)
6983 dump_function_to_file (fn, stderr, flags);
6987 /* Print on FILE the indexes for the predecessors of basic_block BB. */
6989 static void
6990 print_pred_bbs (FILE *file, basic_block bb)
6992 edge e;
6993 edge_iterator ei;
6995 FOR_EACH_EDGE (e, ei, bb->preds)
6996 fprintf (file, "bb_%d ", e->src->index);
7000 /* Print on FILE the indexes for the successors of basic_block BB. */
7002 static void
7003 print_succ_bbs (FILE *file, basic_block bb)
7005 edge e;
7006 edge_iterator ei;
7008 FOR_EACH_EDGE (e, ei, bb->succs)
7009 fprintf (file, "bb_%d ", e->dest->index);
7012 /* Print to FILE the basic block BB following the VERBOSITY level. */
7014 void
7015 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7017 char *s_indent = (char *) alloca ((size_t) indent + 1);
7018 memset ((void *) s_indent, ' ', (size_t) indent);
7019 s_indent[indent] = '\0';
7021 /* Print basic_block's header. */
7022 if (verbosity >= 2)
7024 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7025 print_pred_bbs (file, bb);
7026 fprintf (file, "}, succs = {");
7027 print_succ_bbs (file, bb);
7028 fprintf (file, "})\n");
7031 /* Print basic_block's body. */
7032 if (verbosity >= 3)
7034 fprintf (file, "%s {\n", s_indent);
7035 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7036 fprintf (file, "%s }\n", s_indent);
7040 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7042 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
7043 VERBOSITY level this outputs the contents of the loop, or just its
7044 structure. */
7046 static void
7047 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7049 char *s_indent;
7050 basic_block bb;
7052 if (loop == NULL)
7053 return;
7055 s_indent = (char *) alloca ((size_t) indent + 1);
7056 memset ((void *) s_indent, ' ', (size_t) indent);
7057 s_indent[indent] = '\0';
7059 /* Print loop's header. */
7060 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7061 if (loop->header)
7062 fprintf (file, "header = %d", loop->header->index);
7063 else
7065 fprintf (file, "deleted)\n");
7066 return;
7068 if (loop->latch)
7069 fprintf (file, ", latch = %d", loop->latch->index);
7070 else
7071 fprintf (file, ", multiple latches");
7072 fprintf (file, ", niter = ");
7073 print_generic_expr (file, loop->nb_iterations, 0);
7075 if (loop->any_upper_bound)
7077 fprintf (file, ", upper_bound = ");
7078 print_decu (loop->nb_iterations_upper_bound, file);
7081 if (loop->any_estimate)
7083 fprintf (file, ", estimate = ");
7084 print_decu (loop->nb_iterations_estimate, file);
7086 fprintf (file, ")\n");
7088 /* Print loop's body. */
7089 if (verbosity >= 1)
7091 fprintf (file, "%s{\n", s_indent);
7092 FOR_EACH_BB (bb)
7093 if (bb->loop_father == loop)
7094 print_loops_bb (file, bb, indent, verbosity);
7096 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7097 fprintf (file, "%s}\n", s_indent);
7101 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7102 spaces. Following VERBOSITY level this outputs the contents of the
7103 loop, or just its structure. */
7105 static void
7106 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7107 int verbosity)
7109 if (loop == NULL)
7110 return;
7112 print_loop (file, loop, indent, verbosity);
7113 print_loop_and_siblings (file, loop->next, indent, verbosity);
7116 /* Follow a CFG edge from the entry point of the program, and on entry
7117 of a loop, pretty print the loop structure on FILE. */
7119 void
7120 print_loops (FILE *file, int verbosity)
7122 basic_block bb;
7124 bb = ENTRY_BLOCK_PTR;
7125 if (bb && bb->loop_father)
7126 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7129 /* Dump a loop. */
7131 DEBUG_FUNCTION void
7132 debug (struct loop &ref)
7134 print_loop (stderr, &ref, 0, /*verbosity*/0);
7137 DEBUG_FUNCTION void
7138 debug (struct loop *ptr)
7140 if (ptr)
7141 debug (*ptr);
7142 else
7143 fprintf (stderr, "<nil>\n");
7146 /* Dump a loop verbosely. */
7148 DEBUG_FUNCTION void
7149 debug_verbose (struct loop &ref)
7151 print_loop (stderr, &ref, 0, /*verbosity*/3);
7154 DEBUG_FUNCTION void
7155 debug_verbose (struct loop *ptr)
7157 if (ptr)
7158 debug (*ptr);
7159 else
7160 fprintf (stderr, "<nil>\n");
7164 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7166 DEBUG_FUNCTION void
7167 debug_loops (int verbosity)
7169 print_loops (stderr, verbosity);
7172 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7174 DEBUG_FUNCTION void
7175 debug_loop (struct loop *loop, int verbosity)
7177 print_loop (stderr, loop, 0, verbosity);
7180 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7181 level. */
7183 DEBUG_FUNCTION void
7184 debug_loop_num (unsigned num, int verbosity)
7186 debug_loop (get_loop (cfun, num), verbosity);
7189 /* Return true if BB ends with a call, possibly followed by some
7190 instructions that must stay with the call. Return false,
7191 otherwise. */
7193 static bool
7194 gimple_block_ends_with_call_p (basic_block bb)
7196 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7197 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7201 /* Return true if BB ends with a conditional branch. Return false,
7202 otherwise. */
7204 static bool
7205 gimple_block_ends_with_condjump_p (const_basic_block bb)
7207 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7208 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7212 /* Return true if we need to add fake edge to exit at statement T.
7213 Helper function for gimple_flow_call_edges_add. */
7215 static bool
7216 need_fake_edge_p (gimple t)
7218 tree fndecl = NULL_TREE;
7219 int call_flags = 0;
7221 /* NORETURN and LONGJMP calls already have an edge to exit.
7222 CONST and PURE calls do not need one.
7223 We don't currently check for CONST and PURE here, although
7224 it would be a good idea, because those attributes are
7225 figured out from the RTL in mark_constant_function, and
7226 the counter incrementation code from -fprofile-arcs
7227 leads to different results from -fbranch-probabilities. */
7228 if (is_gimple_call (t))
7230 fndecl = gimple_call_fndecl (t);
7231 call_flags = gimple_call_flags (t);
/* A nothrow, non-returns-twice built-in (other than fork, see the
   inline comment below) never needs a fake edge.  */
7234 if (is_gimple_call (t)
7235 && fndecl
7236 && DECL_BUILT_IN (fndecl)
7237 && (call_flags & ECF_NOTHROW)
7238 && !(call_flags & ECF_RETURNS_TWICE)
7239 /* fork() doesn't really return twice, but the effect of
7240 wrapping it in __gcov_fork() which calls __gcov_flush()
7241 and clears the counters before forking has the same
7242 effect as returning twice. Force a fake edge. */
7243 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7244 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7245 return false;
/* Any other call that may return needs a fake edge; a noreturn call
   only needs one if all of its existing outgoing edges are fake.  */
7247 if (is_gimple_call (t))
7249 edge_iterator ei;
7250 edge e;
7251 basic_block bb;
7253 if (!(call_flags & ECF_NORETURN))
7254 return true;
7256 bb = gimple_bb (t);
7257 FOR_EACH_EDGE (e, ei, bb->succs)
7258 if ((e->flags & EDGE_FAKE) == 0)
7259 return true;
/* GIMPLE_ASMs that are volatile or are old-style (input) asms also
   get a fake edge.  */
7262 if (gimple_code (t) == GIMPLE_ASM
7263 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
7264 return true;
7266 return false;
7270 /* Add fake edges to the function exit for any non constant and non
7271 noreturn calls (or noreturn calls with EH/abnormal edges),
7272 volatile inline assembly in the bitmap of blocks specified by BLOCKS
7273 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7274 that were split.
7276 The goal is to expose cases in which entering a basic block does
7277 not imply that all subsequent instructions must be executed. */
7279 static int
7280 gimple_flow_call_edges_add (sbitmap blocks)
7282 int i;
7283 int blocks_split = 0;
7284 int last_bb = last_basic_block;
7285 bool check_last_block = false;
/* Nothing to do for an empty function (only the fixed entry/exit
   blocks exist).  */
7287 if (n_basic_blocks == NUM_FIXED_BLOCKS)
7288 return 0;
7290 if (! blocks)
7291 check_last_block = true;
7292 else
7293 check_last_block = bitmap_bit_p (blocks, EXIT_BLOCK_PTR->prev_bb->index);
7295 /* In the last basic block, before epilogue generation, there will be
7296 a fallthru edge to EXIT. Special care is required if the last insn
7297 of the last basic block is a call because make_edge folds duplicate
7298 edges, which would result in the fallthru edge also being marked
7299 fake, which would result in the fallthru edge being removed by
7300 remove_fake_edges, which would result in an invalid CFG.
7302 Moreover, we can't elide the outgoing fake edge, since the block
7303 profiler needs to take this into account in order to solve the minimal
7304 spanning tree in the case that the call doesn't return.
7306 Handle this by adding a dummy instruction in a new last basic block. */
7307 if (check_last_block)
7309 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
7310 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7311 gimple t = NULL;
7313 if (!gsi_end_p (gsi))
7314 t = gsi_stmt (gsi);
7316 if (t && need_fake_edge_p (t))
7318 edge e;
7320 e = find_edge (bb, EXIT_BLOCK_PTR);
7321 if (e)
7323 gsi_insert_on_edge (e, gimple_build_nop ());
7324 gsi_commit_edge_inserts ();
7329 /* Now add fake edges to the function exit for any non constant
7330 calls since there is no way that we can determine if they will
7331 return or not... */
7332 for (i = 0; i < last_bb; i++)
7334 basic_block bb = BASIC_BLOCK (i);
7335 gimple_stmt_iterator gsi;
7336 gimple stmt, last_stmt;
7338 if (!bb)
7339 continue;
7341 if (blocks && !bitmap_bit_p (blocks, i))
7342 continue;
7344 gsi = gsi_last_nondebug_bb (bb);
7345 if (!gsi_end_p (gsi))
7347 last_stmt = gsi_stmt (gsi);
/* Walk the statements of BB backwards (gsi_prev below) in a do/while
   loop whose test is the "while (!gsi_end_p (gsi));" at the bottom.  */
7350 stmt = gsi_stmt (gsi);
7351 if (need_fake_edge_p (stmt))
7353 edge e;
7355 /* The handling above of the final block before the
7356 epilogue should be enough to verify that there is
7357 no edge to the exit block in CFG already.
7358 Calling make_edge in such case would cause us to
7359 mark that edge as fake and remove it later. */
7360 #ifdef ENABLE_CHECKING
7361 if (stmt == last_stmt)
7363 e = find_edge (bb, EXIT_BLOCK_PTR);
7364 gcc_assert (e == NULL);
7366 #endif
7368 /* Note that the following may create a new basic block
7369 and renumber the existing basic blocks. */
7370 if (stmt != last_stmt)
7372 e = split_block (bb, stmt);
7373 if (e)
7374 blocks_split++;
7376 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
7378 gsi_prev (&gsi);
7380 while (!gsi_end_p (gsi));
7384 if (blocks_split)
7385 verify_flow_info ();
7387 return blocks_split;
7390 /* Removes edge E and all the blocks dominated by it, and updates dominance
7391 information. The IL in E->src needs to be updated separately.
7392 If dominance info is not available, only the edge E is removed.*/
7394 void
7395 remove_edge_and_dominated_blocks (edge e)
7397 vec<basic_block> bbs_to_remove = vNULL;
7398 vec<basic_block> bbs_to_fix_dom = vNULL;
7399 bitmap df, df_idom;
7400 edge f;
7401 edge_iterator ei;
7402 bool none_removed = false;
7403 unsigned i;
7404 basic_block bb, dbb;
7405 bitmap_iterator bi;
/* Without dominator information we cannot compute the set of
   dominated blocks; just remove the edge.  */
7407 if (!dom_info_available_p (CDI_DOMINATORS))
7409 remove_edge (e);
7410 return;
7413 /* No updating is needed for edges to exit. */
7414 if (e->dest == EXIT_BLOCK_PTR)
7416 if (cfgcleanup_altered_bbs)
7417 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7418 remove_edge (e);
7419 return;
7422 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7423 that is not dominated by E->dest, then this set is empty. Otherwise,
7424 all the basic blocks dominated by E->dest are removed.
7426 Also, to DF_IDOM we store the immediate dominators of the blocks in
7427 the dominance frontier of E (i.e., of the successors of the
7428 removed blocks, if there are any, and of E->dest otherwise). */
7429 FOR_EACH_EDGE (f, ei, e->dest->preds)
7431 if (f == e)
7432 continue;
7434 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7436 none_removed = true;
7437 break;
7441 df = BITMAP_ALLOC (NULL);
7442 df_idom = BITMAP_ALLOC (NULL);
7444 if (none_removed)
7445 bitmap_set_bit (df_idom,
7446 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7447 else
7449 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
/* DF collects successors of removed blocks that are not themselves
   removed, i.e. the dominance frontier of the removed region.  */
7450 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7452 FOR_EACH_EDGE (f, ei, bb->succs)
7454 if (f->dest != EXIT_BLOCK_PTR)
7455 bitmap_set_bit (df, f->dest->index);
7458 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7459 bitmap_clear_bit (df, bb->index);
7461 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7463 bb = BASIC_BLOCK (i);
7464 bitmap_set_bit (df_idom,
7465 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7469 if (cfgcleanup_altered_bbs)
7471 /* Record the set of the altered basic blocks. */
7472 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7473 bitmap_ior_into (cfgcleanup_altered_bbs, df);
7476 /* Remove E and the cancelled blocks. */
7477 if (none_removed)
7478 remove_edge (e);
7479 else
7481 /* Walk backwards so as to get a chance to substitute all
7482 released DEFs into debug stmts. See
7483 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7484 details. */
7485 for (i = bbs_to_remove.length (); i-- > 0; )
7486 delete_basic_block (bbs_to_remove[i]);
7489 /* Update the dominance information. The immediate dominator may change only
7490 for blocks whose immediate dominator belongs to DF_IDOM:
7492 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7493 removal. Let Z the arbitrary block such that idom(Z) = Y and
7494 Z dominates X after the removal. Before removal, there exists a path P
7495 from Y to X that avoids Z. Let F be the last edge on P that is
7496 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7497 dominates W, and because of P, Z does not dominate W), and W belongs to
7498 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7499 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7501 bb = BASIC_BLOCK (i);
7502 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
7503 dbb;
7504 dbb = next_dom_son (CDI_DOMINATORS, dbb))
7505 bbs_to_fix_dom.safe_push (dbb);
7508 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
7510 BITMAP_FREE (df);
7511 BITMAP_FREE (df_idom);
7512 bbs_to_remove.release ();
7513 bbs_to_fix_dom.release ();
7516 /* Purge dead EH edges from basic block BB. */
7518 bool
7519 gimple_purge_dead_eh_edges (basic_block bb)
7521 bool changed = false;
7522 edge e;
7523 edge_iterator ei;
7524 gimple stmt = last_stmt (bb);
7526 if (stmt && stmt_can_throw_internal (stmt))
7527 return false;
7529 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7531 if (e->flags & EDGE_EH)
7533 remove_edge_and_dominated_blocks (e);
7534 changed = true;
7536 else
7537 ei_next (&ei);
7540 return changed;
7543 /* Purge dead EH edges from basic block listed in BLOCKS. */
7545 bool
7546 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7548 bool changed = false;
7549 unsigned i;
7550 bitmap_iterator bi;
7552 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7554 basic_block bb = BASIC_BLOCK (i);
7556 /* Earlier gimple_purge_dead_eh_edges could have removed
7557 this basic block already. */
7558 gcc_assert (bb || changed);
7559 if (bb != NULL)
7560 changed |= gimple_purge_dead_eh_edges (bb);
7563 return changed;
7566 /* Purge dead abnormal call edges from basic block BB. */
7568 bool
7569 gimple_purge_dead_abnormal_call_edges (basic_block bb)
7571 bool changed = false;
7572 edge e;
7573 edge_iterator ei;
7574 gimple stmt = last_stmt (bb);
7576 if (!cfun->has_nonlocal_label
7577 && !cfun->calls_setjmp)
7578 return false;
7580 if (stmt && stmt_can_make_abnormal_goto (stmt))
7581 return false;
7583 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7585 if (e->flags & EDGE_ABNORMAL)
7587 if (e->flags & EDGE_FALLTHRU)
7588 e->flags &= ~EDGE_ABNORMAL;
7589 else
7590 remove_edge_and_dominated_blocks (e);
7591 changed = true;
7593 else
7594 ei_next (&ei);
7597 return changed;
7600 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
7602 bool
7603 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
7605 bool changed = false;
7606 unsigned i;
7607 bitmap_iterator bi;
7609 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7611 basic_block bb = BASIC_BLOCK (i);
7613 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7614 this basic block already. */
7615 gcc_assert (bb || changed);
7616 if (bb != NULL)
7617 changed |= gimple_purge_dead_abnormal_call_edges (bb);
7620 return changed;
7623 /* This function is called whenever a new edge is created or
7624 redirected. */
7626 static void
7627 gimple_execute_on_growing_pred (edge e)
7629 basic_block bb = e->dest;
7631 if (!gimple_seq_empty_p (phi_nodes (bb)))
7632 reserve_phi_args_for_new_edge (bb);
7635 /* This function is called immediately before edge E is removed from
7636 the edge vector E->dest->preds. */
7638 static void
7639 gimple_execute_on_shrinking_pred (edge e)
7641 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
7642 remove_phi_args (e);
7645 /*---------------------------------------------------------------------------
7646 Helper functions for Loop versioning
7647 ---------------------------------------------------------------------------*/
7649 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
7650 of 'first'. Both of them are dominated by 'new_head' basic block. When
7651 'new_head' was created by 'second's incoming edge it received phi arguments
7652 on the edge by split_edge(). Later, additional edge 'e' was created to
7653 connect 'new_head' and 'first'. Now this routine adds phi args on this
7654 additional edge 'e' that new_head to second edge received as part of edge
7655 splitting. */
7657 static void
7658 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
7659 basic_block new_head, edge e)
7661 gimple phi1, phi2;
7662 gimple_stmt_iterator psi1, psi2;
7663 tree def;
7664 edge e2 = find_edge (new_head, second);
7666 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7667 edge, we should always have an edge from NEW_HEAD to SECOND. */
7668 gcc_assert (e2 != NULL);
7670 /* Browse all 'second' basic block phi nodes and add phi args to
7671 edge 'e' for 'first' head. PHI args are always in correct order. */
/* The two PHI sequences are walked in lockstep; each PHI in FIRST gets
   the argument its counterpart in SECOND carries on edge E2, with the
   same source location.  */
7673 for (psi2 = gsi_start_phis (second),
7674 psi1 = gsi_start_phis (first);
7675 !gsi_end_p (psi2) && !gsi_end_p (psi1);
7676 gsi_next (&psi2), gsi_next (&psi1))
7678 phi1 = gsi_stmt (psi1);
7679 phi2 = gsi_stmt (psi2);
7680 def = PHI_ARG_DEF (phi2, e2->dest_idx);
7681 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
7686 /* Adds a if else statement to COND_BB with condition COND_EXPR.
7687 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
7688 the destination of the ELSE part. */
7690 static void
7691 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
7692 basic_block second_head ATTRIBUTE_UNUSED,
7693 basic_block cond_bb, void *cond_e)
7695 gimple_stmt_iterator gsi;
7696 gimple new_cond_expr;
7697 tree cond_expr = (tree) cond_e;
7698 edge e0;
7700 /* Build new conditional expr */
7701 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
7702 NULL_TREE, NULL_TREE);
7704 /* Add new cond in cond_bb. */
7705 gsi = gsi_last_bb (cond_bb);
7706 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
7708 /* Adjust edges appropriately to connect new head with first head
7709 as well as second head. */
7710 e0 = single_succ_edge (cond_bb);
7711 e0->flags &= ~EDGE_FALLTHRU;
7712 e0->flags |= EDGE_FALSE_VALUE;
7716 /* Do book-keeping of basic block BB for the profile consistency checker.
7717 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
7718 then do post-pass accounting. Store the counting in RECORD. */
7719 static void
7720 gimple_account_profile_record (basic_block bb, int after_pass,
7721 struct profile_record *record)
7723 gimple_stmt_iterator i;
/* Accumulate the estimated size of every statement in BB; time is
   additionally weighted by BB's profile count (read profile) or its
   estimated frequency (guessed profile).  */
7724 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
7726 record->size[after_pass]
7727 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
7728 if (profile_status == PROFILE_READ)
7729 record->time[after_pass]
7730 += estimate_num_insns (gsi_stmt (i),
7731 &eni_time_weights) * bb->count;
7732 else if (profile_status == PROFILE_GUESSED)
7733 record->time[after_pass]
7734 += estimate_num_insns (gsi_stmt (i),
7735 &eni_time_weights) * bb->frequency;
/* Table of CFG hooks used while the IL is in GIMPLE form; see
   cfghooks.h for the meaning of each slot.  */
7739 struct cfg_hooks gimple_cfg_hooks = {
7740 "gimple", /* name */
7741 gimple_verify_flow_info, /* verify_flow_info */
7742 gimple_dump_bb, /* dump_bb */
7743 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
7744 create_bb, /* create_basic_block */
7745 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
7746 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
7747 gimple_can_remove_branch_p, /* can_remove_branch_p */
7748 remove_bb, /* delete_basic_block */
7749 gimple_split_block, /* split_block */
7750 gimple_move_block_after, /* move_block_after */
7751 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
7752 gimple_merge_blocks, /* merge_blocks */
7753 gimple_predict_edge, /* predict_edge */
7754 gimple_predicted_by_p, /* predicted_by_p */
7755 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
7756 gimple_duplicate_bb, /* duplicate_block */
7757 gimple_split_edge, /* split_edge */
7758 gimple_make_forwarder_block, /* make_forward_block */
7759 NULL, /* tidy_fallthru_edge */
7760 NULL, /* force_nonfallthru */
7761 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
7762 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
7763 gimple_flow_call_edges_add, /* flow_call_edges_add */
7764 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
7765 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
7766 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
7767 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
7768 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
7769 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
7770 flush_pending_stmts, /* flush_pending_stmts */
7771 gimple_empty_block_p, /* block_empty_p */
7772 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
7773 gimple_account_profile_record, /* account_profile_record */
7777 /* Split all critical edges. */
7779 static unsigned int
7780 split_critical_edges (void)
7782 basic_block bb;
7783 edge e;
7784 edge_iterator ei;
7786 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
7787 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
7788 mappings around the calls to split_edge. */
7789 start_recording_case_labels ();
7790 FOR_ALL_BB (bb)
7792 FOR_EACH_EDGE (e, ei, bb->succs)
/* Abnormal edges cannot be split; they are skipped both here and in
   the non-critical case below.  */
7794 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
7795 split_edge (e);
7796 /* PRE inserts statements to edges and expects that
7797 since split_critical_edges was done beforehand, committing edge
7798 insertions will not split more edges. In addition to critical
7799 edges we must split edges that have multiple successors and
7800 end by control flow statements, such as RESX.
7801 Go ahead and split them too. This matches the logic in
7802 gimple_find_edge_insert_loc. */
7803 else if ((!single_pred_p (e->dest)
7804 || !gimple_seq_empty_p (phi_nodes (e->dest))
7805 || e->dest == EXIT_BLOCK_PTR)
7806 && e->src != ENTRY_BLOCK_PTR
7807 && !(e->flags & EDGE_ABNORMAL))
7809 gimple_stmt_iterator gsi;
7811 gsi = gsi_last_bb (e->src);
7812 if (!gsi_end_p (gsi)
7813 && stmt_ends_bb_p (gsi_stmt (gsi))
7814 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
7815 && !gimple_call_builtin_p (gsi_stmt (gsi),
7816 BUILT_IN_RETURN)))
7817 split_edge (e);
7821 end_recording_case_labels ();
7822 return 0;
7825 namespace {
/* Descriptor for the "crited" pass, which splits all critical edges
   and provides the PROP_no_crit_edges property.  */
7827 const pass_data pass_data_split_crit_edges =
7829 GIMPLE_PASS, /* type */
7830 "crited", /* name */
7831 OPTGROUP_NONE, /* optinfo_flags */
7832 false, /* has_gate */
7833 true, /* has_execute */
7834 TV_TREE_SPLIT_EDGES, /* tv_id */
7835 PROP_cfg, /* properties_required */
7836 PROP_no_crit_edges, /* properties_provided */
7837 0, /* properties_destroyed */
7838 0, /* todo_flags_start */
7839 TODO_verify_flow, /* todo_flags_finish */
7842 class pass_split_crit_edges : public gimple_opt_pass
7844 public:
7845 pass_split_crit_edges (gcc::context *ctxt)
7846 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
7849 /* opt_pass methods: */
7850 unsigned int execute () { return split_critical_edges (); }
7852 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
7853 }; // class pass_split_crit_edges
7855 } // anon namespace
/* Pass-manager factory for the edge-splitting pass.  */
7857 gimple_opt_pass *
7858 make_pass_split_crit_edges (gcc::context *ctxt)
7860 return new pass_split_crit_edges (ctxt);
7864 /* Build a ternary operation and gimplify it. Emit code before GSI.
7865 Return the gimple_val holding the result. */
7867 tree
7868 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
7869 tree type, tree a, tree b, tree c)
7871 tree ret;
7872 location_t loc = gimple_location (gsi_stmt (*gsi));
7874 ret = fold_build3_loc (loc, code, type, a, b, c);
7875 STRIP_NOPS (ret);
7877 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7878 GSI_SAME_STMT);
7881 /* Build a binary operation and gimplify it. Emit code before GSI.
7882 Return the gimple_val holding the result. */
7884 tree
7885 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
7886 tree type, tree a, tree b)
7888 tree ret;
7890 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
7891 STRIP_NOPS (ret);
7893 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7894 GSI_SAME_STMT);
7897 /* Build a unary operation and gimplify it. Emit code before GSI.
7898 Return the gimple_val holding the result. */
7900 tree
7901 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
7902 tree a)
7904 tree ret;
7906 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
7907 STRIP_NOPS (ret);
7909 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7910 GSI_SAME_STMT);
7915 /* Emit return warnings. */
7917 static unsigned int
7918 execute_warn_function_return (void)
7920 source_location location;
7921 gimple last;
7922 edge e;
7923 edge_iterator ei;
7925 if (!targetm.warn_func_return (cfun->decl))
7926 return 0;
7928 /* If we have a path to EXIT, then we do return. */
/* TREE_THIS_VOLATILE on a function decl marks it noreturn; a noreturn
   function with a predecessor of EXIT does in fact return.  */
7929 if (TREE_THIS_VOLATILE (cfun->decl)
7930 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
7932 location = UNKNOWN_LOCATION;
7933 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7935 last = last_stmt (e->src);
7936 if ((gimple_code (last) == GIMPLE_RETURN
7937 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
7938 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
7939 break;
7941 if (location == UNKNOWN_LOCATION)
7942 location = cfun->function_end_locus;
7943 warning_at (location, 0, "%<noreturn%> function does return");
7946 /* If we see "return;" in some basic block, then we do reach the end
7947 without returning a value. */
7948 else if (warn_return_type
7949 && !TREE_NO_WARNING (cfun->decl)
7950 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
7951 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
7953 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7955 gimple last = last_stmt (e->src);
7956 if (gimple_code (last) == GIMPLE_RETURN
7957 && gimple_return_retval (last) == NULL
7958 && !gimple_no_warning_p (last))
7960 location = gimple_location (last);
7961 if (location == UNKNOWN_LOCATION)
7962 location = cfun->function_end_locus;
7963 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function")\
;
7964 TREE_NO_WARNING (cfun->decl) = 1;
7965 break;
7969 return 0;
7973 /* Given a basic block B which ends with a conditional and has
7974 precisely two successors, determine which of the edges is taken if
7975 the conditional is true and which is taken if the conditional is
7976 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
7978 void
7979 extract_true_false_edges_from_block (basic_block b,
7980 edge *true_edge,
7981 edge *false_edge)
7983 edge e = EDGE_SUCC (b, 0);
7985 if (e->flags & EDGE_TRUE_VALUE)
7987 *true_edge = e;
7988 *false_edge = EDGE_SUCC (b, 1);
7990 else
7992 *false_edge = e;
7993 *true_edge = EDGE_SUCC (b, 1);
7997 namespace {
/* Descriptor for the "*warn_function_return" pass, which emits
   noreturn / missing-return warnings after the CFG is built.  */
7999 const pass_data pass_data_warn_function_return =
8001 GIMPLE_PASS, /* type */
8002 "*warn_function_return", /* name */
8003 OPTGROUP_NONE, /* optinfo_flags */
8004 false, /* has_gate */
8005 true, /* has_execute */
8006 TV_NONE, /* tv_id */
8007 PROP_cfg, /* properties_required */
8008 0, /* properties_provided */
8009 0, /* properties_destroyed */
8010 0, /* todo_flags_start */
8011 0, /* todo_flags_finish */
8014 class pass_warn_function_return : public gimple_opt_pass
8016 public:
8017 pass_warn_function_return (gcc::context *ctxt)
8018 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8021 /* opt_pass methods: */
8022 unsigned int execute () { return execute_warn_function_return (); }
8024 }; // class pass_warn_function_return
8026 } // anon namespace
/* Pass-manager factory for the return-warning pass.  */
8028 gimple_opt_pass *
8029 make_pass_warn_function_return (gcc::context *ctxt)
8031 return new pass_warn_function_return (ctxt);
8034 /* Walk a gimplified function and warn for functions whose return value is
8035 ignored and attribute((warn_unused_result)) is set. This is done before
8036 inlining, so we don't have to worry about that. */
8038 static void
8039 do_warn_unused_result (gimple_seq seq)
8041 tree fdecl, ftype;
8042 gimple_stmt_iterator i;
8044 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8046 gimple g = gsi_stmt (i);
/* Containers (binds, tries, catches, filters) are recursed into;
   only naked calls are checked directly.  */
8048 switch (gimple_code (g))
8050 case GIMPLE_BIND:
8051 do_warn_unused_result (gimple_bind_body (g));
8052 break;
8053 case GIMPLE_TRY:
8054 do_warn_unused_result (gimple_try_eval (g));
8055 do_warn_unused_result (gimple_try_cleanup (g));
8056 break;
8057 case GIMPLE_CATCH:
8058 do_warn_unused_result (gimple_catch_handler (g));
8059 break;
8060 case GIMPLE_EH_FILTER:
8061 do_warn_unused_result (gimple_eh_filter_failure (g));
8062 break;
8064 case GIMPLE_CALL:
8065 if (gimple_call_lhs (g))
8066 break;
8067 if (gimple_call_internal_p (g))
8068 break;
8070 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8071 LHS. All calls whose value is ignored should be
8072 represented like this. Look for the attribute. */
8073 fdecl = gimple_call_fndecl (g);
8074 ftype = gimple_call_fntype (g);
8076 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8078 location_t loc = gimple_location (g);
/* Prefer naming the callee when its decl is known; indirect calls
   only get the generic wording.  */
8080 if (fdecl)
8081 warning_at (loc, OPT_Wunused_result,
8082 "ignoring return value of %qD, "
8083 "declared with attribute warn_unused_result",
8084 fdecl);
8085 else
8086 warning_at (loc, OPT_Wunused_result,
8087 "ignoring return value of function "
8088 "declared with attribute warn_unused_result");
8090 break;
8092 default:
8093 /* Not a container, not a call, or a call whose value is used. */
8094 break;
8099 static unsigned int
8100 run_warn_unused_result (void)
8102 do_warn_unused_result (gimple_body (current_function_decl));
8103 return 0;
8106 static bool
8107 gate_warn_unused_result (void)
8109 return flag_warn_unused_result;
8112 namespace {
/* Descriptor for the "*warn_unused_result" pass, run on the
   gimplified body before inlining.  */
8114 const pass_data pass_data_warn_unused_result =
8116 GIMPLE_PASS, /* type */
8117 "*warn_unused_result", /* name */
8118 OPTGROUP_NONE, /* optinfo_flags */
8119 true, /* has_gate */
8120 true, /* has_execute */
8121 TV_NONE, /* tv_id */
8122 PROP_gimple_any, /* properties_required */
8123 0, /* properties_provided */
8124 0, /* properties_destroyed */
8125 0, /* todo_flags_start */
8126 0, /* todo_flags_finish */
8129 class pass_warn_unused_result : public gimple_opt_pass
8131 public:
8132 pass_warn_unused_result (gcc::context *ctxt)
8133 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
8136 /* opt_pass methods: */
8137 bool gate () { return gate_warn_unused_result (); }
8138 unsigned int execute () { return run_warn_unused_result (); }
8140 }; // class pass_warn_unused_result
8142 } // anon namespace
/* Pass-manager factory for the unused-result warning pass.  */
8144 gimple_opt_pass *
8145 make_pass_warn_unused_result (gcc::context *ctxt)
8147 return new pass_warn_unused_result (ctxt);
8150 /* IPA passes, compilation of earlier functions or inlining
8151 might have changed some properties, such as marked functions nothrow,
8152 pure, const or noreturn.
8153 Remove redundant edges and basic blocks, and create new ones if necessary.
8155 This pass can't be executed as stand alone pass from pass manager, because
8156 in between inlining and this fixup the verify_flow_info would fail. */
8158 unsigned int
8159 execute_fixup_cfg (void)
8161 basic_block bb;
8162 gimple_stmt_iterator gsi;
8163 int todo = gimple_in_ssa_p (cfun) ? TODO_verify_ssa : 0;
8164 gcov_type count_scale;
8165 edge e;
8166 edge_iterator ei;
/* Rescale all profile counts so the body matches the count the
   cgraph node now carries for this function.  */
8168 count_scale
8169 = GCOV_COMPUTE_SCALE (cgraph_get_node (current_function_decl)->count,
8170 ENTRY_BLOCK_PTR->count);
8172 ENTRY_BLOCK_PTR->count = cgraph_get_node (current_function_decl)->count;
8173 EXIT_BLOCK_PTR->count = apply_scale (EXIT_BLOCK_PTR->count,
8174 count_scale);
8176 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
8177 e->count = apply_scale (e->count, count_scale);
8179 FOR_EACH_BB (bb)
8181 bb->count = apply_scale (bb->count, count_scale);
8182 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
8184 gimple stmt = gsi_stmt (gsi);
8185 tree decl = is_gimple_call (stmt)
8186 ? gimple_call_fndecl (stmt)
8187 : NULL;
8188 if (decl)
8190 int flags = gimple_call_flags (stmt);
/* A call newly discovered const/pure cannot make abnormal gotos
   any more; purge those edges and refresh the statement.  */
8191 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
8193 if (gimple_purge_dead_abnormal_call_edges (bb))
8194 todo |= TODO_cleanup_cfg;
8196 if (gimple_in_ssa_p (cfun))
8198 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8199 update_stmt (stmt);
8203 if (flags & ECF_NORETURN
8204 && fixup_noreturn_call (stmt))
8205 todo |= TODO_cleanup_cfg;
8208 if (maybe_clean_eh_stmt (stmt)
8209 && gimple_purge_dead_eh_edges (bb))
8210 todo |= TODO_cleanup_cfg;
8213 FOR_EACH_EDGE (e, ei, bb->succs)
8214 e->count = apply_scale (e->count, count_scale);
8216 /* If we have a basic block with no successors that does not
8217 end with a control statement or a noreturn call end it with
8218 a call to __builtin_unreachable. This situation can occur
8219 when inlining a noreturn call that does in fact return. */
8220 if (EDGE_COUNT (bb->succs) == 0)
8222 gimple stmt = last_stmt (bb);
8223 if (!stmt
8224 || (!is_ctrl_stmt (stmt)
8225 && (!is_gimple_call (stmt)
8226 || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
8228 stmt = gimple_build_call
8229 (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
8230 gimple_stmt_iterator gsi = gsi_last_bb (bb);
8231 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
8235 if (count_scale != REG_BR_PROB_BASE)
8236 compute_function_frequency ();
8238 /* We just processed all calls. */
8239 if (cfun->gimple_df)
8240 vec_free (MODIFIED_NORETURN_CALLS (cfun));
8242 /* Dump a textual representation of the flowgraph. */
8243 if (dump_file)
8244 gimple_dump_cfg (dump_file, dump_flags);
8246 if (current_loops
8247 && (todo & TODO_cleanup_cfg))
8248 loops_state_set (LOOPS_NEED_FIXUP);
8250 return todo;
8253 namespace {
/* Descriptor for the "*free_cfg_annotations" fixup pass wrapping
   execute_fixup_cfg.  */
8255 const pass_data pass_data_fixup_cfg =
8257 GIMPLE_PASS, /* type */
8258 "*free_cfg_annotations", /* name */
8259 OPTGROUP_NONE, /* optinfo_flags */
8260 false, /* has_gate */
8261 true, /* has_execute */
8262 TV_NONE, /* tv_id */
8263 PROP_cfg, /* properties_required */
8264 0, /* properties_provided */
8265 0, /* properties_destroyed */
8266 0, /* todo_flags_start */
8267 0, /* todo_flags_finish */
8270 class pass_fixup_cfg : public gimple_opt_pass
8272 public:
8273 pass_fixup_cfg (gcc::context *ctxt)
8274 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
8277 /* opt_pass methods: */
8278 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
8279 unsigned int execute () { return execute_fixup_cfg (); }
8281 }; // class pass_fixup_cfg
8283 } // anon namespace
/* Pass-manager factory for the CFG fixup pass.  */
8285 gimple_opt_pass *
8286 make_pass_fixup_cfg (gcc::context *ctxt)
8288 return new pass_fixup_cfg (ctxt);
8291 /* Garbage collection support for edge_def. */
8293 extern void gt_ggc_mx (tree&);
8294 extern void gt_ggc_mx (gimple&);
8295 extern void gt_ggc_mx (rtx&);
8296 extern void gt_ggc_mx (basic_block&);
/* GC-mark everything reachable from edge E: its endpoints, pending
   statements (GIMPLE or RTL depending on the current IR), and the
   block of its goto locus.  */
8298 void
8299 gt_ggc_mx (edge_def *e)
8301 tree block = LOCATION_BLOCK (e->goto_locus);
8302 gt_ggc_mx (e->src);
8303 gt_ggc_mx (e->dest);
8304 if (current_ir_type () == IR_GIMPLE)
8305 gt_ggc_mx (e->insns.g);
8306 else
8307 gt_ggc_mx (e->insns.r);
8308 gt_ggc_mx (block);
8311 /* PCH support for edge_def. */
8313 extern void gt_pch_nx (tree&);
8314 extern void gt_pch_nx (gimple&);
8315 extern void gt_pch_nx (rtx&);
8316 extern void gt_pch_nx (basic_block&);
/* Register for PCH everything reachable from edge E, mirroring the
   GC walk in gt_ggc_mx above.  */
8318 void
8319 gt_pch_nx (edge_def *e)
8321 tree block = LOCATION_BLOCK (e->goto_locus);
8322 gt_pch_nx (e->src);
8323 gt_pch_nx (e->dest);
8324 if (current_ir_type () == IR_GIMPLE)
8325 gt_pch_nx (e->insns.g);
8326 else
8327 gt_pch_nx (e->insns.r);
8328 gt_pch_nx (block);
/* Pointer-relocation variant of the PCH walk: apply OP (with COOKIE)
   to each pointer field of edge E instead of marking it.  */
8331 void
8332 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
8334 tree block = LOCATION_BLOCK (e->goto_locus);
8335 op (&(e->src), cookie);
8336 op (&(e->dest), cookie);
8337 if (current_ir_type () == IR_GIMPLE)
8338 op (&(e->insns.g), cookie);
8339 else
8340 op (&(e->insns.r), cookie);
8341 op (&(block), cookie);