Fix dot dump bug
[official-gcc.git] / gcc / tree-cfg.c
blobe8246198147b39e20ae90e4a041a0733ffcfa30c
1 /* Control flow functions for trees.
2 Copyright (C) 2001-2014 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "hash-table.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "trans-mem.h"
28 #include "stor-layout.h"
29 #include "print-tree.h"
30 #include "tm_p.h"
31 #include "basic-block.h"
32 #include "flags.h"
33 #include "function.h"
34 #include "gimple-pretty-print.h"
35 #include "pointer-set.h"
36 #include "tree-ssa-alias.h"
37 #include "internal-fn.h"
38 #include "gimple-fold.h"
39 #include "tree-eh.h"
40 #include "gimple-expr.h"
41 #include "is-a.h"
42 #include "gimple.h"
43 #include "gimple-iterator.h"
44 #include "gimplify-me.h"
45 #include "gimple-walk.h"
46 #include "gimple-ssa.h"
47 #include "cgraph.h"
48 #include "tree-cfg.h"
49 #include "tree-phinodes.h"
50 #include "ssa-iterators.h"
51 #include "stringpool.h"
52 #include "tree-ssanames.h"
53 #include "tree-ssa-loop-manip.h"
54 #include "tree-ssa-loop-niter.h"
55 #include "tree-into-ssa.h"
56 #include "expr.h"
57 #include "tree-dfa.h"
58 #include "tree-ssa.h"
59 #include "tree-dump.h"
60 #include "tree-pass.h"
61 #include "diagnostic-core.h"
62 #include "except.h"
63 #include "cfgloop.h"
64 #include "tree-ssa-propagate.h"
65 #include "value-prof.h"
66 #include "tree-inline.h"
67 #include "target.h"
68 #include "tree-ssa-live.h"
69 #include "omp-low.h"
70 #include "tree-cfgcleanup.h"
71 #include "wide-int.h"
72 #include "wide-int-print.h"
/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  Grown on demand by
   create_bb; 20 is just a reasonable starting size for small functions.  */
static const int initial_cfg_capacity = 20;
/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

/* NULL when recording is inactive; see start_recording_case_labels /
   end_recording_case_labels.  */
static struct pointer_map_t *edge_to_cases;
/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  Used by end_recording_case_labels to
   re-group case labels only for switches that actually changed.  */

static bitmap touched_switch_bbs;
/* CFG statistics.  */
struct cfg_stats_d
{
  /* Number of labels merged into their predecessor block's label.  */
  long num_merged_labels;
};

/* File-wide accumulator, zeroed at the start of build_gimple_cfg.  */
static struct cfg_stats_d cfg_stats;
/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  /* Source location (keyed by its line, see locus_discrim_hasher).  */
  location_t locus;
  /* Last discriminator value handed out for this line.  */
  int discriminator;
};
/* Hashtable helpers.  */

/* Hasher for the locus -> discriminator map; entries are freed with
   free () via typed_free_remove.  */
struct locus_discrim_hasher : typed_free_remove <locus_discrim_map>
{
  typedef locus_discrim_map value_type;
  typedef locus_discrim_map compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};
/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.
   Note that only the line number is hashed (and compared, below), so
   locations on the same line collide into one entry by design.  */

inline hashval_t
locus_discrim_hasher::hash (const value_type *item)
{
  return LOCATION_LINE (item->locus);
}
/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  Two loci compare
   equal when they are on the same line, matching the hash above.  */

inline bool
locus_discrim_hasher::equal (const value_type *a, const compare_type *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

/* The per-locus discriminator table; live only between the create ()
   and dispose () calls in build_gimple_cfg.  */
static hash_table <locus_discrim_hasher> discriminator_per_locus;
/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gimple);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (basic_block, tree);
static tree find_case_label_for_value (gimple, tree);
/* Set up an empty CFG for function FN: allocate the basic-block and
   label-to-block arrays and link the ENTRY and EXIT blocks together.
   After this, FN has exactly the NUM_FIXED_BLOCKS fake blocks and no
   real ones.  */

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
                         initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
                         initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  /* With no real blocks yet, entry falls straight through to exit.  */
  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}
/* Convenience wrapper: set up an empty CFG for the current function.  */

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}
209 /*---------------------------------------------------------------------------
210 Create basic blocks
211 ---------------------------------------------------------------------------*/
/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.

   Phase order matters here: blocks are created first, dead labels are
   purged before case labels are grouped (so grouping sees the merged
   labels), and edges are created last.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
                           n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  The discriminator table is only
     needed while assigning discriminators, so it is created and disposed
     of here.  */
  discriminator_per_locus.create (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  discriminator_per_locus.dispose ();
}
/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.

   Each consumed IFN_ANNOTATE call is replaced by a plain assignment of
   its first argument to its LHS, so uses of the annotation's value keep
   working after the call is gone.  */

static void
replace_loop_annotate ()
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* Only look at loops whose header ends in a GIMPLE_COND; the
         annotations are expected right before that condition.  */
      gsi = gsi_last_bb (loop->header);
      stmt = gsi_stmt (gsi);
      if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
        continue;
      /* Walk backwards over the run of IFN_ANNOTATE calls preceding
         the condition, applying each one to LOOP.  */
      for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
        {
          stmt = gsi_stmt (gsi);
          if (gimple_code (stmt) != GIMPLE_CALL)
            break;
          if (!gimple_call_internal_p (stmt)
              || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
            break;
          switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
            {
            case annot_expr_ivdep_kind:
              loop->safelen = INT_MAX;
              break;
            case annot_expr_no_vector_kind:
              loop->dont_vectorize = true;
              break;
            case annot_expr_vector_kind:
              loop->force_vectorize = true;
              cfun->has_force_vectorize_loops = true;
              break;
            default:
              gcc_unreachable ();
            }
          /* Replace the annotation call with LHS = ARG0.  */
          stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                      gimple_call_arg (stmt, 0));
          gsi_replace (&gsi, stmt, true);
        }
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.
     Any annotation still present was not associated with a recognized
     loop condition above; warn and strip it the same way.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
        {
          stmt = gsi_stmt (gsi);
          if (gimple_code (stmt) != GIMPLE_CALL)
            break;
          if (!gimple_call_internal_p (stmt)
              || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
            break;
          switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
            {
            case annot_expr_ivdep_kind:
            case annot_expr_no_vector_kind:
            case annot_expr_vector_kind:
              break;
            default:
              gcc_unreachable ();
            }
          warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
          stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                      gimple_call_arg (stmt, 0));
          gsi_replace (&gsi, stmt, true);
        }
    }
}
/* Execute function for the "cfg" pass: build the CFG from the current
   function's gimple body, detach the body (ownership moves into the
   CFG's blocks), clean up, initialize the loop structures, and fold
   loop annotations into them.  Always returns 0 (no extra TODOs).  */

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  /* The statements now live in basic blocks; drop the body pointer so
     it is not seen twice.  */
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}
namespace {

/* Pass descriptor for the CFG-building pass; provides PROP_cfg and
   PROP_loops to later passes.  */
const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_execute */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace
/* Factory for the "cfg" pass; caller (the pass manager) owns the
   returned object.  */

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}
386 /* Return true if T is a computed goto. */
388 bool
389 computed_goto_p (gimple t)
391 return (gimple_code (t) == GIMPLE_GOTO
392 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      /* Pick whichever successor is NOT E's destination.  */
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
        other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      /* __builtin_unreachable () blocks have no successors.  */
      if (EDGE_COUNT (other_bb->succs) == 0)
        {
          gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
          gimple stmt;

          if (gsi_end_p (gsi))
            return false;
          stmt = gsi_stmt (gsi);
          /* Skip debug stmts and clobbers; only a real statement can
             be the __builtin_unreachable () call.  */
          while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
            {
              gsi_next (&gsi);
              if (gsi_end_p (gsi))
                return false;
              stmt = gsi_stmt (gsi);
            }
          return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
        }
    }
  return false;
}
/* Build a flowgraph for the sequence of stmts SEQ: split SEQ into basic
   blocks at statements that must start a block (labels, etc.) or after
   statements that must end one (control transfers).  Blocks are chained
   after the ENTRY block in statement order.  */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_seq)
            gsi_split_seq_before (&i, &seq);
          bb = create_basic_block (seq, NULL, bb);
          start_new_block = false;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration.  */
      if (stmt_ends_bb_p (stmt))
        {
          /* If the stmt can make abnormal goto use a new temporary
             for the assignment to the LHS.  This makes sure the old value
             of the LHS is available on the abnormal edge.  Otherwise
             we will end up with overlapping life-ranges for abnormal
             SSA names.  */
          if (gimple_has_lhs (stmt)
              && stmt_can_make_abnormal_goto (stmt)
              && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
            {
              tree lhs = gimple_get_lhs (stmt);
              tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
              /* Insert "lhs = tmp" after STMT and make STMT write to
                 TMP instead, preserving location and scope.  */
              gimple s = gimple_build_assign (lhs, tmp);
              gimple_set_location (s, gimple_location (stmt));
              gimple_set_block (s, gimple_block (stmt));
              gimple_set_lhs (stmt, tmp);
              if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
                  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
                DECL_GIMPLE_REG_P (tmp) = 1;
              gsi_insert_after (&i, s, GSI_SAME_STMT);
            }
          start_new_block = true;
        }

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}
/* Create and return a new empty basic block after bb AFTER.  H is the
   (optional) gimple_seq for the new block; E must be NULL (this hook
   signature is shared with the RTL CFG hooks, which use E).  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed; grow by 25% to amortize
     reallocation cost.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
        (last_basic_block_for_fn (cfun)
         + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}
547 /*---------------------------------------------------------------------------
548 Edge creation
549 ---------------------------------------------------------------------------*/
/* Fold COND_EXPR_COND of each COND_EXPR.  For every block ending in a
   GIMPLE_COND, try to fold the condition; if it folds to a constant,
   rewrite the condition to a canonical always-false / always-true form.
   Overflow warnings are deferred so they are only emitted when the
   fold actually decided the branch.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        {
          location_t loc = gimple_location (stmt);
          tree cond;
          bool zerop, onep;

          fold_defer_overflow_warnings ();
          cond = fold_binary_loc (loc, gimple_cond_code (stmt),
                                  boolean_type_node,
                                  gimple_cond_lhs (stmt),
                                  gimple_cond_rhs (stmt));
          if (cond)
            {
              zerop = integer_zerop (cond);
              onep = integer_onep (cond);
            }
          else
            zerop = onep = false;

          /* Emit deferred warnings only if the condition was decided.  */
          fold_undefer_overflow_warnings (zerop || onep,
                                          stmt,
                                          WARN_STRICT_OVERFLOW_CONDITIONAL);
          if (zerop)
            gimple_cond_make_false (stmt);
          else if (onep)
            gimple_cond_make_true (stmt);
        }
    }
}
/* If basic block BB has an abnormal edge to a basic block
   containing IFN_ABNORMAL_DISPATCHER internal call, return
   that the dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    /* Abnormal but not EH: EH edges also carry EDGE_ABNORMAL, so mask
       both bits and require exactly EDGE_ABNORMAL.  */
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
        gimple_stmt_iterator gsi
          = gsi_start_nondebug_after_labels_bb (e->dest);
        gimple g = gsi_stmt (gsi);
        if (g
            && is_gimple_call (g)
            && gimple_call_internal_p (g)
            && gimple_call_internal_fn (g) == IFN_ABNORMAL_DISPATCHER)
          return e->dest;
      }
  return NULL;
}
/* Helper function for make_edges.  Create a basic block with
   ABNORMAL_DISPATCHER internal call in it if needed, and
   create abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.

   DISPATCHER_BBS caches the dispatchers already created: one pair
   (call dispatcher, computed-goto dispatcher) per OMP region when
   BB_TO_OMP_IDX is non-NULL, a single pair otherwise.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
                       basic_block for_bb, int *bb_to_omp_idx,
                       auto_vec<basic_block> *bbs, bool computed_goto)
{
  /* Slot 0 holds the call dispatcher, slot 1 the computed-goto one.  */
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      /* Per-OMP-region dispatcher pair; INNER marks non-outermost
         regions (used to flag the dispatcher call below).  */
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
        inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
         abnormal edges to this dispatcher.  If there are none, return
         early.  */
      if (bb_to_omp_idx == NULL)
        {
          if (bbs->is_empty ())
            return;
        }
      else
        {
          /* Only sources in FOR_BB's OMP region count; BB is NULL
             after the loop if none matched.  */
          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
              break;
          if (bb == NULL)
            return;
        }

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, NULL, for_bb);
      if (computed_goto)
        {
          /* Factor computed gotos into a common computed goto site.  Also
             record the location of that site so that we can un-factor the
             gotos after we have converted back to normal form.  */
          gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

          /* Create the destination of the factored goto.  Each original
             computed goto will put its desired destination into this
             variable and jump to the label we create immediately below.  */
          tree var = create_tmp_var (ptr_type_node, "gotovar");

          /* Build a label for the new block which will contain the
             factored computed goto.  */
          tree factored_label_decl
            = create_artificial_label (UNKNOWN_LOCATION);
          gimple factored_computed_goto_label
            = gimple_build_label (factored_label_decl);
          gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

          /* Build our new computed goto.  */
          gimple factored_computed_goto = gimple_build_goto (var);
          gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            {
              if (bb_to_omp_idx
                  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
                continue;

              gsi = gsi_last_bb (bb);
              gimple last = gsi_stmt (gsi);

              gcc_assert (computed_goto_p (last));

              /* Copy the original computed goto's destination into VAR.  */
              gimple assignment
                = gimple_build_assign (var, gimple_goto_dest (last));
              gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

              /* And re-vector the computed goto to the new destination.  */
              edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
              e->goto_locus = gimple_location (last);
              gsi_remove (&gsi, true);
            }
        }
      else
        {
          /* The dispatcher call's argument distinguishes inner OMP
             regions from the outermost one.  */
          tree arg = inner ? boolean_true_node : boolean_false_node;
          gimple g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
                                                 1, arg);
          gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
          gsi_insert_after (&gsi, g, GSI_NEW_STMT);

          /* Create predecessor edges of the dispatcher.  */
          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            {
              if (bb_to_omp_idx
                  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
                continue;
              make_edge (bb, *dispatcher, EDGE_ABNORMAL);
            }
        }
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}
/* Join all the blocks in the flowgraph: add outgoing edges for every
   basic block according to its last statement, then factor abnormal
   edges (computed gotos, non-local gotos, setjmp-like calls) through
   per-region dispatcher blocks.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  /* Sources of abnormal edges, collected during the main walk:
     blocks ending in computed gotos vs. blocks ending in calls that
     can transfer control abnormally.  */
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  /* Maps bb index -> OMP region index; allocated lazily on the first
     OMP construct seen.  */
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
             BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
             EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (bb_to_omp_idx)
        bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      if (last)
        {
          enum gimple_code code = gimple_code (last);
          switch (code)
            {
            case GIMPLE_GOTO:
              if (make_goto_expr_edges (bb))
                ab_edge_goto.safe_push (bb);
              fallthru = false;
              break;
            case GIMPLE_RETURN:
              {
                edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
                e->goto_locus = gimple_location (last);
                fallthru = false;
              }
              break;
            case GIMPLE_COND:
              make_cond_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_SWITCH:
              make_gimple_switch_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RESX:
              make_eh_edges (last);
              fallthru = false;
              break;
            case GIMPLE_EH_DISPATCH:
              fallthru = make_eh_dispatch_edges (last);
              break;

            case GIMPLE_CALL:
              /* If this function receives a nonlocal goto, then we need to
                 make edges from this call site to all the nonlocal goto
                 handlers.  */
              if (stmt_can_make_abnormal_goto (last))
                ab_edge_call.safe_push (bb);

              /* If this statement has reachable exception handlers, then
                 create abnormal edges to them.  */
              make_eh_edges (last);

              /* BUILTIN_RETURN is really a return statement.  */
              if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
                {
                  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
                  fallthru = false;
                }
              /* Some calls are known not to return.  */
              else
                fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
              break;

            case GIMPLE_ASSIGN:
              /* A GIMPLE_ASSIGN may throw internally and thus be considered
                 control-altering.  */
              if (is_ctrl_altering_stmt (last))
                make_eh_edges (last);
              fallthru = true;
              break;

            case GIMPLE_ASM:
              make_gimple_asm_edges (bb);
              fallthru = true;
              break;

            CASE_GIMPLE_OMP:
              fallthru = make_gimple_omp_edges (bb, &cur_region,
                                                &cur_omp_region_idx);
              /* Allocate the region map lazily on the first OMP region.  */
              if (cur_region && bb_to_omp_idx == NULL)
                bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
              break;

            case GIMPLE_TRANSACTION:
              {
                tree abort_label = gimple_transaction_label (last);
                if (abort_label)
                  make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
                fallthru = true;
              }
              break;

            default:
              gcc_assert (!stmt_ends_bb_p (last));
              fallthru = true;
            }
        }
      else
        fallthru = true;

      if (fallthru)
        make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      /* With OMP regions, one (call, computed-goto) dispatcher pair
         per region; otherwise the single stack-allocated pair above.  */
      if (bb_to_omp_idx)
        dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
        {
          for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
            {
              gimple label_stmt = gsi_stmt (gsi);
              tree target;

              if (gimple_code (label_stmt) != GIMPLE_LABEL)
                break;

              target = gimple_label_label (label_stmt);

              /* Make an edge to every label block that has been marked as a
                 potential target for a computed goto or a non-local goto.  */
              if (FORCED_LABEL (target))
                handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                       &ab_edge_goto, true);
              if (DECL_NONLOCAL (target))
                {
                  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                         &ab_edge_call, false);
                  break;
                }
            }

          if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
            gsi_next_nondebug (&gsi);
          if (!gsi_end_p (gsi))
            {
              /* Make an edge to every setjmp-like call.  */
              gimple call_stmt = gsi_stmt (gsi);
              if (is_gimple_call (call_stmt)
                  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
                      || gimple_call_builtin_p (call_stmt,
                                                BUILT_IN_SETJMP_RECEIVER)))
                handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                       &ab_edge_call, false);
            }
        }

      if (bb_to_omp_idx)
        XDELETE (dispatcher_bbs);
    }

  /* XDELETE tolerates NULL, so no guard needed.  */
  XDELETE (bb_to_omp_idx);
  free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}
/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  Allocates the map entry on first use of a line;
   entries start at 0, so the first value returned is 1.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  /* Keyed by line number only — see locus_discrim_hasher.  */
  slot = discriminator_per_locus.find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
955 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
957 static bool
958 same_line_p (location_t locus1, location_t locus2)
960 expanded_location from, to;
962 if (locus1 == locus2)
963 return true;
965 from = expand_location (locus1);
966 to = expand_location (locus2);
968 if (from.line != to.line)
969 return false;
970 if (from.file == to.file)
971 return true;
972 return (from.file != NULL
973 && to.file != NULL
974 && filename_cmp (from.file, to.file) == 0);
/* Assign discriminators to each basic block.  When a successor block
   shares the source line of BB's last statement, one of the two blocks
   gets a fresh discriminator so sample-based profiling can tell them
   apart.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
        continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          /* Note: this LAST shadows the outer one on purpose — it is
             the destination block's last statement.  */
          gimple first = first_non_label_stmt (e->dest);
          gimple last = last_stmt (e->dest);
          if ((first && same_line_p (locus, gimple_location (first)))
              || (last && same_line_p (locus, gimple_location (last))))
            {
              /* Prefer to give BB the new discriminator if the
                 destination already has one and BB does not; otherwise
                 discriminate the destination.  */
              if (e->dest->discriminator != 0 && bb->discriminator == 0)
                bb->discriminator = next_discriminator_for_locus (locus);
              else
                e->dest->discriminator = next_discriminator_for_locus (locus);
            }
        }
    }
}
/* Create the edges for a GIMPLE_COND starting at block BB.  Adds the
   true and false edges to the blocks holding the branch labels, then
   clears the labels from the statement (the edges now carry that
   information).  */

static void
make_cond_expr_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  /* NOTE(review): only the second make_edge result is NULL-checked.
     This looks intentional — presumably make_edge returns NULL only
     when an equivalent edge already exists, which can happen here only
     for the false edge (when then_bb == else_bb) — confirm against
     make_edge's contract in cfg.c.  */
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the TREE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  Returns true so pointer_map_traverse keeps iterating.  */

static bool
edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
                       void *data ATTRIBUTE_UNUSED)
{
  tree t, next;

  /* Walk the CASE_CHAIN list rooted at *VALUE, unlinking as we go.  */
  for (t = (tree) *value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  *value = NULL;
  return true;
}
/* Start recording information mapping edges to case labels.  Must be
   paired with end_recording_case_labels; nesting is not allowed (the
   assert catches a second start without an intervening end).  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = pointer_map_create ();
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}
1077 /* Return nonzero if we are recording information for case labels. */
1079 static bool
1080 recording_case_labels_p (void)
1082 return (edge_to_cases != NULL);
/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  Also re-group the case
   labels of every switch whose edges were touched while recording.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
  pointer_map_destroy (edge_to_cases);
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      /* The block may have been removed since it was recorded.  */
      if (bb)
        {
          gimple stmt = last_stmt (bb);
          if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
            group_case_labels_stmt (stmt);
        }
    }
  BITMAP_FREE (touched_switch_bbs);
}
/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gimple t)
{
  void **slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = pointer_map_contains (edge_to_cases, e);
  if (slot)
    return (tree) *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
         a new chain.  */
      slot = pointer_map_insert (edge_to_cases, this_edge);
      CASE_CHAIN (elt) = (tree) *slot;
      *slot = elt;
    }

  /* Guaranteed to hit now: the loop above inserted a slot for every
     edge out of E->src, including E itself.  */
  return (tree) *pointer_map_contains (edge_to_cases, e);
}
1150 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1152 static void
1153 make_gimple_switch_edges (basic_block bb)
1155 gimple entry = last_stmt (bb);
1156 size_t i, n;
1158 n = gimple_switch_num_labels (entry);
1160 for (i = 0; i < n; ++i)
1162 tree lab = CASE_LABEL (gimple_switch_label (entry, i));
1163 basic_block label_bb = label_to_block (lab);
1164 make_edge (bb, label_bb, 0);
1169 /* Return the basic block holding label DEST. */
1171 basic_block
1172 label_to_block_fn (struct function *ifun, tree dest)
1174 int uid = LABEL_DECL_UID (dest);
1176 /* We would die hard when faced by an undefined label. Emit a label to
1177 the very first basic block. This will hopefully make even the dataflow
1178 and undefined variable warnings quite right. */
1179 if (seen_error () && uid < 0)
1181 gimple_stmt_iterator gsi =
1182 gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
1183 gimple stmt;
1185 stmt = gimple_build_label (dest);
1186 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
1187 uid = LABEL_DECL_UID (dest);
1189 if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
1190 return NULL;
1191 return (*ifun->cfg->x_label_to_block_map)[uid];
1194 /* Create edges for a goto statement at block BB. Returns true
1195 if abnormal edges should be created. */
1197 static bool
1198 make_goto_expr_edges (basic_block bb)
1200 gimple_stmt_iterator last = gsi_last_bb (bb);
1201 gimple goto_t = gsi_stmt (last);
1203 /* A simple GOTO creates normal edges. */
1204 if (simple_goto_p (goto_t))
1206 tree dest = gimple_goto_dest (goto_t);
1207 basic_block label_bb = label_to_block (dest);
1208 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
1209 e->goto_locus = gimple_location (goto_t);
1210 gsi_remove (&last, true);
1211 return false;
1214 /* A computed GOTO creates abnormal edges. */
1215 return true;
1218 /* Create edges for an asm statement with labels at block BB. */
1220 static void
1221 make_gimple_asm_edges (basic_block bb)
1223 gimple stmt = last_stmt (bb);
1224 int i, n = gimple_asm_nlabels (stmt);
1226 for (i = 0; i < n; ++i)
1228 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1229 basic_block label_bb = label_to_block (label);
1230 make_edge (bb, label_bb, 0);
1234 /*---------------------------------------------------------------------------
1235 Flowgraph analysis
1236 ---------------------------------------------------------------------------*/
1238 /* Cleanup useless labels in basic blocks. This is something we wish
1239 to do early because it allows us to group case labels before creating
1240 the edges for the CFG, and it speeds up block statement iterators in
1241 all passes later on.
1242 We rerun this pass after CFG is created, to get rid of the labels that
1243 are no longer referenced. After then we do not run it any more, since
1244 (almost) no new labels should be created. */
1246 /* A map from basic block index to the leading label of that block. */
1247 static struct label_record
1249 /* The label. */
1250 tree label;
1252 /* True if the label is referenced from somewhere. */
1253 bool used;
1254 } *label_for_bb;
1256 /* Given LABEL return the first label in the same basic block. */
1258 static tree
1259 main_block_label (tree label)
1261 basic_block bb = label_to_block (label);
1262 tree main_label = label_for_bb[bb->index].label;
1264 /* label_to_block possibly inserted undefined label into the chain. */
1265 if (!main_label)
1267 label_for_bb[bb->index].label = label;
1268 main_label = label;
1271 label_for_bb[bb->index].used = true;
1272 return main_label;
1275 /* Clean up redundant labels within the exception tree. */
1277 static void
1278 cleanup_dead_labels_eh (void)
1280 eh_landing_pad lp;
1281 eh_region r;
1282 tree lab;
1283 int i;
1285 if (cfun->eh == NULL)
1286 return;
1288 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1289 if (lp && lp->post_landing_pad)
1291 lab = main_block_label (lp->post_landing_pad);
1292 if (lab != lp->post_landing_pad)
1294 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1295 EH_LANDING_PAD_NR (lab) = lp->index;
1299 FOR_ALL_EH_REGION (r)
1300 switch (r->type)
1302 case ERT_CLEANUP:
1303 case ERT_MUST_NOT_THROW:
1304 break;
1306 case ERT_TRY:
1308 eh_catch c;
1309 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
1311 lab = c->label;
1312 if (lab)
1313 c->label = main_block_label (lab);
1316 break;
1318 case ERT_ALLOWED_EXCEPTIONS:
1319 lab = r->u.allowed.label;
1320 if (lab)
1321 r->u.allowed.label = main_block_label (lab);
1322 break;
1327 /* Cleanup redundant labels. This is a three-step process:
1328 1) Find the leading label for each block.
1329 2) Redirect all references to labels to the leading labels.
1330 3) Cleanup all useless labels. */
1332 void
1333 cleanup_dead_labels (void)
1335 basic_block bb;
1336 label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));
1338 /* Find a suitable label for each block. We use the first user-defined
1339 label if there is one, or otherwise just the first label we see. */
1340 FOR_EACH_BB_FN (bb, cfun)
1342 gimple_stmt_iterator i;
1344 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1346 tree label;
1347 gimple stmt = gsi_stmt (i);
1349 if (gimple_code (stmt) != GIMPLE_LABEL)
1350 break;
1352 label = gimple_label_label (stmt);
1354 /* If we have not yet seen a label for the current block,
1355 remember this one and see if there are more labels. */
1356 if (!label_for_bb[bb->index].label)
1358 label_for_bb[bb->index].label = label;
1359 continue;
1362 /* If we did see a label for the current block already, but it
1363 is an artificially created label, replace it if the current
1364 label is a user defined label. */
1365 if (!DECL_ARTIFICIAL (label)
1366 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
1368 label_for_bb[bb->index].label = label;
1369 break;
1374 /* Now redirect all jumps/branches to the selected label.
1375 First do so for each block ending in a control statement. */
1376 FOR_EACH_BB_FN (bb, cfun)
1378 gimple stmt = last_stmt (bb);
1379 tree label, new_label;
1381 if (!stmt)
1382 continue;
1384 switch (gimple_code (stmt))
1386 case GIMPLE_COND:
1387 label = gimple_cond_true_label (stmt);
1388 if (label)
1390 new_label = main_block_label (label);
1391 if (new_label != label)
1392 gimple_cond_set_true_label (stmt, new_label);
1395 label = gimple_cond_false_label (stmt);
1396 if (label)
1398 new_label = main_block_label (label);
1399 if (new_label != label)
1400 gimple_cond_set_false_label (stmt, new_label);
1402 break;
1404 case GIMPLE_SWITCH:
1406 size_t i, n = gimple_switch_num_labels (stmt);
1408 /* Replace all destination labels. */
1409 for (i = 0; i < n; ++i)
1411 tree case_label = gimple_switch_label (stmt, i);
1412 label = CASE_LABEL (case_label);
1413 new_label = main_block_label (label);
1414 if (new_label != label)
1415 CASE_LABEL (case_label) = new_label;
1417 break;
1420 case GIMPLE_ASM:
1422 int i, n = gimple_asm_nlabels (stmt);
1424 for (i = 0; i < n; ++i)
1426 tree cons = gimple_asm_label_op (stmt, i);
1427 tree label = main_block_label (TREE_VALUE (cons));
1428 TREE_VALUE (cons) = label;
1430 break;
1433 /* We have to handle gotos until they're removed, and we don't
1434 remove them until after we've created the CFG edges. */
1435 case GIMPLE_GOTO:
1436 if (!computed_goto_p (stmt))
1438 label = gimple_goto_dest (stmt);
1439 new_label = main_block_label (label);
1440 if (new_label != label)
1441 gimple_goto_set_dest (stmt, new_label);
1443 break;
1445 case GIMPLE_TRANSACTION:
1447 tree label = gimple_transaction_label (stmt);
1448 if (label)
1450 tree new_label = main_block_label (label);
1451 if (new_label != label)
1452 gimple_transaction_set_label (stmt, new_label);
1455 break;
1457 default:
1458 break;
1462 /* Do the same for the exception region tree labels. */
1463 cleanup_dead_labels_eh ();
1465 /* Finally, purge dead labels. All user-defined labels and labels that
1466 can be the target of non-local gotos and labels which have their
1467 address taken are preserved. */
1468 FOR_EACH_BB_FN (bb, cfun)
1470 gimple_stmt_iterator i;
1471 tree label_for_this_bb = label_for_bb[bb->index].label;
1473 if (!label_for_this_bb)
1474 continue;
1476 /* If the main label of the block is unused, we may still remove it. */
1477 if (!label_for_bb[bb->index].used)
1478 label_for_this_bb = NULL;
1480 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
1482 tree label;
1483 gimple stmt = gsi_stmt (i);
1485 if (gimple_code (stmt) != GIMPLE_LABEL)
1486 break;
1488 label = gimple_label_label (stmt);
1490 if (label == label_for_this_bb
1491 || !DECL_ARTIFICIAL (label)
1492 || DECL_NONLOCAL (label)
1493 || FORCED_LABEL (label))
1494 gsi_next (&i);
1495 else
1496 gsi_remove (&i, true);
1500 free (label_for_bb);
1503 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1504 the ones jumping to the same label.
1505 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1507 void
1508 group_case_labels_stmt (gimple stmt)
1510 int old_size = gimple_switch_num_labels (stmt);
1511 int i, j, new_size = old_size;
1512 basic_block default_bb = NULL;
1514 default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));
1516 /* Look for possible opportunities to merge cases. */
1517 i = 1;
1518 while (i < old_size)
1520 tree base_case, base_high;
1521 basic_block base_bb;
1523 base_case = gimple_switch_label (stmt, i);
1525 gcc_assert (base_case);
1526 base_bb = label_to_block (CASE_LABEL (base_case));
1528 /* Discard cases that have the same destination as the
1529 default case. */
1530 if (base_bb == default_bb)
1532 gimple_switch_set_label (stmt, i, NULL_TREE);
1533 i++;
1534 new_size--;
1535 continue;
1538 base_high = CASE_HIGH (base_case)
1539 ? CASE_HIGH (base_case)
1540 : CASE_LOW (base_case);
1541 i++;
1543 /* Try to merge case labels. Break out when we reach the end
1544 of the label vector or when we cannot merge the next case
1545 label with the current one. */
1546 while (i < old_size)
1548 tree merge_case = gimple_switch_label (stmt, i);
1549 basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
1550 wide_int bhp1 = wi::add (base_high, 1);
1552 /* Merge the cases if they jump to the same place,
1553 and their ranges are consecutive. */
1554 if (merge_bb == base_bb
1555 && wi::eq_p (CASE_LOW (merge_case), bhp1))
1557 base_high = CASE_HIGH (merge_case) ?
1558 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1559 CASE_HIGH (base_case) = base_high;
1560 gimple_switch_set_label (stmt, i, NULL_TREE);
1561 new_size--;
1562 i++;
1564 else
1565 break;
1569 /* Compress the case labels in the label vector, and adjust the
1570 length of the vector. */
1571 for (i = 0, j = 0; i < new_size; i++)
1573 while (! gimple_switch_label (stmt, j))
1574 j++;
1575 gimple_switch_set_label (stmt, i,
1576 gimple_switch_label (stmt, j++));
1579 gcc_assert (new_size <= old_size);
1580 gimple_switch_set_num_labels (stmt, new_size);
1583 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1584 and scan the sorted vector of cases. Combine the ones jumping to the
1585 same label. */
1587 void
1588 group_case_labels (void)
1590 basic_block bb;
1592 FOR_EACH_BB_FN (bb, cfun)
1594 gimple stmt = last_stmt (bb);
1595 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1596 group_case_labels_stmt (stmt);
1600 /* Checks whether we can merge block B into block A. */
1602 static bool
1603 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1605 gimple stmt;
1606 gimple_stmt_iterator gsi;
1608 if (!single_succ_p (a))
1609 return false;
1611 if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1612 return false;
1614 if (single_succ (a) != b)
1615 return false;
1617 if (!single_pred_p (b))
1618 return false;
1620 if (b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1621 return false;
1623 /* If A ends by a statement causing exceptions or something similar, we
1624 cannot merge the blocks. */
1625 stmt = last_stmt (a);
1626 if (stmt && stmt_ends_bb_p (stmt))
1627 return false;
1629 /* Do not allow a block with only a non-local label to be merged. */
1630 if (stmt
1631 && gimple_code (stmt) == GIMPLE_LABEL
1632 && DECL_NONLOCAL (gimple_label_label (stmt)))
1633 return false;
1635 /* Examine the labels at the beginning of B. */
1636 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
1638 tree lab;
1639 stmt = gsi_stmt (gsi);
1640 if (gimple_code (stmt) != GIMPLE_LABEL)
1641 break;
1642 lab = gimple_label_label (stmt);
1644 /* Do not remove user forced labels or for -O0 any user labels. */
1645 if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1646 return false;
1649 /* Protect the loop latches. */
1650 if (current_loops && b->loop_father->latch == b)
1651 return false;
1653 /* It must be possible to eliminate all phi nodes in B. If ssa form
1654 is not up-to-date and a name-mapping is registered, we cannot eliminate
1655 any phis. Symbols marked for renaming are never a problem though. */
1656 for (gsi = gsi_start_phis (b); !gsi_end_p (gsi); gsi_next (&gsi))
1658 gimple phi = gsi_stmt (gsi);
1659 /* Technically only new names matter. */
1660 if (name_registered_for_update_p (PHI_RESULT (phi)))
1661 return false;
1664 /* When not optimizing, don't merge if we'd lose goto_locus. */
1665 if (!optimize
1666 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1668 location_t goto_locus = single_succ_edge (a)->goto_locus;
1669 gimple_stmt_iterator prev, next;
1670 prev = gsi_last_nondebug_bb (a);
1671 next = gsi_after_labels (b);
1672 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1673 gsi_next_nondebug (&next);
1674 if ((gsi_end_p (prev)
1675 || gimple_location (gsi_stmt (prev)) != goto_locus)
1676 && (gsi_end_p (next)
1677 || gimple_location (gsi_stmt (next)) != goto_locus))
1678 return false;
1681 return true;
1684 /* Replaces all uses of NAME by VAL. */
1686 void
1687 replace_uses_by (tree name, tree val)
1689 imm_use_iterator imm_iter;
1690 use_operand_p use;
1691 gimple stmt;
1692 edge e;
1694 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1696 /* Mark the block if we change the last stmt in it. */
1697 if (cfgcleanup_altered_bbs
1698 && stmt_ends_bb_p (stmt))
1699 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1701 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1703 replace_exp (use, val);
1705 if (gimple_code (stmt) == GIMPLE_PHI)
1707 e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
1708 if (e->flags & EDGE_ABNORMAL)
1710 /* This can only occur for virtual operands, since
1711 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1712 would prevent replacement. */
1713 gcc_checking_assert (virtual_operand_p (name));
1714 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1719 if (gimple_code (stmt) != GIMPLE_PHI)
1721 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1722 gimple orig_stmt = stmt;
1723 size_t i;
1725 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1726 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1727 only change sth from non-invariant to invariant, and only
1728 when propagating constants. */
1729 if (is_gimple_min_invariant (val))
1730 for (i = 0; i < gimple_num_ops (stmt); i++)
1732 tree op = gimple_op (stmt, i);
1733 /* Operands may be empty here. For example, the labels
1734 of a GIMPLE_COND are nulled out following the creation
1735 of the corresponding CFG edges. */
1736 if (op && TREE_CODE (op) == ADDR_EXPR)
1737 recompute_tree_invariant_for_addr_expr (op);
1740 if (fold_stmt (&gsi))
1741 stmt = gsi_stmt (gsi);
1743 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
1744 gimple_purge_dead_eh_edges (gimple_bb (stmt));
1746 update_stmt (stmt);
1750 gcc_checking_assert (has_zero_uses (name));
1752 /* Also update the trees stored in loop structures. */
1753 if (current_loops)
1755 struct loop *loop;
1757 FOR_EACH_LOOP (loop, 0)
1759 substitute_in_loop_info (loop, name, val);
1764 /* Merge block B into block A. */
1766 static void
1767 gimple_merge_blocks (basic_block a, basic_block b)
1769 gimple_stmt_iterator last, gsi, psi;
1771 if (dump_file)
1772 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1774 /* Remove all single-valued PHI nodes from block B of the form
1775 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1776 gsi = gsi_last_bb (a);
1777 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
1779 gimple phi = gsi_stmt (psi);
1780 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1781 gimple copy;
1782 bool may_replace_uses = (virtual_operand_p (def)
1783 || may_propagate_copy (def, use));
1785 /* In case we maintain loop closed ssa form, do not propagate arguments
1786 of loop exit phi nodes. */
1787 if (current_loops
1788 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1789 && !virtual_operand_p (def)
1790 && TREE_CODE (use) == SSA_NAME
1791 && a->loop_father != b->loop_father)
1792 may_replace_uses = false;
1794 if (!may_replace_uses)
1796 gcc_assert (!virtual_operand_p (def));
1798 /* Note that just emitting the copies is fine -- there is no problem
1799 with ordering of phi nodes. This is because A is the single
1800 predecessor of B, therefore results of the phi nodes cannot
1801 appear as arguments of the phi nodes. */
1802 copy = gimple_build_assign (def, use);
1803 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1804 remove_phi_node (&psi, false);
1806 else
1808 /* If we deal with a PHI for virtual operands, we can simply
1809 propagate these without fussing with folding or updating
1810 the stmt. */
1811 if (virtual_operand_p (def))
1813 imm_use_iterator iter;
1814 use_operand_p use_p;
1815 gimple stmt;
1817 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1818 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1819 SET_USE (use_p, use);
1821 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
1822 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
1824 else
1825 replace_uses_by (def, use);
1827 remove_phi_node (&psi, true);
1831 /* Ensure that B follows A. */
1832 move_block_after (b, a);
1834 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1835 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1837 /* Remove labels from B and set gimple_bb to A for other statements. */
1838 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
1840 gimple stmt = gsi_stmt (gsi);
1841 if (gimple_code (stmt) == GIMPLE_LABEL)
1843 tree label = gimple_label_label (stmt);
1844 int lp_nr;
1846 gsi_remove (&gsi, false);
1848 /* Now that we can thread computed gotos, we might have
1849 a situation where we have a forced label in block B
1850 However, the label at the start of block B might still be
1851 used in other ways (think about the runtime checking for
1852 Fortran assigned gotos). So we can not just delete the
1853 label. Instead we move the label to the start of block A. */
1854 if (FORCED_LABEL (label))
1856 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
1857 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
1859 /* Other user labels keep around in a form of a debug stmt. */
1860 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
1862 gimple dbg = gimple_build_debug_bind (label,
1863 integer_zero_node,
1864 stmt);
1865 gimple_debug_bind_reset_value (dbg);
1866 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
1869 lp_nr = EH_LANDING_PAD_NR (label);
1870 if (lp_nr)
1872 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
1873 lp->post_landing_pad = NULL;
1876 else
1878 gimple_set_bb (stmt, a);
1879 gsi_next (&gsi);
1883 /* When merging two BBs, if their counts are different, the larger count
1884 is selected as the new bb count. This is to handle inconsistent
1885 profiles. */
1886 if (a->loop_father == b->loop_father)
1888 a->count = MAX (a->count, b->count);
1889 a->frequency = MAX (a->frequency, b->frequency);
1892 /* Merge the sequences. */
1893 last = gsi_last_bb (a);
1894 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
1895 set_bb_seq (b, NULL);
1897 if (cfgcleanup_altered_bbs)
1898 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
1902 /* Return the one of two successors of BB that is not reachable by a
1903 complex edge, if there is one. Else, return BB. We use
1904 this in optimizations that use post-dominators for their heuristics,
1905 to catch the cases in C++ where function calls are involved. */
1907 basic_block
1908 single_noncomplex_succ (basic_block bb)
1910 edge e0, e1;
1911 if (EDGE_COUNT (bb->succs) != 2)
1912 return bb;
1914 e0 = EDGE_SUCC (bb, 0);
1915 e1 = EDGE_SUCC (bb, 1);
1916 if (e0->flags & EDGE_COMPLEX)
1917 return e1->dest;
1918 if (e1->flags & EDGE_COMPLEX)
1919 return e0->dest;
1921 return bb;
1924 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1926 void
1927 notice_special_calls (gimple call)
1929 int flags = gimple_call_flags (call);
1931 if (flags & ECF_MAY_BE_ALLOCA)
1932 cfun->calls_alloca = true;
1933 if (flags & ECF_RETURNS_TWICE)
1934 cfun->calls_setjmp = true;
1938 /* Clear flags set by notice_special_calls. Used by dead code removal
1939 to update the flags. */
1941 void
1942 clear_special_calls (void)
1944 cfun->calls_alloca = false;
1945 cfun->calls_setjmp = false;
1948 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1950 static void
1951 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1953 /* Since this block is no longer reachable, we can just delete all
1954 of its PHI nodes. */
1955 remove_phi_nodes (bb);
1957 /* Remove edges to BB's successors. */
1958 while (EDGE_COUNT (bb->succs) > 0)
1959 remove_edge (EDGE_SUCC (bb, 0));
1963 /* Remove statements of basic block BB. */
1965 static void
1966 remove_bb (basic_block bb)
1968 gimple_stmt_iterator i;
1970 if (dump_file)
1972 fprintf (dump_file, "Removing basic block %d\n", bb->index);
1973 if (dump_flags & TDF_DETAILS)
1975 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
1976 fprintf (dump_file, "\n");
1980 if (current_loops)
1982 struct loop *loop = bb->loop_father;
1984 /* If a loop gets removed, clean up the information associated
1985 with it. */
1986 if (loop->latch == bb
1987 || loop->header == bb)
1988 free_numbers_of_iterations_estimates_loop (loop);
1991 /* Remove all the instructions in the block. */
1992 if (bb_seq (bb) != NULL)
1994 /* Walk backwards so as to get a chance to substitute all
1995 released DEFs into debug stmts. See
1996 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
1997 details. */
1998 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2000 gimple stmt = gsi_stmt (i);
2001 if (gimple_code (stmt) == GIMPLE_LABEL
2002 && (FORCED_LABEL (gimple_label_label (stmt))
2003 || DECL_NONLOCAL (gimple_label_label (stmt))))
2005 basic_block new_bb;
2006 gimple_stmt_iterator new_gsi;
2008 /* A non-reachable non-local label may still be referenced.
2009 But it no longer needs to carry the extra semantics of
2010 non-locality. */
2011 if (DECL_NONLOCAL (gimple_label_label (stmt)))
2013 DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
2014 FORCED_LABEL (gimple_label_label (stmt)) = 1;
2017 new_bb = bb->prev_bb;
2018 new_gsi = gsi_start_bb (new_bb);
2019 gsi_remove (&i, false);
2020 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2022 else
2024 /* Release SSA definitions if we are in SSA. Note that we
2025 may be called when not in SSA. For example,
2026 final_cleanup calls this function via
2027 cleanup_tree_cfg. */
2028 if (gimple_in_ssa_p (cfun))
2029 release_defs (stmt);
2031 gsi_remove (&i, true);
2034 if (gsi_end_p (i))
2035 i = gsi_last_bb (bb);
2036 else
2037 gsi_prev (&i);
2041 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2042 bb->il.gimple.seq = NULL;
2043 bb->il.gimple.phi_nodes = NULL;
2047 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2048 predicate VAL, return the edge that will be taken out of the block.
2049 If VAL does not match a unique edge, NULL is returned. */
2051 edge
2052 find_taken_edge (basic_block bb, tree val)
2054 gimple stmt;
2056 stmt = last_stmt (bb);
2058 gcc_assert (stmt);
2059 gcc_assert (is_ctrl_stmt (stmt));
2061 if (val == NULL)
2062 return NULL;
2064 if (!is_gimple_min_invariant (val))
2065 return NULL;
2067 if (gimple_code (stmt) == GIMPLE_COND)
2068 return find_taken_edge_cond_expr (bb, val);
2070 if (gimple_code (stmt) == GIMPLE_SWITCH)
2071 return find_taken_edge_switch_expr (bb, val);
2073 if (computed_goto_p (stmt))
2075 /* Only optimize if the argument is a label, if the argument is
2076 not a label then we can not construct a proper CFG.
2078 It may be the case that we only need to allow the LABEL_REF to
2079 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2080 appear inside a LABEL_EXPR just to be safe. */
2081 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2082 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2083 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2084 return NULL;
2087 gcc_unreachable ();
2090 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2091 statement, determine which of the outgoing edges will be taken out of the
2092 block. Return NULL if either edge may be taken. */
2094 static edge
2095 find_taken_edge_computed_goto (basic_block bb, tree val)
2097 basic_block dest;
2098 edge e = NULL;
2100 dest = label_to_block (val);
2101 if (dest)
2103 e = find_edge (bb, dest);
2104 gcc_assert (e != NULL);
2107 return e;
2110 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2111 statement, determine which of the two edges will be taken out of the
2112 block. Return NULL if either edge may be taken. */
2114 static edge
2115 find_taken_edge_cond_expr (basic_block bb, tree val)
2117 edge true_edge, false_edge;
2119 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2121 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2122 return (integer_zerop (val) ? false_edge : true_edge);
2125 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2126 statement, determine which edge will be taken out of the block. Return
2127 NULL if any edge may be taken. */
2129 static edge
2130 find_taken_edge_switch_expr (basic_block bb, tree val)
2132 basic_block dest_bb;
2133 edge e;
2134 gimple switch_stmt;
2135 tree taken_case;
2137 switch_stmt = last_stmt (bb);
2138 taken_case = find_case_label_for_value (switch_stmt, val);
2139 dest_bb = label_to_block (CASE_LABEL (taken_case));
2141 e = find_edge (bb, dest_bb);
2142 gcc_assert (e);
2143 return e;
2147 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2148 We can make optimal use here of the fact that the case labels are
2149 sorted: We can do a binary search for a case matching VAL. */
2151 static tree
2152 find_case_label_for_value (gimple switch_stmt, tree val)
2154 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2155 tree default_case = gimple_switch_default_label (switch_stmt);
2157 for (low = 0, high = n; high - low > 1; )
2159 size_t i = (high + low) / 2;
2160 tree t = gimple_switch_label (switch_stmt, i);
2161 int cmp;
2163 /* Cache the result of comparing CASE_LOW and val. */
2164 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2166 if (cmp > 0)
2167 high = i;
2168 else
2169 low = i;
2171 if (CASE_HIGH (t) == NULL)
2173 /* A singe-valued case label. */
2174 if (cmp == 0)
2175 return t;
2177 else
2179 /* A case range. We can only handle integer ranges. */
2180 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2181 return t;
2185 return default_case;
2189 /* Dump a basic block on stderr. */
2191 void
2192 gimple_debug_bb (basic_block bb)
2194 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2198 /* Dump basic block with index N on stderr. */
2200 basic_block
2201 gimple_debug_bb_n (int n)
2203 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2204 return BASIC_BLOCK_FOR_FN (cfun, n);
/* Dump the CFG on stderr.

   FLAGS are the same used by the tree dumping functions
   (see TDF_* in dumpfile.h).  */

void
gimple_debug_cfg (int flags)
{
  gimple_dump_cfg (stderr, flags);
}
2220 /* Dump the program showing basic block boundaries on the given FILE.
2222 FLAGS are the same used by the tree dumping functions (see TDF_* in
2223 tree.h). */
2225 void
2226 gimple_dump_cfg (FILE *file, int flags)
2228 if (flags & TDF_DETAILS)
2230 dump_function_header (file, current_function_decl, flags);
2231 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2232 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2233 last_basic_block_for_fn (cfun));
2235 brief_dump_cfg (file, flags | TDF_COMMENT);
2236 fprintf (file, "\n");
2239 if (flags & TDF_STATS)
2240 dump_cfg_stats (file);
2242 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2246 /* Dump CFG statistics on FILE. */
2248 void
2249 dump_cfg_stats (FILE *file)
2251 static long max_num_merged_labels = 0;
2252 unsigned long size, total = 0;
2253 long num_edges;
2254 basic_block bb;
2255 const char * const fmt_str = "%-30s%-13s%12s\n";
2256 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2257 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2258 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2259 const char *funcname = current_function_name ();
2261 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2263 fprintf (file, "---------------------------------------------------------\n");
2264 fprintf (file, fmt_str, "", " Number of ", "Memory");
2265 fprintf (file, fmt_str, "", " instances ", "used ");
2266 fprintf (file, "---------------------------------------------------------\n");
2268 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2269 total += size;
2270 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2271 SCALE (size), LABEL (size));
2273 num_edges = 0;
2274 FOR_EACH_BB_FN (bb, cfun)
2275 num_edges += EDGE_COUNT (bb->succs);
2276 size = num_edges * sizeof (struct edge_def);
2277 total += size;
2278 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2280 fprintf (file, "---------------------------------------------------------\n");
2281 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2282 LABEL (total));
2283 fprintf (file, "---------------------------------------------------------\n");
2284 fprintf (file, "\n");
2286 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2287 max_num_merged_labels = cfg_stats.num_merged_labels;
2289 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2290 cfg_stats.num_merged_labels, max_num_merged_labels);
2292 fprintf (file, "\n");
2296 /* Dump CFG statistics on stderr. Keep extern so that it's always
2297 linked in the final executable. */
2299 DEBUG_FUNCTION void
2300 debug_cfg_stats (void)
2302 dump_cfg_stats (stderr);
2305 /*---------------------------------------------------------------------------
2306 Miscellaneous helpers
2307 ---------------------------------------------------------------------------*/
2309 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2310 flow. Transfers of control flow associated with EH are excluded. */
2312 static bool
2313 call_can_make_abnormal_goto (gimple t)
2315 /* If the function has no non-local labels, then a call cannot make an
2316 abnormal transfer of control. */
2317 if (!cfun->has_nonlocal_label
2318 && !cfun->calls_setjmp)
2319 return false;
2321 /* Likewise if the call has no side effects. */
2322 if (!gimple_has_side_effects (t))
2323 return false;
2325 /* Likewise if the called function is leaf. */
2326 if (gimple_call_flags (t) & ECF_LEAF)
2327 return false;
2329 return true;
2333 /* Return true if T can make an abnormal transfer of control flow.
2334 Transfers of control flow associated with EH are excluded. */
2336 bool
2337 stmt_can_make_abnormal_goto (gimple t)
2339 if (computed_goto_p (t))
2340 return true;
2341 if (is_gimple_call (t))
2342 return call_can_make_abnormal_goto (t);
2343 return false;
2347 /* Return true if T represents a stmt that always transfers control. */
2349 bool
2350 is_ctrl_stmt (gimple t)
2352 switch (gimple_code (t))
2354 case GIMPLE_COND:
2355 case GIMPLE_SWITCH:
2356 case GIMPLE_GOTO:
2357 case GIMPLE_RETURN:
2358 case GIMPLE_RESX:
2359 return true;
2360 default:
2361 return false;
2366 /* Return true if T is a statement that may alter the flow of control
2367 (e.g., a call to a non-returning function). */
2369 bool
2370 is_ctrl_altering_stmt (gimple t)
2372 gcc_assert (t);
2374 switch (gimple_code (t))
2376 case GIMPLE_CALL:
2378 int flags = gimple_call_flags (t);
2380 /* A call alters control flow if it can make an abnormal goto. */
2381 if (call_can_make_abnormal_goto (t))
2382 return true;
2384 /* A call also alters control flow if it does not return. */
2385 if (flags & ECF_NORETURN)
2386 return true;
2388 /* TM ending statements have backedges out of the transaction.
2389 Return true so we split the basic block containing them.
2390 Note that the TM_BUILTIN test is merely an optimization. */
2391 if ((flags & ECF_TM_BUILTIN)
2392 && is_tm_ending_fndecl (gimple_call_fndecl (t)))
2393 return true;
2395 /* BUILT_IN_RETURN call is same as return statement. */
2396 if (gimple_call_builtin_p (t, BUILT_IN_RETURN))
2397 return true;
2399 break;
2401 case GIMPLE_EH_DISPATCH:
2402 /* EH_DISPATCH branches to the individual catch handlers at
2403 this level of a try or allowed-exceptions region. It can
2404 fallthru to the next statement as well. */
2405 return true;
2407 case GIMPLE_ASM:
2408 if (gimple_asm_nlabels (t) > 0)
2409 return true;
2410 break;
2412 CASE_GIMPLE_OMP:
2413 /* OpenMP directives alter control flow. */
2414 return true;
2416 case GIMPLE_TRANSACTION:
2417 /* A transaction start alters control flow. */
2418 return true;
2420 default:
2421 break;
2424 /* If a statement can throw, it alters control flow. */
2425 return stmt_can_throw_internal (t);
2429 /* Return true if T is a simple local goto. */
2431 bool
2432 simple_goto_p (gimple t)
2434 return (gimple_code (t) == GIMPLE_GOTO
2435 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2439 /* Return true if STMT should start a new basic block. PREV_STMT is
2440 the statement preceding STMT. It is used when STMT is a label or a
2441 case label. Labels should only start a new basic block if their
2442 previous statement wasn't a label. Otherwise, sequence of labels
2443 would generate unnecessary basic blocks that only contain a single
2444 label. */
2446 static inline bool
2447 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2449 if (stmt == NULL)
2450 return false;
2452 /* Labels start a new basic block only if the preceding statement
2453 wasn't a label of the same type. This prevents the creation of
2454 consecutive blocks that have nothing but a single label. */
2455 if (gimple_code (stmt) == GIMPLE_LABEL)
2457 /* Nonlocal and computed GOTO targets always start a new block. */
2458 if (DECL_NONLOCAL (gimple_label_label (stmt))
2459 || FORCED_LABEL (gimple_label_label (stmt)))
2460 return true;
2462 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2464 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
2465 return true;
2467 cfg_stats.num_merged_labels++;
2468 return false;
2470 else
2471 return true;
2473 else if (gimple_code (stmt) == GIMPLE_CALL
2474 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2475 /* setjmp acts similar to a nonlocal GOTO target and thus should
2476 start a new block. */
2477 return true;
2479 return false;
2483 /* Return true if T should end a basic block. */
2485 bool
2486 stmt_ends_bb_p (gimple t)
2488 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2491 /* Remove block annotations and other data structures. */
2493 void
2494 delete_tree_cfg_annotations (void)
2496 vec_free (label_to_block_map_for_fn (cfun));
2500 /* Return the first statement in basic block BB. */
2502 gimple
2503 first_stmt (basic_block bb)
2505 gimple_stmt_iterator i = gsi_start_bb (bb);
2506 gimple stmt = NULL;
2508 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2510 gsi_next (&i);
2511 stmt = NULL;
2513 return stmt;
2516 /* Return the first non-label statement in basic block BB. */
2518 static gimple
2519 first_non_label_stmt (basic_block bb)
2521 gimple_stmt_iterator i = gsi_start_bb (bb);
2522 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2523 gsi_next (&i);
2524 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2527 /* Return the last statement in basic block BB. */
2529 gimple
2530 last_stmt (basic_block bb)
2532 gimple_stmt_iterator i = gsi_last_bb (bb);
2533 gimple stmt = NULL;
2535 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2537 gsi_prev (&i);
2538 stmt = NULL;
2540 return stmt;
2543 /* Return the last statement of an otherwise empty block. Return NULL
2544 if the block is totally empty, or if it contains more than one
2545 statement. */
2547 gimple
2548 last_and_only_stmt (basic_block bb)
2550 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2551 gimple last, prev;
2553 if (gsi_end_p (i))
2554 return NULL;
2556 last = gsi_stmt (i);
2557 gsi_prev_nondebug (&i);
2558 if (gsi_end_p (i))
2559 return last;
2561 /* Empty statements should no longer appear in the instruction stream.
2562 Everything that might have appeared before should be deleted by
2563 remove_useless_stmts, and the optimizers should just gsi_remove
2564 instead of smashing with build_empty_stmt.
2566 Thus the only thing that should appear here in a block containing
2567 one executable statement is a label. */
2568 prev = gsi_stmt (i);
2569 if (gimple_code (prev) == GIMPLE_LABEL)
2570 return last;
2571 else
2572 return NULL;
2575 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2577 static void
2578 reinstall_phi_args (edge new_edge, edge old_edge)
2580 edge_var_map_vector *v;
2581 edge_var_map *vm;
2582 int i;
2583 gimple_stmt_iterator phis;
2585 v = redirect_edge_var_map_vector (old_edge);
2586 if (!v)
2587 return;
2589 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2590 v->iterate (i, &vm) && !gsi_end_p (phis);
2591 i++, gsi_next (&phis))
2593 gimple phi = gsi_stmt (phis);
2594 tree result = redirect_edge_var_map_result (vm);
2595 tree arg = redirect_edge_var_map_def (vm);
2597 gcc_assert (result == gimple_phi_result (phi));
2599 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2602 redirect_edge_var_map_clear (old_edge);
2605 /* Returns the basic block after which the new basic block created
2606 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2607 near its "logical" location. This is of most help to humans looking
2608 at debugging dumps. */
2610 static basic_block
2611 split_edge_bb_loc (edge edge_in)
2613 basic_block dest = edge_in->dest;
2614 basic_block dest_prev = dest->prev_bb;
2616 if (dest_prev)
2618 edge e = find_edge (dest_prev, dest);
2619 if (e && !(e->flags & EDGE_COMPLEX))
2620 return edge_in->src;
2622 return dest_prev;
2625 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2626 Abort on abnormal edges. */
2628 static basic_block
2629 gimple_split_edge (edge edge_in)
2631 basic_block new_bb, after_bb, dest;
2632 edge new_edge, e;
2634 /* Abnormal edges cannot be split. */
2635 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2637 dest = edge_in->dest;
2639 after_bb = split_edge_bb_loc (edge_in);
2641 new_bb = create_empty_bb (after_bb);
2642 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2643 new_bb->count = edge_in->count;
2644 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2645 new_edge->probability = REG_BR_PROB_BASE;
2646 new_edge->count = edge_in->count;
2648 e = redirect_edge_and_branch (edge_in, new_bb);
2649 gcc_assert (e == edge_in);
2650 reinstall_phi_args (new_edge, e);
2652 return new_bb;
2656 /* Verify properties of the address expression T with base object BASE. */
2658 static tree
2659 verify_address (tree t, tree base)
2661 bool old_constant;
2662 bool old_side_effects;
2663 bool new_constant;
2664 bool new_side_effects;
2666 old_constant = TREE_CONSTANT (t);
2667 old_side_effects = TREE_SIDE_EFFECTS (t);
2669 recompute_tree_invariant_for_addr_expr (t);
2670 new_side_effects = TREE_SIDE_EFFECTS (t);
2671 new_constant = TREE_CONSTANT (t);
2673 if (old_constant != new_constant)
2675 error ("constant not recomputed when ADDR_EXPR changed");
2676 return t;
2678 if (old_side_effects != new_side_effects)
2680 error ("side effects not recomputed when ADDR_EXPR changed");
2681 return t;
2684 if (!(TREE_CODE (base) == VAR_DECL
2685 || TREE_CODE (base) == PARM_DECL
2686 || TREE_CODE (base) == RESULT_DECL))
2687 return NULL_TREE;
2689 if (DECL_GIMPLE_REG_P (base))
2691 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2692 return base;
2695 return NULL_TREE;
2698 /* Callback for walk_tree, check that all elements with address taken are
2699 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2700 inside a PHI node. */
2702 static tree
2703 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2705 tree t = *tp, x;
2707 if (TYPE_P (t))
2708 *walk_subtrees = 0;
2710 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2711 #define CHECK_OP(N, MSG) \
2712 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2713 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2715 switch (TREE_CODE (t))
2717 case SSA_NAME:
2718 if (SSA_NAME_IN_FREE_LIST (t))
2720 error ("SSA name in freelist but still referenced");
2721 return *tp;
2723 break;
2725 case INDIRECT_REF:
2726 error ("INDIRECT_REF in gimple IL");
2727 return t;
2729 case MEM_REF:
2730 x = TREE_OPERAND (t, 0);
2731 if (!POINTER_TYPE_P (TREE_TYPE (x))
2732 || !is_gimple_mem_ref_addr (x))
2734 error ("invalid first operand of MEM_REF");
2735 return x;
2737 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2738 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2740 error ("invalid offset operand of MEM_REF");
2741 return TREE_OPERAND (t, 1);
2743 if (TREE_CODE (x) == ADDR_EXPR
2744 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2745 return x;
2746 *walk_subtrees = 0;
2747 break;
2749 case ASSERT_EXPR:
2750 x = fold (ASSERT_EXPR_COND (t));
2751 if (x == boolean_false_node)
2753 error ("ASSERT_EXPR with an always-false condition");
2754 return *tp;
2756 break;
2758 case MODIFY_EXPR:
2759 error ("MODIFY_EXPR not expected while having tuples");
2760 return *tp;
2762 case ADDR_EXPR:
2764 tree tem;
2766 gcc_assert (is_gimple_address (t));
2768 /* Skip any references (they will be checked when we recurse down the
2769 tree) and ensure that any variable used as a prefix is marked
2770 addressable. */
2771 for (x = TREE_OPERAND (t, 0);
2772 handled_component_p (x);
2773 x = TREE_OPERAND (x, 0))
2776 if ((tem = verify_address (t, x)))
2777 return tem;
2779 if (!(TREE_CODE (x) == VAR_DECL
2780 || TREE_CODE (x) == PARM_DECL
2781 || TREE_CODE (x) == RESULT_DECL))
2782 return NULL;
2784 if (!TREE_ADDRESSABLE (x))
2786 error ("address taken, but ADDRESSABLE bit not set");
2787 return x;
2790 break;
2793 case COND_EXPR:
2794 x = COND_EXPR_COND (t);
2795 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2797 error ("non-integral used in condition");
2798 return x;
2800 if (!is_gimple_condexpr (x))
2802 error ("invalid conditional operand");
2803 return x;
2805 break;
2807 case NON_LVALUE_EXPR:
2808 case TRUTH_NOT_EXPR:
2809 gcc_unreachable ();
2811 CASE_CONVERT:
2812 case FIX_TRUNC_EXPR:
2813 case FLOAT_EXPR:
2814 case NEGATE_EXPR:
2815 case ABS_EXPR:
2816 case BIT_NOT_EXPR:
2817 CHECK_OP (0, "invalid operand to unary operator");
2818 break;
2820 case REALPART_EXPR:
2821 case IMAGPART_EXPR:
2822 case BIT_FIELD_REF:
2823 if (!is_gimple_reg_type (TREE_TYPE (t)))
2825 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2826 return t;
2829 if (TREE_CODE (t) == BIT_FIELD_REF)
2831 tree t0 = TREE_OPERAND (t, 0);
2832 tree t1 = TREE_OPERAND (t, 1);
2833 tree t2 = TREE_OPERAND (t, 2);
2834 if (!tree_fits_uhwi_p (t1)
2835 || !tree_fits_uhwi_p (t2))
2837 error ("invalid position or size operand to BIT_FIELD_REF");
2838 return t;
2840 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2841 && (TYPE_PRECISION (TREE_TYPE (t))
2842 != tree_to_uhwi (t1)))
2844 error ("integral result type precision does not match "
2845 "field size of BIT_FIELD_REF");
2846 return t;
2848 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2849 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2850 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2851 != tree_to_uhwi (t1)))
2853 error ("mode precision of non-integral result does not "
2854 "match field size of BIT_FIELD_REF");
2855 return t;
2857 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
2858 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
2859 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
2861 error ("position plus size exceeds size of referenced object in "
2862 "BIT_FIELD_REF");
2863 return t;
2866 t = TREE_OPERAND (t, 0);
2868 /* Fall-through. */
2869 case COMPONENT_REF:
2870 case ARRAY_REF:
2871 case ARRAY_RANGE_REF:
2872 case VIEW_CONVERT_EXPR:
2873 /* We have a nest of references. Verify that each of the operands
2874 that determine where to reference is either a constant or a variable,
2875 verify that the base is valid, and then show we've already checked
2876 the subtrees. */
2877 while (handled_component_p (t))
2879 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2880 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2881 else if (TREE_CODE (t) == ARRAY_REF
2882 || TREE_CODE (t) == ARRAY_RANGE_REF)
2884 CHECK_OP (1, "invalid array index");
2885 if (TREE_OPERAND (t, 2))
2886 CHECK_OP (2, "invalid array lower bound");
2887 if (TREE_OPERAND (t, 3))
2888 CHECK_OP (3, "invalid array stride");
2890 else if (TREE_CODE (t) == BIT_FIELD_REF
2891 || TREE_CODE (t) == REALPART_EXPR
2892 || TREE_CODE (t) == IMAGPART_EXPR)
2894 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
2895 "REALPART_EXPR");
2896 return t;
2899 t = TREE_OPERAND (t, 0);
2902 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2904 error ("invalid reference prefix");
2905 return t;
2907 *walk_subtrees = 0;
2908 break;
2909 case PLUS_EXPR:
2910 case MINUS_EXPR:
2911 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
2912 POINTER_PLUS_EXPR. */
2913 if (POINTER_TYPE_P (TREE_TYPE (t)))
2915 error ("invalid operand to plus/minus, type is a pointer");
2916 return t;
2918 CHECK_OP (0, "invalid operand to binary operator");
2919 CHECK_OP (1, "invalid operand to binary operator");
2920 break;
2922 case POINTER_PLUS_EXPR:
2923 /* Check to make sure the first operand is a pointer or reference type. */
2924 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
2926 error ("invalid operand to pointer plus, first operand is not a pointer");
2927 return t;
2929 /* Check to make sure the second operand is a ptrofftype. */
2930 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
2932 error ("invalid operand to pointer plus, second operand is not an "
2933 "integer type of appropriate width");
2934 return t;
2936 /* FALLTHROUGH */
2937 case LT_EXPR:
2938 case LE_EXPR:
2939 case GT_EXPR:
2940 case GE_EXPR:
2941 case EQ_EXPR:
2942 case NE_EXPR:
2943 case UNORDERED_EXPR:
2944 case ORDERED_EXPR:
2945 case UNLT_EXPR:
2946 case UNLE_EXPR:
2947 case UNGT_EXPR:
2948 case UNGE_EXPR:
2949 case UNEQ_EXPR:
2950 case LTGT_EXPR:
2951 case MULT_EXPR:
2952 case TRUNC_DIV_EXPR:
2953 case CEIL_DIV_EXPR:
2954 case FLOOR_DIV_EXPR:
2955 case ROUND_DIV_EXPR:
2956 case TRUNC_MOD_EXPR:
2957 case CEIL_MOD_EXPR:
2958 case FLOOR_MOD_EXPR:
2959 case ROUND_MOD_EXPR:
2960 case RDIV_EXPR:
2961 case EXACT_DIV_EXPR:
2962 case MIN_EXPR:
2963 case MAX_EXPR:
2964 case LSHIFT_EXPR:
2965 case RSHIFT_EXPR:
2966 case LROTATE_EXPR:
2967 case RROTATE_EXPR:
2968 case BIT_IOR_EXPR:
2969 case BIT_XOR_EXPR:
2970 case BIT_AND_EXPR:
2971 CHECK_OP (0, "invalid operand to binary operator");
2972 CHECK_OP (1, "invalid operand to binary operator");
2973 break;
2975 case CONSTRUCTOR:
2976 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2977 *walk_subtrees = 0;
2978 break;
2980 case CASE_LABEL_EXPR:
2981 if (CASE_CHAIN (t))
2983 error ("invalid CASE_CHAIN");
2984 return t;
2986 break;
2988 default:
2989 break;
2991 return NULL;
2993 #undef CHECK_OP
2997 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2998 Returns true if there is an error, otherwise false. */
3000 static bool
3001 verify_types_in_gimple_min_lval (tree expr)
3003 tree op;
3005 if (is_gimple_id (expr))
3006 return false;
3008 if (TREE_CODE (expr) != TARGET_MEM_REF
3009 && TREE_CODE (expr) != MEM_REF)
3011 error ("invalid expression for min lvalue");
3012 return true;
3015 /* TARGET_MEM_REFs are strange beasts. */
3016 if (TREE_CODE (expr) == TARGET_MEM_REF)
3017 return false;
3019 op = TREE_OPERAND (expr, 0);
3020 if (!is_gimple_val (op))
3022 error ("invalid operand in indirect reference");
3023 debug_generic_stmt (op);
3024 return true;
3026 /* Memory references now generally can involve a value conversion. */
3028 return false;
3031 /* Verify if EXPR is a valid GIMPLE reference expression. If
3032 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3033 if there is an error, otherwise false. */
3035 static bool
3036 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3038 while (handled_component_p (expr))
3040 tree op = TREE_OPERAND (expr, 0);
3042 if (TREE_CODE (expr) == ARRAY_REF
3043 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3045 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3046 || (TREE_OPERAND (expr, 2)
3047 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3048 || (TREE_OPERAND (expr, 3)
3049 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3051 error ("invalid operands to array reference");
3052 debug_generic_stmt (expr);
3053 return true;
3057 /* Verify if the reference array element types are compatible. */
3058 if (TREE_CODE (expr) == ARRAY_REF
3059 && !useless_type_conversion_p (TREE_TYPE (expr),
3060 TREE_TYPE (TREE_TYPE (op))))
3062 error ("type mismatch in array reference");
3063 debug_generic_stmt (TREE_TYPE (expr));
3064 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3065 return true;
3067 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3068 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3069 TREE_TYPE (TREE_TYPE (op))))
3071 error ("type mismatch in array range reference");
3072 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3073 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3074 return true;
3077 if ((TREE_CODE (expr) == REALPART_EXPR
3078 || TREE_CODE (expr) == IMAGPART_EXPR)
3079 && !useless_type_conversion_p (TREE_TYPE (expr),
3080 TREE_TYPE (TREE_TYPE (op))))
3082 error ("type mismatch in real/imagpart reference");
3083 debug_generic_stmt (TREE_TYPE (expr));
3084 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3085 return true;
3088 if (TREE_CODE (expr) == COMPONENT_REF
3089 && !useless_type_conversion_p (TREE_TYPE (expr),
3090 TREE_TYPE (TREE_OPERAND (expr, 1))))
3092 error ("type mismatch in component reference");
3093 debug_generic_stmt (TREE_TYPE (expr));
3094 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3095 return true;
3098 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3100 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3101 that their operand is not an SSA name or an invariant when
3102 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3103 bug). Otherwise there is nothing to verify, gross mismatches at
3104 most invoke undefined behavior. */
3105 if (require_lvalue
3106 && (TREE_CODE (op) == SSA_NAME
3107 || is_gimple_min_invariant (op)))
3109 error ("conversion of an SSA_NAME on the left hand side");
3110 debug_generic_stmt (expr);
3111 return true;
3113 else if (TREE_CODE (op) == SSA_NAME
3114 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3116 error ("conversion of register to a different size");
3117 debug_generic_stmt (expr);
3118 return true;
3120 else if (!handled_component_p (op))
3121 return false;
3124 expr = op;
3127 if (TREE_CODE (expr) == MEM_REF)
3129 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3131 error ("invalid address operand in MEM_REF");
3132 debug_generic_stmt (expr);
3133 return true;
3135 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3136 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3138 error ("invalid offset operand in MEM_REF");
3139 debug_generic_stmt (expr);
3140 return true;
3143 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3145 if (!TMR_BASE (expr)
3146 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3148 error ("invalid address operand in TARGET_MEM_REF");
3149 return true;
3151 if (!TMR_OFFSET (expr)
3152 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3153 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3155 error ("invalid offset operand in TARGET_MEM_REF");
3156 debug_generic_stmt (expr);
3157 return true;
3161 return ((require_lvalue || !is_gimple_min_invariant (expr))
3162 && verify_types_in_gimple_min_lval (expr));
3165 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3166 list of pointer-to types that is trivially convertible to DEST. */
3168 static bool
3169 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3171 tree src;
3173 if (!TYPE_POINTER_TO (src_obj))
3174 return true;
3176 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3177 if (useless_type_conversion_p (dest, src))
3178 return true;
3180 return false;
3183 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3184 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3186 static bool
3187 valid_fixed_convert_types_p (tree type1, tree type2)
3189 return (FIXED_POINT_TYPE_P (type1)
3190 && (INTEGRAL_TYPE_P (type2)
3191 || SCALAR_FLOAT_TYPE_P (type2)
3192 || FIXED_POINT_TYPE_P (type2)));
3195 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3196 is a problem, otherwise false. */
3198 static bool
3199 verify_gimple_call (gimple stmt)
3201 tree fn = gimple_call_fn (stmt);
3202 tree fntype, fndecl;
3203 unsigned i;
3205 if (gimple_call_internal_p (stmt))
3207 if (fn)
3209 error ("gimple call has two targets");
3210 debug_generic_stmt (fn);
3211 return true;
3214 else
3216 if (!fn)
3218 error ("gimple call has no target");
3219 return true;
3223 if (fn && !is_gimple_call_addr (fn))
3225 error ("invalid function in gimple call");
3226 debug_generic_stmt (fn);
3227 return true;
3230 if (fn
3231 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3232 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3233 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3235 error ("non-function in gimple call");
3236 return true;
3239 fndecl = gimple_call_fndecl (stmt);
3240 if (fndecl
3241 && TREE_CODE (fndecl) == FUNCTION_DECL
3242 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3243 && !DECL_PURE_P (fndecl)
3244 && !TREE_READONLY (fndecl))
3246 error ("invalid pure const state for function");
3247 return true;
3250 if (gimple_call_lhs (stmt)
3251 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
3252 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
3254 error ("invalid LHS in gimple call");
3255 return true;
3258 if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
3260 error ("LHS in noreturn call");
3261 return true;
3264 fntype = gimple_call_fntype (stmt);
3265 if (fntype
3266 && gimple_call_lhs (stmt)
3267 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3268 TREE_TYPE (fntype))
3269 /* ??? At least C++ misses conversions at assignments from
3270 void * call results.
3271 ??? Java is completely off. Especially with functions
3272 returning java.lang.Object.
3273 For now simply allow arbitrary pointer type conversions. */
3274 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3275 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3277 error ("invalid conversion in gimple call");
3278 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3279 debug_generic_stmt (TREE_TYPE (fntype));
3280 return true;
3283 if (gimple_call_chain (stmt)
3284 && !is_gimple_val (gimple_call_chain (stmt)))
3286 error ("invalid static chain in gimple call");
3287 debug_generic_stmt (gimple_call_chain (stmt));
3288 return true;
3291 /* If there is a static chain argument, this should not be an indirect
3292 call, and the decl should have DECL_STATIC_CHAIN set. */
3293 if (gimple_call_chain (stmt))
3295 if (!gimple_call_fndecl (stmt))
3297 error ("static chain in indirect gimple call");
3298 return true;
3300 fn = TREE_OPERAND (fn, 0);
3302 if (!DECL_STATIC_CHAIN (fn))
3304 error ("static chain with function that doesn%'t use one");
3305 return true;
3309 /* ??? The C frontend passes unpromoted arguments in case it
3310 didn't see a function declaration before the call. So for now
3311 leave the call arguments mostly unverified. Once we gimplify
3312 unit-at-a-time we have a chance to fix this. */
3314 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3316 tree arg = gimple_call_arg (stmt, i);
3317 if ((is_gimple_reg_type (TREE_TYPE (arg))
3318 && !is_gimple_val (arg))
3319 || (!is_gimple_reg_type (TREE_TYPE (arg))
3320 && !is_gimple_lvalue (arg)))
3322 error ("invalid argument to gimple call");
3323 debug_generic_expr (arg);
3324 return true;
3328 return false;
3331 /* Verifies the gimple comparison with the result type TYPE and
3332 the operands OP0 and OP1. */
3334 static bool
3335 verify_gimple_comparison (tree type, tree op0, tree op1)
3337 tree op0_type = TREE_TYPE (op0);
3338 tree op1_type = TREE_TYPE (op1);
3340 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3342 error ("invalid operands in gimple comparison");
3343 return true;
3346 /* For comparisons we do not have the operations type as the
3347 effective type the comparison is carried out in. Instead
3348 we require that either the first operand is trivially
3349 convertible into the second, or the other way around.
3350 Because we special-case pointers to void we allow
3351 comparisons of pointers with the same mode as well. */
3352 if (!useless_type_conversion_p (op0_type, op1_type)
3353 && !useless_type_conversion_p (op1_type, op0_type)
3354 && (!POINTER_TYPE_P (op0_type)
3355 || !POINTER_TYPE_P (op1_type)
3356 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3358 error ("mismatching comparison operand types");
3359 debug_generic_expr (op0_type);
3360 debug_generic_expr (op1_type);
3361 return true;
3364 /* The resulting type of a comparison may be an effective boolean type. */
3365 if (INTEGRAL_TYPE_P (type)
3366 && (TREE_CODE (type) == BOOLEAN_TYPE
3367 || TYPE_PRECISION (type) == 1))
3369 if (TREE_CODE (op0_type) == VECTOR_TYPE
3370 || TREE_CODE (op1_type) == VECTOR_TYPE)
3372 error ("vector comparison returning a boolean");
3373 debug_generic_expr (op0_type);
3374 debug_generic_expr (op1_type);
3375 return true;
3378 /* Or an integer vector type with the same size and element count
3379 as the comparison operand types. */
3380 else if (TREE_CODE (type) == VECTOR_TYPE
3381 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3383 if (TREE_CODE (op0_type) != VECTOR_TYPE
3384 || TREE_CODE (op1_type) != VECTOR_TYPE)
3386 error ("non-vector operands in vector comparison");
3387 debug_generic_expr (op0_type);
3388 debug_generic_expr (op1_type);
3389 return true;
3392 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3393 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3394 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
3395 /* The result of a vector comparison is of signed
3396 integral type. */
3397 || TYPE_UNSIGNED (TREE_TYPE (type)))
3399 error ("invalid vector comparison resulting type");
3400 debug_generic_expr (type);
3401 return true;
3404 else
3406 error ("bogus comparison result type");
3407 debug_generic_expr (type);
3408 return true;
3411 return false;
3414 /* Verify a gimple assignment statement STMT with an unary rhs.
3415 Returns true if anything is wrong. */
3417 static bool
3418 verify_gimple_assign_unary (gimple stmt)
3420 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3421 tree lhs = gimple_assign_lhs (stmt);
3422 tree lhs_type = TREE_TYPE (lhs);
3423 tree rhs1 = gimple_assign_rhs1 (stmt);
3424 tree rhs1_type = TREE_TYPE (rhs1);
3426 if (!is_gimple_reg (lhs))
3428 error ("non-register as LHS of unary operation");
3429 return true;
3432 if (!is_gimple_val (rhs1))
3434 error ("invalid operand in unary operation");
3435 return true;
3438 /* First handle conversions. */
3439 switch (rhs_code)
3441 CASE_CONVERT:
3443 /* Allow conversions from pointer type to integral type only if
3444 there is no sign or zero extension involved.
3445 For targets were the precision of ptrofftype doesn't match that
3446 of pointers we need to allow arbitrary conversions to ptrofftype. */
3447 if ((POINTER_TYPE_P (lhs_type)
3448 && INTEGRAL_TYPE_P (rhs1_type))
3449 || (POINTER_TYPE_P (rhs1_type)
3450 && INTEGRAL_TYPE_P (lhs_type)
3451 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3452 || ptrofftype_p (sizetype))))
3453 return false;
3455 /* Allow conversion from integral to offset type and vice versa. */
3456 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3457 && INTEGRAL_TYPE_P (rhs1_type))
3458 || (INTEGRAL_TYPE_P (lhs_type)
3459 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3460 return false;
3462 /* Otherwise assert we are converting between types of the
3463 same kind. */
3464 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3466 error ("invalid types in nop conversion");
3467 debug_generic_expr (lhs_type);
3468 debug_generic_expr (rhs1_type);
3469 return true;
3472 return false;
3475 case ADDR_SPACE_CONVERT_EXPR:
3477 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3478 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3479 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3481 error ("invalid types in address space conversion");
3482 debug_generic_expr (lhs_type);
3483 debug_generic_expr (rhs1_type);
3484 return true;
3487 return false;
3490 case FIXED_CONVERT_EXPR:
3492 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3493 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3495 error ("invalid types in fixed-point conversion");
3496 debug_generic_expr (lhs_type);
3497 debug_generic_expr (rhs1_type);
3498 return true;
3501 return false;
3504 case FLOAT_EXPR:
3506 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3507 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3508 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3510 error ("invalid types in conversion to floating point");
3511 debug_generic_expr (lhs_type);
3512 debug_generic_expr (rhs1_type);
3513 return true;
3516 return false;
3519 case FIX_TRUNC_EXPR:
3521 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3522 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3523 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3525 error ("invalid types in conversion to integer");
3526 debug_generic_expr (lhs_type);
3527 debug_generic_expr (rhs1_type);
3528 return true;
3531 return false;
3534 case VEC_UNPACK_HI_EXPR:
3535 case VEC_UNPACK_LO_EXPR:
3536 case REDUC_MAX_EXPR:
3537 case REDUC_MIN_EXPR:
3538 case REDUC_PLUS_EXPR:
3539 case VEC_UNPACK_FLOAT_HI_EXPR:
3540 case VEC_UNPACK_FLOAT_LO_EXPR:
3541 /* FIXME. */
3542 return false;
3544 case NEGATE_EXPR:
3545 case ABS_EXPR:
3546 case BIT_NOT_EXPR:
3547 case PAREN_EXPR:
3548 case NON_LVALUE_EXPR:
3549 case CONJ_EXPR:
3550 break;
3552 default:
3553 gcc_unreachable ();
3556 /* For the remaining codes assert there is no conversion involved. */
3557 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3559 error ("non-trivial conversion in unary operation");
3560 debug_generic_expr (lhs_type);
3561 debug_generic_expr (rhs1_type);
3562 return true;
3565 return false;
3568 /* Verify a gimple assignment statement STMT with a binary rhs.
3569 Returns true if anything is wrong. */
3571 static bool
3572 verify_gimple_assign_binary (gimple stmt)
3574 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3575 tree lhs = gimple_assign_lhs (stmt);
3576 tree lhs_type = TREE_TYPE (lhs);
3577 tree rhs1 = gimple_assign_rhs1 (stmt);
3578 tree rhs1_type = TREE_TYPE (rhs1);
3579 tree rhs2 = gimple_assign_rhs2 (stmt);
3580 tree rhs2_type = TREE_TYPE (rhs2);
3582 if (!is_gimple_reg (lhs))
3584 error ("non-register as LHS of binary operation");
3585 return true;
3588 if (!is_gimple_val (rhs1)
3589 || !is_gimple_val (rhs2))
3591 error ("invalid operands in binary operation");
3592 return true;
3595 /* First handle operations that involve different types. */
3596 switch (rhs_code)
3598 case COMPLEX_EXPR:
3600 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3601 || !(INTEGRAL_TYPE_P (rhs1_type)
3602 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3603 || !(INTEGRAL_TYPE_P (rhs2_type)
3604 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3606 error ("type mismatch in complex expression");
3607 debug_generic_expr (lhs_type);
3608 debug_generic_expr (rhs1_type);
3609 debug_generic_expr (rhs2_type);
3610 return true;
3613 return false;
3616 case LSHIFT_EXPR:
3617 case RSHIFT_EXPR:
3618 case LROTATE_EXPR:
3619 case RROTATE_EXPR:
3621 /* Shifts and rotates are ok on integral types, fixed point
3622 types and integer vector types. */
3623 if ((!INTEGRAL_TYPE_P (rhs1_type)
3624 && !FIXED_POINT_TYPE_P (rhs1_type)
3625 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3626 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3627 || (!INTEGRAL_TYPE_P (rhs2_type)
3628 /* Vector shifts of vectors are also ok. */
3629 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3630 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3631 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3632 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3633 || !useless_type_conversion_p (lhs_type, rhs1_type))
3635 error ("type mismatch in shift expression");
3636 debug_generic_expr (lhs_type);
3637 debug_generic_expr (rhs1_type);
3638 debug_generic_expr (rhs2_type);
3639 return true;
3642 return false;
3645 case VEC_LSHIFT_EXPR:
3646 case VEC_RSHIFT_EXPR:
3648 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3649 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3650 || POINTER_TYPE_P (TREE_TYPE (rhs1_type))
3651 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3652 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3653 || (!INTEGRAL_TYPE_P (rhs2_type)
3654 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3655 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3656 || !useless_type_conversion_p (lhs_type, rhs1_type))
3658 error ("type mismatch in vector shift expression");
3659 debug_generic_expr (lhs_type);
3660 debug_generic_expr (rhs1_type);
3661 debug_generic_expr (rhs2_type);
3662 return true;
3664 /* For shifting a vector of non-integral components we
3665 only allow shifting by a constant multiple of the element size. */
3666 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3667 && (TREE_CODE (rhs2) != INTEGER_CST
3668 || !div_if_zero_remainder (rhs2,
3669 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3671 error ("non-element sized vector shift of floating point vector");
3672 return true;
3675 return false;
3678 case WIDEN_LSHIFT_EXPR:
3680 if (!INTEGRAL_TYPE_P (lhs_type)
3681 || !INTEGRAL_TYPE_P (rhs1_type)
3682 || TREE_CODE (rhs2) != INTEGER_CST
3683 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3685 error ("type mismatch in widening vector shift expression");
3686 debug_generic_expr (lhs_type);
3687 debug_generic_expr (rhs1_type);
3688 debug_generic_expr (rhs2_type);
3689 return true;
3692 return false;
3695 case VEC_WIDEN_LSHIFT_HI_EXPR:
3696 case VEC_WIDEN_LSHIFT_LO_EXPR:
3698 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3699 || TREE_CODE (lhs_type) != VECTOR_TYPE
3700 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3701 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3702 || TREE_CODE (rhs2) != INTEGER_CST
3703 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3704 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3706 error ("type mismatch in widening vector shift expression");
3707 debug_generic_expr (lhs_type);
3708 debug_generic_expr (rhs1_type);
3709 debug_generic_expr (rhs2_type);
3710 return true;
3713 return false;
3716 case PLUS_EXPR:
3717 case MINUS_EXPR:
3719 tree lhs_etype = lhs_type;
3720 tree rhs1_etype = rhs1_type;
3721 tree rhs2_etype = rhs2_type;
3722 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3724 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3725 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3727 error ("invalid non-vector operands to vector valued plus");
3728 return true;
3730 lhs_etype = TREE_TYPE (lhs_type);
3731 rhs1_etype = TREE_TYPE (rhs1_type);
3732 rhs2_etype = TREE_TYPE (rhs2_type);
3734 if (POINTER_TYPE_P (lhs_etype)
3735 || POINTER_TYPE_P (rhs1_etype)
3736 || POINTER_TYPE_P (rhs2_etype))
3738 error ("invalid (pointer) operands to plus/minus");
3739 return true;
3742 /* Continue with generic binary expression handling. */
3743 break;
3746 case POINTER_PLUS_EXPR:
3748 if (!POINTER_TYPE_P (rhs1_type)
3749 || !useless_type_conversion_p (lhs_type, rhs1_type)
3750 || !ptrofftype_p (rhs2_type))
3752 error ("type mismatch in pointer plus expression");
3753 debug_generic_stmt (lhs_type);
3754 debug_generic_stmt (rhs1_type);
3755 debug_generic_stmt (rhs2_type);
3756 return true;
3759 return false;
3762 case TRUTH_ANDIF_EXPR:
3763 case TRUTH_ORIF_EXPR:
3764 case TRUTH_AND_EXPR:
3765 case TRUTH_OR_EXPR:
3766 case TRUTH_XOR_EXPR:
3768 gcc_unreachable ();
3770 case LT_EXPR:
3771 case LE_EXPR:
3772 case GT_EXPR:
3773 case GE_EXPR:
3774 case EQ_EXPR:
3775 case NE_EXPR:
3776 case UNORDERED_EXPR:
3777 case ORDERED_EXPR:
3778 case UNLT_EXPR:
3779 case UNLE_EXPR:
3780 case UNGT_EXPR:
3781 case UNGE_EXPR:
3782 case UNEQ_EXPR:
3783 case LTGT_EXPR:
3784 /* Comparisons are also binary, but the result type is not
3785 connected to the operand types. */
3786 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3788 case WIDEN_MULT_EXPR:
3789 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3790 return true;
3791 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3792 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3794 case WIDEN_SUM_EXPR:
3795 case VEC_WIDEN_MULT_HI_EXPR:
3796 case VEC_WIDEN_MULT_LO_EXPR:
3797 case VEC_WIDEN_MULT_EVEN_EXPR:
3798 case VEC_WIDEN_MULT_ODD_EXPR:
3799 case VEC_PACK_TRUNC_EXPR:
3800 case VEC_PACK_SAT_EXPR:
3801 case VEC_PACK_FIX_TRUNC_EXPR:
3802 /* FIXME. */
3803 return false;
3805 case MULT_EXPR:
3806 case MULT_HIGHPART_EXPR:
3807 case TRUNC_DIV_EXPR:
3808 case CEIL_DIV_EXPR:
3809 case FLOOR_DIV_EXPR:
3810 case ROUND_DIV_EXPR:
3811 case TRUNC_MOD_EXPR:
3812 case CEIL_MOD_EXPR:
3813 case FLOOR_MOD_EXPR:
3814 case ROUND_MOD_EXPR:
3815 case RDIV_EXPR:
3816 case EXACT_DIV_EXPR:
3817 case MIN_EXPR:
3818 case MAX_EXPR:
3819 case BIT_IOR_EXPR:
3820 case BIT_XOR_EXPR:
3821 case BIT_AND_EXPR:
3822 /* Continue with generic binary expression handling. */
3823 break;
3825 default:
3826 gcc_unreachable ();
3829 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3830 || !useless_type_conversion_p (lhs_type, rhs2_type))
3832 error ("type mismatch in binary expression");
3833 debug_generic_stmt (lhs_type);
3834 debug_generic_stmt (rhs1_type);
3835 debug_generic_stmt (rhs2_type);
3836 return true;
3839 return false;
3842 /* Verify a gimple assignment statement STMT with a ternary rhs.
3843 Returns true if anything is wrong. */
3845 static bool
3846 verify_gimple_assign_ternary (gimple stmt)
3848 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3849 tree lhs = gimple_assign_lhs (stmt);
3850 tree lhs_type = TREE_TYPE (lhs);
3851 tree rhs1 = gimple_assign_rhs1 (stmt);
3852 tree rhs1_type = TREE_TYPE (rhs1);
3853 tree rhs2 = gimple_assign_rhs2 (stmt);
3854 tree rhs2_type = TREE_TYPE (rhs2);
3855 tree rhs3 = gimple_assign_rhs3 (stmt);
3856 tree rhs3_type = TREE_TYPE (rhs3);
3858 if (!is_gimple_reg (lhs))
3860 error ("non-register as LHS of ternary operation");
3861 return true;
3864 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3865 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3866 || !is_gimple_val (rhs2)
3867 || !is_gimple_val (rhs3))
3869 error ("invalid operands in ternary operation");
3870 return true;
3873 /* First handle operations that involve different types. */
3874 switch (rhs_code)
3876 case WIDEN_MULT_PLUS_EXPR:
3877 case WIDEN_MULT_MINUS_EXPR:
3878 if ((!INTEGRAL_TYPE_P (rhs1_type)
3879 && !FIXED_POINT_TYPE_P (rhs1_type))
3880 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3881 || !useless_type_conversion_p (lhs_type, rhs3_type)
3882 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3883 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3885 error ("type mismatch in widening multiply-accumulate expression");
3886 debug_generic_expr (lhs_type);
3887 debug_generic_expr (rhs1_type);
3888 debug_generic_expr (rhs2_type);
3889 debug_generic_expr (rhs3_type);
3890 return true;
3892 break;
3894 case FMA_EXPR:
3895 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3896 || !useless_type_conversion_p (lhs_type, rhs2_type)
3897 || !useless_type_conversion_p (lhs_type, rhs3_type))
3899 error ("type mismatch in fused multiply-add expression");
3900 debug_generic_expr (lhs_type);
3901 debug_generic_expr (rhs1_type);
3902 debug_generic_expr (rhs2_type);
3903 debug_generic_expr (rhs3_type);
3904 return true;
3906 break;
3908 case COND_EXPR:
3909 case VEC_COND_EXPR:
3910 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3911 || !useless_type_conversion_p (lhs_type, rhs3_type))
3913 error ("type mismatch in conditional expression");
3914 debug_generic_expr (lhs_type);
3915 debug_generic_expr (rhs2_type);
3916 debug_generic_expr (rhs3_type);
3917 return true;
3919 break;
3921 case VEC_PERM_EXPR:
3922 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3923 || !useless_type_conversion_p (lhs_type, rhs2_type))
3925 error ("type mismatch in vector permute expression");
3926 debug_generic_expr (lhs_type);
3927 debug_generic_expr (rhs1_type);
3928 debug_generic_expr (rhs2_type);
3929 debug_generic_expr (rhs3_type);
3930 return true;
3933 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3934 || TREE_CODE (rhs2_type) != VECTOR_TYPE
3935 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
3937 error ("vector types expected in vector permute expression");
3938 debug_generic_expr (lhs_type);
3939 debug_generic_expr (rhs1_type);
3940 debug_generic_expr (rhs2_type);
3941 debug_generic_expr (rhs3_type);
3942 return true;
3945 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
3946 || TYPE_VECTOR_SUBPARTS (rhs2_type)
3947 != TYPE_VECTOR_SUBPARTS (rhs3_type)
3948 || TYPE_VECTOR_SUBPARTS (rhs3_type)
3949 != TYPE_VECTOR_SUBPARTS (lhs_type))
3951 error ("vectors with different element number found "
3952 "in vector permute expression");
3953 debug_generic_expr (lhs_type);
3954 debug_generic_expr (rhs1_type);
3955 debug_generic_expr (rhs2_type);
3956 debug_generic_expr (rhs3_type);
3957 return true;
3960 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
3961 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
3962 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
3964 error ("invalid mask type in vector permute expression");
3965 debug_generic_expr (lhs_type);
3966 debug_generic_expr (rhs1_type);
3967 debug_generic_expr (rhs2_type);
3968 debug_generic_expr (rhs3_type);
3969 return true;
3972 return false;
3974 case DOT_PROD_EXPR:
3975 case REALIGN_LOAD_EXPR:
3976 /* FIXME. */
3977 return false;
3979 default:
3980 gcc_unreachable ();
3982 return false;
3985 /* Verify a gimple assignment statement STMT with a single rhs.
3986 Returns true if anything is wrong. */
3988 static bool
3989 verify_gimple_assign_single (gimple stmt)
3991 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3992 tree lhs = gimple_assign_lhs (stmt);
3993 tree lhs_type = TREE_TYPE (lhs);
3994 tree rhs1 = gimple_assign_rhs1 (stmt);
3995 tree rhs1_type = TREE_TYPE (rhs1);
3996 bool res = false;
3998 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4000 error ("non-trivial conversion at assignment");
4001 debug_generic_expr (lhs_type);
4002 debug_generic_expr (rhs1_type);
4003 return true;
4006 if (gimple_clobber_p (stmt)
4007 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4009 error ("non-decl/MEM_REF LHS in clobber statement");
4010 debug_generic_expr (lhs);
4011 return true;
4014 if (handled_component_p (lhs)
4015 || TREE_CODE (lhs) == MEM_REF
4016 || TREE_CODE (lhs) == TARGET_MEM_REF)
4017 res |= verify_types_in_gimple_reference (lhs, true);
4019 /* Special codes we cannot handle via their class. */
4020 switch (rhs_code)
4022 case ADDR_EXPR:
4024 tree op = TREE_OPERAND (rhs1, 0);
4025 if (!is_gimple_addressable (op))
4027 error ("invalid operand in unary expression");
4028 return true;
4031 /* Technically there is no longer a need for matching types, but
4032 gimple hygiene asks for this check. In LTO we can end up
4033 combining incompatible units and thus end up with addresses
4034 of globals that change their type to a common one. */
4035 if (!in_lto_p
4036 && !types_compatible_p (TREE_TYPE (op),
4037 TREE_TYPE (TREE_TYPE (rhs1)))
4038 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4039 TREE_TYPE (op)))
4041 error ("type mismatch in address expression");
4042 debug_generic_stmt (TREE_TYPE (rhs1));
4043 debug_generic_stmt (TREE_TYPE (op));
4044 return true;
4047 return verify_types_in_gimple_reference (op, true);
4050 /* tcc_reference */
4051 case INDIRECT_REF:
4052 error ("INDIRECT_REF in gimple IL");
4053 return true;
4055 case COMPONENT_REF:
4056 case BIT_FIELD_REF:
4057 case ARRAY_REF:
4058 case ARRAY_RANGE_REF:
4059 case VIEW_CONVERT_EXPR:
4060 case REALPART_EXPR:
4061 case IMAGPART_EXPR:
4062 case TARGET_MEM_REF:
4063 case MEM_REF:
4064 if (!is_gimple_reg (lhs)
4065 && is_gimple_reg_type (TREE_TYPE (lhs)))
4067 error ("invalid rhs for gimple memory store");
4068 debug_generic_stmt (lhs);
4069 debug_generic_stmt (rhs1);
4070 return true;
4072 return res || verify_types_in_gimple_reference (rhs1, false);
4074 /* tcc_constant */
4075 case SSA_NAME:
4076 case INTEGER_CST:
4077 case REAL_CST:
4078 case FIXED_CST:
4079 case COMPLEX_CST:
4080 case VECTOR_CST:
4081 case STRING_CST:
4082 return res;
4084 /* tcc_declaration */
4085 case CONST_DECL:
4086 return res;
4087 case VAR_DECL:
4088 case PARM_DECL:
4089 if (!is_gimple_reg (lhs)
4090 && !is_gimple_reg (rhs1)
4091 && is_gimple_reg_type (TREE_TYPE (lhs)))
4093 error ("invalid rhs for gimple memory store");
4094 debug_generic_stmt (lhs);
4095 debug_generic_stmt (rhs1);
4096 return true;
4098 return res;
4100 case CONSTRUCTOR:
4101 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4103 unsigned int i;
4104 tree elt_i, elt_v, elt_t = NULL_TREE;
4106 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4107 return res;
4108 /* For vector CONSTRUCTORs we require that either it is empty
4109 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4110 (then the element count must be correct to cover the whole
4111 outer vector and index must be NULL on all elements, or it is
4112 a CONSTRUCTOR of scalar elements, where we as an exception allow
4113 smaller number of elements (assuming zero filling) and
4114 consecutive indexes as compared to NULL indexes (such
4115 CONSTRUCTORs can appear in the IL from FEs). */
4116 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4118 if (elt_t == NULL_TREE)
4120 elt_t = TREE_TYPE (elt_v);
4121 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4123 tree elt_t = TREE_TYPE (elt_v);
4124 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4125 TREE_TYPE (elt_t)))
4127 error ("incorrect type of vector CONSTRUCTOR"
4128 " elements");
4129 debug_generic_stmt (rhs1);
4130 return true;
4132 else if (CONSTRUCTOR_NELTS (rhs1)
4133 * TYPE_VECTOR_SUBPARTS (elt_t)
4134 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4136 error ("incorrect number of vector CONSTRUCTOR"
4137 " elements");
4138 debug_generic_stmt (rhs1);
4139 return true;
4142 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4143 elt_t))
4145 error ("incorrect type of vector CONSTRUCTOR elements");
4146 debug_generic_stmt (rhs1);
4147 return true;
4149 else if (CONSTRUCTOR_NELTS (rhs1)
4150 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4152 error ("incorrect number of vector CONSTRUCTOR elements");
4153 debug_generic_stmt (rhs1);
4154 return true;
4157 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4159 error ("incorrect type of vector CONSTRUCTOR elements");
4160 debug_generic_stmt (rhs1);
4161 return true;
4163 if (elt_i != NULL_TREE
4164 && (TREE_CODE (elt_t) == VECTOR_TYPE
4165 || TREE_CODE (elt_i) != INTEGER_CST
4166 || compare_tree_int (elt_i, i) != 0))
4168 error ("vector CONSTRUCTOR with non-NULL element index");
4169 debug_generic_stmt (rhs1);
4170 return true;
4174 return res;
4175 case OBJ_TYPE_REF:
4176 case ASSERT_EXPR:
4177 case WITH_SIZE_EXPR:
4178 /* FIXME. */
4179 return res;
4181 default:;
4184 return res;
4187 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4188 is a problem, otherwise false. */
4190 static bool
4191 verify_gimple_assign (gimple stmt)
4193 switch (gimple_assign_rhs_class (stmt))
4195 case GIMPLE_SINGLE_RHS:
4196 return verify_gimple_assign_single (stmt);
4198 case GIMPLE_UNARY_RHS:
4199 return verify_gimple_assign_unary (stmt);
4201 case GIMPLE_BINARY_RHS:
4202 return verify_gimple_assign_binary (stmt);
4204 case GIMPLE_TERNARY_RHS:
4205 return verify_gimple_assign_ternary (stmt);
4207 default:
4208 gcc_unreachable ();
4212 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4213 is a problem, otherwise false. */
4215 static bool
4216 verify_gimple_return (gimple stmt)
4218 tree op = gimple_return_retval (stmt);
4219 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4221 /* We cannot test for present return values as we do not fix up missing
4222 return values from the original source. */
4223 if (op == NULL)
4224 return false;
4226 if (!is_gimple_val (op)
4227 && TREE_CODE (op) != RESULT_DECL)
4229 error ("invalid operand in return statement");
4230 debug_generic_stmt (op);
4231 return true;
4234 if ((TREE_CODE (op) == RESULT_DECL
4235 && DECL_BY_REFERENCE (op))
4236 || (TREE_CODE (op) == SSA_NAME
4237 && SSA_NAME_VAR (op)
4238 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4239 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4240 op = TREE_TYPE (op);
4242 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4244 error ("invalid conversion in return statement");
4245 debug_generic_stmt (restype);
4246 debug_generic_stmt (TREE_TYPE (op));
4247 return true;
4250 return false;
4254 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4255 is a problem, otherwise false. */
4257 static bool
4258 verify_gimple_goto (gimple stmt)
4260 tree dest = gimple_goto_dest (stmt);
4262 /* ??? We have two canonical forms of direct goto destinations, a
4263 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4264 if (TREE_CODE (dest) != LABEL_DECL
4265 && (!is_gimple_val (dest)
4266 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4268 error ("goto destination is neither a label nor a pointer");
4269 return true;
4272 return false;
4275 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4276 is a problem, otherwise false. */
4278 static bool
4279 verify_gimple_switch (gimple stmt)
4281 unsigned int i, n;
4282 tree elt, prev_upper_bound = NULL_TREE;
4283 tree index_type, elt_type = NULL_TREE;
4285 if (!is_gimple_val (gimple_switch_index (stmt)))
4287 error ("invalid operand to switch statement");
4288 debug_generic_stmt (gimple_switch_index (stmt));
4289 return true;
4292 index_type = TREE_TYPE (gimple_switch_index (stmt));
4293 if (! INTEGRAL_TYPE_P (index_type))
4295 error ("non-integral type switch statement");
4296 debug_generic_expr (index_type);
4297 return true;
4300 elt = gimple_switch_label (stmt, 0);
4301 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4303 error ("invalid default case label in switch statement");
4304 debug_generic_expr (elt);
4305 return true;
4308 n = gimple_switch_num_labels (stmt);
4309 for (i = 1; i < n; i++)
4311 elt = gimple_switch_label (stmt, i);
4313 if (! CASE_LOW (elt))
4315 error ("invalid case label in switch statement");
4316 debug_generic_expr (elt);
4317 return true;
4319 if (CASE_HIGH (elt)
4320 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4322 error ("invalid case range in switch statement");
4323 debug_generic_expr (elt);
4324 return true;
4327 if (elt_type)
4329 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4330 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4332 error ("type mismatch for case label in switch statement");
4333 debug_generic_expr (elt);
4334 return true;
4337 else
4339 elt_type = TREE_TYPE (CASE_LOW (elt));
4340 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4342 error ("type precision mismatch in switch statement");
4343 return true;
4347 if (prev_upper_bound)
4349 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4351 error ("case labels not sorted in switch statement");
4352 return true;
4356 prev_upper_bound = CASE_HIGH (elt);
4357 if (! prev_upper_bound)
4358 prev_upper_bound = CASE_LOW (elt);
4361 return false;
4364 /* Verify a gimple debug statement STMT.
4365 Returns true if anything is wrong. */
4367 static bool
4368 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4370 /* There isn't much that could be wrong in a gimple debug stmt. A
4371 gimple debug bind stmt, for example, maps a tree, that's usually
4372 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4373 component or member of an aggregate type, to another tree, that
4374 can be an arbitrary expression. These stmts expand into debug
4375 insns, and are converted to debug notes by var-tracking.c. */
4376 return false;
4379 /* Verify a gimple label statement STMT.
4380 Returns true if anything is wrong. */
4382 static bool
4383 verify_gimple_label (gimple stmt)
4385 tree decl = gimple_label_label (stmt);
4386 int uid;
4387 bool err = false;
4389 if (TREE_CODE (decl) != LABEL_DECL)
4390 return true;
4391 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4392 && DECL_CONTEXT (decl) != current_function_decl)
4394 error ("label's context is not the current function decl");
4395 err |= true;
4398 uid = LABEL_DECL_UID (decl);
4399 if (cfun->cfg
4400 && (uid == -1
4401 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4403 error ("incorrect entry in label_to_block_map");
4404 err |= true;
4407 uid = EH_LANDING_PAD_NR (decl);
4408 if (uid)
4410 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4411 if (decl != lp->post_landing_pad)
4413 error ("incorrect setting of landing pad number");
4414 err |= true;
4418 return err;
4421 /* Verify the GIMPLE statement STMT. Returns true if there is an
4422 error, otherwise false. */
4424 static bool
4425 verify_gimple_stmt (gimple stmt)
4427 switch (gimple_code (stmt))
4429 case GIMPLE_ASSIGN:
4430 return verify_gimple_assign (stmt);
4432 case GIMPLE_LABEL:
4433 return verify_gimple_label (stmt);
4435 case GIMPLE_CALL:
4436 return verify_gimple_call (stmt);
4438 case GIMPLE_COND:
4439 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4441 error ("invalid comparison code in gimple cond");
4442 return true;
4444 if (!(!gimple_cond_true_label (stmt)
4445 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4446 || !(!gimple_cond_false_label (stmt)
4447 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4449 error ("invalid labels in gimple cond");
4450 return true;
4453 return verify_gimple_comparison (boolean_type_node,
4454 gimple_cond_lhs (stmt),
4455 gimple_cond_rhs (stmt));
4457 case GIMPLE_GOTO:
4458 return verify_gimple_goto (stmt);
4460 case GIMPLE_SWITCH:
4461 return verify_gimple_switch (stmt);
4463 case GIMPLE_RETURN:
4464 return verify_gimple_return (stmt);
4466 case GIMPLE_ASM:
4467 return false;
4469 case GIMPLE_TRANSACTION:
4470 return verify_gimple_transaction (stmt);
4472 /* Tuples that do not have tree operands. */
4473 case GIMPLE_NOP:
4474 case GIMPLE_PREDICT:
4475 case GIMPLE_RESX:
4476 case GIMPLE_EH_DISPATCH:
4477 case GIMPLE_EH_MUST_NOT_THROW:
4478 return false;
4480 CASE_GIMPLE_OMP:
4481 /* OpenMP directives are validated by the FE and never operated
4482 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4483 non-gimple expressions when the main index variable has had
4484 its address taken. This does not affect the loop itself
4485 because the header of an GIMPLE_OMP_FOR is merely used to determine
4486 how to setup the parallel iteration. */
4487 return false;
4489 case GIMPLE_DEBUG:
4490 return verify_gimple_debug (stmt);
4492 default:
4493 gcc_unreachable ();
4497 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4498 and false otherwise. */
4500 static bool
4501 verify_gimple_phi (gimple phi)
4503 bool err = false;
4504 unsigned i;
4505 tree phi_result = gimple_phi_result (phi);
4506 bool virtual_p;
4508 if (!phi_result)
4510 error ("invalid PHI result");
4511 return true;
4514 virtual_p = virtual_operand_p (phi_result);
4515 if (TREE_CODE (phi_result) != SSA_NAME
4516 || (virtual_p
4517 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4519 error ("invalid PHI result");
4520 err = true;
4523 for (i = 0; i < gimple_phi_num_args (phi); i++)
4525 tree t = gimple_phi_arg_def (phi, i);
4527 if (!t)
4529 error ("missing PHI def");
4530 err |= true;
4531 continue;
4533 /* Addressable variables do have SSA_NAMEs but they
4534 are not considered gimple values. */
4535 else if ((TREE_CODE (t) == SSA_NAME
4536 && virtual_p != virtual_operand_p (t))
4537 || (virtual_p
4538 && (TREE_CODE (t) != SSA_NAME
4539 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4540 || (!virtual_p
4541 && !is_gimple_val (t)))
4543 error ("invalid PHI argument");
4544 debug_generic_expr (t);
4545 err |= true;
4547 #ifdef ENABLE_TYPES_CHECKING
4548 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4550 error ("incompatible types in PHI argument %u", i);
4551 debug_generic_stmt (TREE_TYPE (phi_result));
4552 debug_generic_stmt (TREE_TYPE (t));
4553 err |= true;
4555 #endif
4558 return err;
4561 /* Verify the GIMPLE statements inside the sequence STMTS. */
4563 static bool
4564 verify_gimple_in_seq_2 (gimple_seq stmts)
4566 gimple_stmt_iterator ittr;
4567 bool err = false;
4569 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4571 gimple stmt = gsi_stmt (ittr);
4573 switch (gimple_code (stmt))
4575 case GIMPLE_BIND:
4576 err |= verify_gimple_in_seq_2 (gimple_bind_body (stmt));
4577 break;
4579 case GIMPLE_TRY:
4580 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4581 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4582 break;
4584 case GIMPLE_EH_FILTER:
4585 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4586 break;
4588 case GIMPLE_EH_ELSE:
4589 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt));
4590 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt));
4591 break;
4593 case GIMPLE_CATCH:
4594 err |= verify_gimple_in_seq_2 (gimple_catch_handler (stmt));
4595 break;
4597 case GIMPLE_TRANSACTION:
4598 err |= verify_gimple_transaction (stmt);
4599 break;
4601 default:
4603 bool err2 = verify_gimple_stmt (stmt);
4604 if (err2)
4605 debug_gimple_stmt (stmt);
4606 err |= err2;
4611 return err;
4614 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4615 is a problem, otherwise false. */
4617 static bool
4618 verify_gimple_transaction (gimple stmt)
4620 tree lab = gimple_transaction_label (stmt);
4621 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4622 return true;
4623 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4627 /* Verify the GIMPLE statements inside the statement list STMTS. */
4629 DEBUG_FUNCTION void
4630 verify_gimple_in_seq (gimple_seq stmts)
4632 timevar_push (TV_TREE_STMT_VERIFY);
4633 if (verify_gimple_in_seq_2 (stmts))
4634 internal_error ("verify_gimple failed");
4635 timevar_pop (TV_TREE_STMT_VERIFY);
4638 /* Return true when the T can be shared. */
4640 static bool
4641 tree_node_can_be_shared (tree t)
4643 if (IS_TYPE_OR_DECL_P (t)
4644 || is_gimple_min_invariant (t)
4645 || TREE_CODE (t) == SSA_NAME
4646 || t == error_mark_node
4647 || TREE_CODE (t) == IDENTIFIER_NODE)
4648 return true;
4650 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4651 return true;
4653 if (DECL_P (t))
4654 return true;
4656 return false;
4659 /* Called via walk_tree. Verify tree sharing. */
4661 static tree
4662 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4664 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4666 if (tree_node_can_be_shared (*tp))
4668 *walk_subtrees = false;
4669 return NULL;
4672 if (pointer_set_insert (visited, *tp))
4673 return *tp;
4675 return NULL;
4678 /* Called via walk_gimple_stmt. Verify tree sharing. */
4680 static tree
4681 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4683 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4684 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4687 static bool eh_error_found;
4688 static int
4689 verify_eh_throw_stmt_node (void **slot, void *data)
4691 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4692 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4694 if (!pointer_set_contains (visited, node->stmt))
4696 error ("dead STMT in EH table");
4697 debug_gimple_stmt (node->stmt);
4698 eh_error_found = true;
4700 return 1;
4703 /* Verify if the location LOCs block is in BLOCKS. */
4705 static bool
4706 verify_location (pointer_set_t *blocks, location_t loc)
4708 tree block = LOCATION_BLOCK (loc);
4709 if (block != NULL_TREE
4710 && !pointer_set_contains (blocks, block))
4712 error ("location references block not in block tree");
4713 return true;
4715 if (block != NULL_TREE)
4716 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4717 return false;
4720 /* Called via walk_tree. Verify that expressions have no blocks. */
4722 static tree
4723 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4725 if (!EXPR_P (*tp))
4727 *walk_subtrees = false;
4728 return NULL;
4731 location_t loc = EXPR_LOCATION (*tp);
4732 if (LOCATION_BLOCK (loc) != NULL)
4733 return *tp;
4735 return NULL;
4738 /* Called via walk_tree. Verify locations of expressions. */
4740 static tree
4741 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4743 struct pointer_set_t *blocks = (struct pointer_set_t *) data;
4745 if (TREE_CODE (*tp) == VAR_DECL
4746 && DECL_HAS_DEBUG_EXPR_P (*tp))
4748 tree t = DECL_DEBUG_EXPR (*tp);
4749 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4750 if (addr)
4751 return addr;
4753 if ((TREE_CODE (*tp) == VAR_DECL
4754 || TREE_CODE (*tp) == PARM_DECL
4755 || TREE_CODE (*tp) == RESULT_DECL)
4756 && DECL_HAS_VALUE_EXPR_P (*tp))
4758 tree t = DECL_VALUE_EXPR (*tp);
4759 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4760 if (addr)
4761 return addr;
4764 if (!EXPR_P (*tp))
4766 *walk_subtrees = false;
4767 return NULL;
4770 location_t loc = EXPR_LOCATION (*tp);
4771 if (verify_location (blocks, loc))
4772 return *tp;
4774 return NULL;
4777 /* Called via walk_gimple_op. Verify locations of expressions. */
4779 static tree
4780 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4782 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4783 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4786 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4788 static void
4789 collect_subblocks (pointer_set_t *blocks, tree block)
4791 tree t;
4792 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4794 pointer_set_insert (blocks, t);
4795 collect_subblocks (blocks, t);
4799 /* Verify the GIMPLE statements in the CFG of FN.  Checks statement/PHI
   well-formedness, tree-node sharing, location blocks, and EH region
   markings; calls internal_error on any failure.  If VERIFY_NOTHROW,
   also complain about statements in an EH region that cannot throw.  */
4801 DEBUG_FUNCTION void
4802 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
4804 basic_block bb;
4805 bool err = false;
4806 struct pointer_set_t *visited, *visited_stmts, *blocks;
4808 timevar_push (TV_TREE_STMT_VERIFY);
4809 visited = pointer_set_create ();
4810 visited_stmts = pointer_set_create ();
4812 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4813 blocks = pointer_set_create ();
4814 if (DECL_INITIAL (fn->decl))
4816 pointer_set_insert (blocks, DECL_INITIAL (fn->decl));
4817 collect_subblocks (blocks, DECL_INITIAL (fn->decl));
4820 FOR_EACH_BB_FN (bb, fn)
4822 gimple_stmt_iterator gsi;
     /* First verify the PHI nodes of BB.  */
4824 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4826 gimple phi = gsi_stmt (gsi);
4827 bool err2 = false;
4828 unsigned i;
4830 pointer_set_insert (visited_stmts, phi);
4832 if (gimple_bb (phi) != bb)
4834 error ("gimple_bb (phi) is set to a wrong basic block");
4835 err2 = true;
4838 err2 |= verify_gimple_phi (phi);
4840 /* Only PHI arguments have locations. */
4841 if (gimple_location (phi) != UNKNOWN_LOCATION)
4843 error ("PHI node with location");
4844 err2 = true;
4847 for (i = 0; i < gimple_phi_num_args (phi); i++)
4849 tree arg = gimple_phi_arg_def (phi, i);
4850 tree addr = walk_tree (&arg, verify_node_sharing_1,
4851 visited, NULL);
4852 if (addr)
4854 error ("incorrect sharing of tree nodes");
4855 debug_generic_expr (addr);
4856 err2 |= true;
4858 location_t loc = gimple_phi_arg_location (phi, i);
4859 if (virtual_operand_p (gimple_phi_result (phi))
4860 && loc != UNKNOWN_LOCATION)
4862 error ("virtual PHI with argument locations");
4863 err2 = true;
4865 addr = walk_tree (&arg, verify_expr_location_1, blocks, NULL);
4866 if (addr)
4868 debug_generic_expr (addr);
4869 err2 = true;
4871 err2 |= verify_location (blocks, loc);
4874 if (err2)
4875 debug_gimple_stmt (phi);
4876 err |= err2;
     /* Now verify the regular statements of BB.  */
4879 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4881 gimple stmt = gsi_stmt (gsi);
4882 bool err2 = false;
4883 struct walk_stmt_info wi;
4884 tree addr;
4885 int lp_nr;
4887 pointer_set_insert (visited_stmts, stmt);
4889 if (gimple_bb (stmt) != bb)
4891 error ("gimple_bb (stmt) is set to a wrong basic block");
4892 err2 = true;
4895 err2 |= verify_gimple_stmt (stmt);
4896 err2 |= verify_location (blocks, gimple_location (stmt));
     /* Check tree nodes are not shared between statements.  */
4898 memset (&wi, 0, sizeof (wi));
4899 wi.info = (void *) visited;
4900 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
4901 if (addr)
4903 error ("incorrect sharing of tree nodes");
4904 debug_generic_expr (addr);
4905 err2 |= true;
     /* Check the locations of all operand expressions.  */
4908 memset (&wi, 0, sizeof (wi));
4909 wi.info = (void *) blocks;
4910 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
4911 if (addr)
4913 debug_generic_expr (addr);
4914 err2 |= true;
4917 /* ??? Instead of not checking these stmts at all the walker
4918 should know its context via wi. */
4919 if (!is_gimple_debug (stmt)
4920 && !is_gimple_omp (stmt))
4922 memset (&wi, 0, sizeof (wi));
4923 addr = walk_gimple_op (stmt, verify_expr, &wi);
4924 if (addr)
4926 debug_generic_expr (addr);
4927 inform (gimple_location (stmt), "in statement");
4928 err2 |= true;
4932 /* If the statement is marked as part of an EH region, then it is
4933 expected that the statement could throw. Verify that when we
4934 have optimizations that simplify statements such that we prove
4935 that they cannot throw, that we update other data structures
4936 to match. */
4937 lp_nr = lookup_stmt_eh_lp (stmt);
4938 if (lp_nr > 0)
4940 if (!stmt_could_throw_p (stmt))
4942 if (verify_nothrow)
4944 error ("statement marked for throw, but doesn%'t");
4945 err2 |= true;
4948 else if (!gsi_one_before_end_p (gsi))
4950 error ("statement marked for throw in middle of block");
4951 err2 |= true;
4955 if (err2)
4956 debug_gimple_stmt (stmt);
4957 err |= err2;
     /* Verify every entry of the EH throw table points at a statement
        we visited above; verify_eh_throw_stmt_node sets eh_error_found
        otherwise.  */
4961 eh_error_found = false;
4962 if (get_eh_throw_stmt_table (cfun))
4963 htab_traverse (get_eh_throw_stmt_table (cfun),
4964 verify_eh_throw_stmt_node,
4965 visited_stmts);
4967 if (err || eh_error_found)
4968 internal_error ("verify_gimple failed");
4970 pointer_set_destroy (visited);
4971 pointer_set_destroy (visited_stmts);
4972 pointer_set_destroy (blocks);
4973 verify_histograms ();
4974 timevar_pop (TV_TREE_STMT_VERIFY);
4978 /* Verifies that the flow information is OK: no IL on the ENTRY/EXIT
   blocks, labels only at block starts, no control flow in the middle
   of a block, and outgoing edge flags consistent with the statement
   ending each block.  Returns nonzero iff an error was found.  */
4980 static int
4981 gimple_verify_flow_info (void)
4983 int err = 0;
4984 basic_block bb;
4985 gimple_stmt_iterator gsi;
4986 gimple stmt;
4987 edge e;
4988 edge_iterator ei;
4990 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
4991 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
4993 error ("ENTRY_BLOCK has IL associated with it");
4994 err = 1;
4997 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
4998 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5000 error ("EXIT_BLOCK has IL associated with it");
5001 err = 1;
5004 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5005 if (e->flags & EDGE_FALLTHRU)
5007 error ("fallthru to exit from bb %d", e->src->index);
5008 err = 1;
5011 FOR_EACH_BB_FN (bb, cfun)
5013 bool found_ctrl_stmt = false;
5015 stmt = NULL;
5017 /* Skip labels on the start of basic block. */
5018 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5020 tree label;
5021 gimple prev_stmt = stmt;
5023 stmt = gsi_stmt (gsi);
5025 if (gimple_code (stmt) != GIMPLE_LABEL)
5026 break;
     /* Nonlocal and EH landing pad labels must come first in the
        label sequence of a block.  */
5028 label = gimple_label_label (stmt);
5029 if (prev_stmt && DECL_NONLOCAL (label))
5031 error ("nonlocal label ");
5032 print_generic_expr (stderr, label, 0);
5033 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5034 bb->index);
5035 err = 1;
5038 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5040 error ("EH landing pad label ");
5041 print_generic_expr (stderr, label, 0);
5042 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5043 bb->index);
5044 err = 1;
5047 if (label_to_block (label) != bb)
5049 error ("label ");
5050 print_generic_expr (stderr, label, 0);
5051 fprintf (stderr, " to block does not match in bb %d",
5052 bb->index);
5053 err = 1;
5056 if (decl_function_context (label) != current_function_decl)
5058 error ("label ");
5059 print_generic_expr (stderr, label, 0);
5060 fprintf (stderr, " has incorrect context in bb %d",
5061 bb->index);
5062 err = 1;
5066 /* Verify that body of basic block BB is free of control flow. */
5067 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5069 gimple stmt = gsi_stmt (gsi);
5071 if (found_ctrl_stmt)
5073 error ("control flow in the middle of basic block %d",
5074 bb->index);
5075 err = 1;
5078 if (stmt_ends_bb_p (stmt))
5079 found_ctrl_stmt = true;
5081 if (gimple_code (stmt) == GIMPLE_LABEL)
5083 error ("label ");
5084 print_generic_expr (stderr, gimple_label_label (stmt), 0);
5085 fprintf (stderr, " in the middle of basic block %d", bb->index);
5086 err = 1;
     /* Now check the edges leaving BB against its last statement.  */
5090 gsi = gsi_last_bb (bb);
5091 if (gsi_end_p (gsi))
5092 continue;
5094 stmt = gsi_stmt (gsi);
5096 if (gimple_code (stmt) == GIMPLE_LABEL)
5097 continue;
5099 err |= verify_eh_edges (stmt);
5101 if (is_ctrl_stmt (stmt))
5103 FOR_EACH_EDGE (e, ei, bb->succs)
5104 if (e->flags & EDGE_FALLTHRU)
5106 error ("fallthru edge after a control statement in bb %d",
5107 bb->index);
5108 err = 1;
5112 if (gimple_code (stmt) != GIMPLE_COND)
5114 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5115 after anything else but if statement. */
5116 FOR_EACH_EDGE (e, ei, bb->succs)
5117 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5119 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5120 bb->index);
5121 err = 1;
     /* Per-statement-kind checks of the outgoing edges.  */
5125 switch (gimple_code (stmt))
5127 case GIMPLE_COND:
5129 edge true_edge;
5130 edge false_edge;
5132 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5134 if (!true_edge
5135 || !false_edge
5136 || !(true_edge->flags & EDGE_TRUE_VALUE)
5137 || !(false_edge->flags & EDGE_FALSE_VALUE)
5138 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5139 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5140 || EDGE_COUNT (bb->succs) >= 3)
5142 error ("wrong outgoing edge flags at end of bb %d",
5143 bb->index);
5144 err = 1;
5147 break;
5149 case GIMPLE_GOTO:
5150 if (simple_goto_p (stmt))
5152 error ("explicit goto at end of bb %d", bb->index);
5153 err = 1;
5155 else
5157 /* FIXME. We should double check that the labels in the
5158 destination blocks have their address taken. */
5159 FOR_EACH_EDGE (e, ei, bb->succs)
5160 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5161 | EDGE_FALSE_VALUE))
5162 || !(e->flags & EDGE_ABNORMAL))
5164 error ("wrong outgoing edge flags at end of bb %d",
5165 bb->index);
5166 err = 1;
5169 break;
5171 case GIMPLE_CALL:
5172 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5173 break;
5174 /* ... fallthru ... */
5175 case GIMPLE_RETURN:
5176 if (!single_succ_p (bb)
5177 || (single_succ_edge (bb)->flags
5178 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5179 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5181 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5182 err = 1;
5184 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5186 error ("return edge does not point to exit in bb %d",
5187 bb->index);
5188 err = 1;
5190 break;
5192 case GIMPLE_SWITCH:
5194 tree prev;
5195 edge e;
5196 size_t i, n;
5198 n = gimple_switch_num_labels (stmt);
5200 /* Mark all the destination basic blocks. */
5201 for (i = 0; i < n; ++i)
5203 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
5204 basic_block label_bb = label_to_block (lab);
5205 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5206 label_bb->aux = (void *)1;
5209 /* Verify that the case labels are sorted. */
5210 prev = gimple_switch_label (stmt, 0);
5211 for (i = 1; i < n; ++i)
5213 tree c = gimple_switch_label (stmt, i);
5214 if (!CASE_LOW (c))
5216 error ("found default case not at the start of "
5217 "case vector");
5218 err = 1;
5219 continue;
5221 if (CASE_LOW (prev)
5222 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5224 error ("case labels not sorted: ");
5225 print_generic_expr (stderr, prev, 0);
5226 fprintf (stderr," is greater than ");
5227 print_generic_expr (stderr, c, 0);
5228 fprintf (stderr," but comes before it.\n");
5229 err = 1;
5231 prev = c;
5233 /* VRP will remove the default case if it can prove it will
5234 never be executed. So do not verify there always exists
5235 a default case here. */
5237 FOR_EACH_EDGE (e, ei, bb->succs)
5239 if (!e->dest->aux)
5241 error ("extra outgoing edge %d->%d",
5242 bb->index, e->dest->index);
5243 err = 1;
     /* Mark each reached destination so the loop below can verify
        every case label has a matching edge.  */
5246 e->dest->aux = (void *)2;
5247 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5248 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5250 error ("wrong outgoing edge flags at end of bb %d",
5251 bb->index);
5252 err = 1;
5256 /* Check that we have all of them. */
5257 for (i = 0; i < n; ++i)
5259 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
5260 basic_block label_bb = label_to_block (lab);
5262 if (label_bb->aux != (void *)2)
5264 error ("missing edge %i->%i", bb->index, label_bb->index);
5265 err = 1;
     /* Clear the aux markers again.  */
5269 FOR_EACH_EDGE (e, ei, bb->succs)
5270 e->dest->aux = (void *)0;
5272 break;
5274 case GIMPLE_EH_DISPATCH:
5275 err |= verify_eh_dispatch_edge (stmt);
5276 break;
5278 default:
5279 break;
5283 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5284 verify_dominators (CDI_DOMINATORS);
5286 return err;
5290 /* Updates phi nodes after creating a forwarder block joined
5291 by edge FALLTHRU.  The forwarder (FALLTHRU->src) takes over the PHI
   nodes of the original block; the original block gets fresh PHIs fed
   only by FALLTHRU and by the arguments pending on redirected edges.  */
5293 static void
5294 gimple_make_forwarder_block (edge fallthru)
5296 edge e;
5297 edge_iterator ei;
5298 basic_block dummy, bb;
5299 tree var;
5300 gimple_stmt_iterator gsi;
5302 dummy = fallthru->src;
5303 bb = fallthru->dest;
     /* With a single predecessor no PHI adjustment is needed.  */
5305 if (single_pred_p (bb))
5306 return;
5308 /* If we redirected a branch we must create new PHI nodes at the
5309 start of BB. */
5310 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5312 gimple phi, new_phi;
5314 phi = gsi_stmt (gsi);
5315 var = gimple_phi_result (phi);
5316 new_phi = create_phi_node (var, bb);
     /* Give the old PHI in the forwarder a fresh SSA name and feed it
        into the new PHI along the fallthru edge.  */
5317 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5318 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5319 UNKNOWN_LOCATION);
5322 /* Add the arguments we have stored on edges. */
5323 FOR_EACH_EDGE (e, ei, bb->preds)
5325 if (e == fallthru)
5326 continue;
5328 flush_pending_stmts (e);
5333 /* Return a non-special label in the head of basic block BLOCK.
5334 Create one if it doesn't exist.  As a side effect, a usable label
   found later in the label sequence is moved to the front so that
   subsequent calls find it immediately.  */
5336 tree
5337 gimple_block_label (basic_block bb)
5339 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5340 bool first = true;
5341 tree label;
5342 gimple stmt;
     /* Scan the leading labels of BB for one that is not nonlocal.  */
5344 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5346 stmt = gsi_stmt (i);
5347 if (gimple_code (stmt) != GIMPLE_LABEL)
5348 break;
5349 label = gimple_label_label (stmt);
5350 if (!DECL_NONLOCAL (label))
     /* Hoist the label to the head of the block before returning it.  */
5352 if (!first)
5353 gsi_move_before (&i, &s);
5354 return label;
     /* No usable label found; create an artificial one.  */
5358 label = create_artificial_label (UNKNOWN_LOCATION);
5359 stmt = gimple_build_label (label);
5360 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5361 return label;
5365 /* Attempt to perform edge redirection by replacing a possibly complex
5366 jump instruction by a goto or by removing the jump completely.
5367 This can apply only if all edges now point to the same block. The
5368 parameters and return values are equivalent to
5369 redirect_edge_and_branch. */
5371 static edge
5372 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5374 basic_block src = e->src;
5375 gimple_stmt_iterator i;
5376 gimple stmt;
5378 /* We can replace or remove a complex jump only when we have exactly
5379 two edges. */
5380 if (EDGE_COUNT (src->succs) != 2
5381 /* Verify that all targets will be TARGET. Specifically, the
5382 edge that is not E must also go to TARGET. */
5383 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5384 return NULL;
5386 i = gsi_last_bb (src);
5387 if (gsi_end_p (i))
5388 return NULL;
5390 stmt = gsi_stmt (i);
     /* Only a two-way conditional or a switch can be deleted and
        replaced by a plain fallthru edge.  */
5392 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5394 gsi_remove (&i, true);
5395 e = ssa_redirect_edge (e, target);
5396 e->flags = EDGE_FALLTHRU;
5397 return e;
5400 return NULL;
5404 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5405 edge representing the redirected branch.  Abnormal edges are never
   redirected; EH edges are delegated to redirect_eh_edge; otherwise the
   statement ending E->src is updated as needed before the CFG edge is
   moved with ssa_redirect_edge.  */
5407 static edge
5408 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5410 basic_block bb = e->src;
5411 gimple_stmt_iterator gsi;
5412 edge ret;
5413 gimple stmt;
5415 if (e->flags & EDGE_ABNORMAL)
5416 return NULL;
     /* Redirecting to where the edge already goes is a no-op.  */
5418 if (e->dest == dest)
5419 return NULL;
5421 if (e->flags & EDGE_EH)
5422 return redirect_eh_edge (e, dest);
5424 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5426 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5427 if (ret)
5428 return ret;
5431 gsi = gsi_last_bb (bb);
5432 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5434 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5436 case GIMPLE_COND:
5437 /* For COND_EXPR, we only need to redirect the edge. */
5438 break;
5440 case GIMPLE_GOTO:
5441 /* No non-abnormal edges should lead from a non-simple goto, and
5442 simple ones should be represented implicitly. */
5443 gcc_unreachable ();
5445 case GIMPLE_SWITCH:
5447 tree label = gimple_block_label (dest);
5448 tree cases = get_cases_for_edge (e, stmt);
5450 /* If we have a list of cases associated with E, then use it
5451 as it's a lot faster than walking the entire case vector. */
5452 if (cases)
5454 edge e2 = find_edge (e->src, dest);
5455 tree last, first;
5457 first = cases;
5458 while (cases)
5460 last = cases;
5461 CASE_LABEL (cases) = label;
5462 cases = CASE_CHAIN (cases);
5465 /* If there was already an edge in the CFG, then we need
5466 to move all the cases associated with E to E2. */
5467 if (e2)
5469 tree cases2 = get_cases_for_edge (e2, stmt);
5471 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5472 CASE_CHAIN (cases2) = first;
5474 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5476 else
     /* No cached case list: rewrite every case label that pointed
        at the old destination.  */
5478 size_t i, n = gimple_switch_num_labels (stmt);
5480 for (i = 0; i < n; i++)
5482 tree elt = gimple_switch_label (stmt, i);
5483 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5484 CASE_LABEL (elt) = label;
5488 break;
5490 case GIMPLE_ASM:
5492 int i, n = gimple_asm_nlabels (stmt);
5493 tree label = NULL;
5495 for (i = 0; i < n; ++i)
5497 tree cons = gimple_asm_label_op (stmt, i);
5498 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5500 if (!label)
5501 label = gimple_block_label (dest);
5502 TREE_VALUE (cons) = label;
5506 /* If we didn't find any label matching the former edge in the
5507 asm labels, we must be redirecting the fallthrough
5508 edge. */
5509 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5511 break;
5513 case GIMPLE_RETURN:
     /* Drop the return statement; the edge becomes a plain fallthru.  */
5514 gsi_remove (&gsi, true);
5515 e->flags |= EDGE_FALLTHRU;
5516 break;
5518 case GIMPLE_OMP_RETURN:
5519 case GIMPLE_OMP_CONTINUE:
5520 case GIMPLE_OMP_SECTIONS_SWITCH:
5521 case GIMPLE_OMP_FOR:
5522 /* The edges from OMP constructs can be simply redirected. */
5523 break;
5525 case GIMPLE_EH_DISPATCH:
5526 if (!(e->flags & EDGE_FALLTHRU))
5527 redirect_eh_dispatch_edge (stmt, e, dest);
5528 break;
5530 case GIMPLE_TRANSACTION:
5531 /* The ABORT edge has a stored label associated with it, otherwise
5532 the edges are simply redirectable. */
5533 if (e->flags == 0)
5534 gimple_transaction_set_label (stmt, gimple_block_label (dest));
5535 break;
5537 default:
5538 /* Otherwise it must be a fallthru edge, and we don't need to
5539 do anything besides redirecting it. */
5540 gcc_assert (e->flags & EDGE_FALLTHRU);
5541 break;
5544 /* Update/insert PHI nodes as necessary. */
5546 /* Now update the edges in the CFG. */
5547 e = ssa_redirect_edge (e, dest);
5549 return e;
5552 /* Returns true if it is possible to remove edge E by redirecting
5553 it to the destination of the other edge from E->src. */
5555 static bool
5556 gimple_can_remove_branch_p (const_edge e)
5558 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5559 return false;
5561 return true;
5564 /* Simple wrapper, as we can always redirect fallthru edges. */
5566 static basic_block
5567 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5569 e = gimple_redirect_edge_and_branch (e, dest);
5570 gcc_assert (e);
5572 return NULL;
5576 /* Splits basic block BB after statement STMT (but at least after the
5577 labels). If STMT is NULL, BB is split just after the labels.
   Returns the newly created block, which receives everything after the
   split point; BB keeps its outgoing edges' sources updated.  */
5579 static basic_block
5580 gimple_split_block (basic_block bb, void *stmt)
5582 gimple_stmt_iterator gsi;
5583 gimple_stmt_iterator gsi_tgt;
5584 gimple act;
5585 gimple_seq list;
5586 basic_block new_bb;
5587 edge e;
5588 edge_iterator ei;
5590 new_bb = create_empty_bb (bb);
5592 /* Redirect the outgoing edges. */
5593 new_bb->succs = bb->succs;
5594 bb->succs = NULL;
5595 FOR_EACH_EDGE (e, ei, new_bb->succs)
5596 e->src = new_bb;
     /* Splitting at a label means splitting just after the labels.  */
5598 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5599 stmt = NULL;
5601 /* Move everything from GSI to the new basic block. */
5602 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5604 act = gsi_stmt (gsi);
5605 if (gimple_code (act) == GIMPLE_LABEL)
5606 continue;
5608 if (!stmt)
5609 break;
5611 if (stmt == act)
5613 gsi_next (&gsi);
5614 break;
5618 if (gsi_end_p (gsi))
5619 return new_bb;
5621 /* Split the statement list - avoid re-creating new containers as this
5622 brings ugly quadratic memory consumption in the inliner.
5623 (We are still quadratic since we need to update stmt BB pointers,
5624 sadly.) */
5625 gsi_split_seq_before (&gsi, &list);
5626 set_bb_seq (new_bb, list);
5627 for (gsi_tgt = gsi_start (list);
5628 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5629 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5631 return new_bb;
5635 /* Moves basic block BB after block AFTER. */
5637 static bool
5638 gimple_move_block_after (basic_block bb, basic_block after)
5640 if (bb->prev_bb == after)
5641 return true;
5643 unlink_block (bb);
5644 link_block (bb, after);
5646 return true;
5650 /* Return TRUE if block BB has no executable statements, otherwise return
5651 FALSE. */
5653 static bool
5654 gimple_empty_block_p (basic_block bb)
5656 /* BB must have no executable statements. */
5657 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5658 if (phi_nodes (bb))
5659 return false;
5660 if (gsi_end_p (gsi))
5661 return true;
5662 if (is_gimple_debug (gsi_stmt (gsi)))
5663 gsi_next_nondebug (&gsi);
5664 return gsi_end_p (gsi);
5668 /* Split a basic block if it ends with a conditional branch and if the
5669 other part of the block is not empty.  Returns the new block holding
   the branch, or NULL when BB does not end in a GIMPLE_COND/SWITCH.  */
5671 static basic_block
5672 gimple_split_block_before_cond_jump (basic_block bb)
5674 gimple last, split_point;
5675 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5676 if (gsi_end_p (gsi))
5677 return NULL;
5678 last = gsi_stmt (gsi);
5679 if (gimple_code (last) != GIMPLE_COND
5680 && gimple_code (last) != GIMPLE_SWITCH)
5681 return NULL;
     /* Split after the statement preceding the branch.  */
5682 gsi_prev_nondebug (&gsi);
5683 split_point = gsi_stmt (gsi);
5684 return split_block (bb, split_point)->dest;
5688 /* Return true if basic_block can be duplicated. */
5690 static bool
5691 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5693 return true;
5696 /* Create a duplicate of the basic block BB. NOTE: This does not
5697 preserve SSA form; new names are created for all definitions, and
   replacement mappings are registered for the SSA updater.  The copy
   is inserted just before the EXIT block.  */
5699 static basic_block
5700 gimple_duplicate_bb (basic_block bb)
5702 basic_block new_bb;
5703 gimple_stmt_iterator gsi, gsi_tgt;
5704 gimple_seq phis = phi_nodes (bb);
5705 gimple phi, stmt, copy;
5707 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5709 /* Copy the PHI nodes. We ignore PHI node arguments here because
5710 the incoming edges have not been setup yet. */
5711 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
5713 phi = gsi_stmt (gsi);
5714 copy = create_phi_node (NULL_TREE, new_bb);
5715 create_new_def_for (gimple_phi_result (phi), copy,
5716 gimple_phi_result_ptr (copy));
5717 gimple_set_uid (copy, gimple_uid (phi));
5720 gsi_tgt = gsi_start_bb (new_bb);
5721 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5723 def_operand_p def_p;
5724 ssa_op_iter op_iter;
5725 tree lhs;
5727 stmt = gsi_stmt (gsi);
     /* Labels are unique; never copy them.  */
5728 if (gimple_code (stmt) == GIMPLE_LABEL)
5729 continue;
5731 /* Don't duplicate label debug stmts. */
5732 if (gimple_debug_bind_p (stmt)
5733 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5734 == LABEL_DECL)
5735 continue;
5737 /* Create a new copy of STMT and duplicate STMT's virtual
5738 operands. */
5739 copy = gimple_copy (stmt);
5740 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5742 maybe_duplicate_eh_stmt (copy, stmt);
5743 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5745 /* When copying around a stmt writing into a local non-user
5746 aggregate, make sure it won't share stack slot with other
5747 vars. */
5748 lhs = gimple_get_lhs (stmt);
5749 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5751 tree base = get_base_address (lhs);
5752 if (base
5753 && (TREE_CODE (base) == VAR_DECL
5754 || TREE_CODE (base) == RESULT_DECL)
5755 && DECL_IGNORED_P (base)
5756 && !TREE_STATIC (base)
5757 && !DECL_EXTERNAL (base)
5758 && (TREE_CODE (base) != VAR_DECL
5759 || !DECL_HAS_VALUE_EXPR_P (base)))
5760 DECL_NONSHAREABLE (base) = 1;
5763 /* Create new names for all the definitions created by COPY and
5764 add replacement mappings for each new name. */
5765 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5766 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5769 return new_bb;
5772 /* Adds phi node arguments for edge E_COPY after basic block duplication.
   The argument values are looked up on the corresponding original edge
   (originals are found through get_bb_original on BB_DUPLICATED
   blocks).  */
5774 static void
5775 add_phi_args_after_copy_edge (edge e_copy)
5777 basic_block bb, bb_copy = e_copy->src, dest;
5778 edge e;
5779 edge_iterator ei;
5780 gimple phi, phi_copy;
5781 tree def;
5782 gimple_stmt_iterator psi, psi_copy;
     /* Nothing to do if the destination has no PHI nodes.  */
5784 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5785 return;
     /* Map the copied source/destination back to their originals.  */
5787 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5789 if (e_copy->dest->flags & BB_DUPLICATED)
5790 dest = get_bb_original (e_copy->dest);
5791 else
5792 dest = e_copy->dest;
5794 e = find_edge (bb, dest);
5795 if (!e)
5797 /* During loop unrolling the target of the latch edge is copied.
5798 In this case we are not looking for edge to dest, but to
5799 duplicated block whose original was dest. */
5800 FOR_EACH_EDGE (e, ei, bb->succs)
5802 if ((e->dest->flags & BB_DUPLICATED)
5803 && get_bb_original (e->dest) == dest)
5804 break;
5807 gcc_assert (e != NULL);
     /* Walk the PHIs of the original and copied destination in lockstep,
        copying each argument from the original edge onto E_COPY.  */
5810 for (psi = gsi_start_phis (e->dest),
5811 psi_copy = gsi_start_phis (e_copy->dest);
5812 !gsi_end_p (psi);
5813 gsi_next (&psi), gsi_next (&psi_copy))
5815 phi = gsi_stmt (psi);
5816 phi_copy = gsi_stmt (psi_copy);
5817 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5818 add_phi_arg (phi_copy, def, e_copy,
5819 gimple_phi_arg_location_from_edge (phi, e));
5824 /* Basic block BB_COPY was created by code duplication. Add phi node
5825 arguments for edges going out of BB_COPY. The blocks that were
5826 duplicated have BB_DUPLICATED set. */
5828 void
5829 add_phi_args_after_copy_bb (basic_block bb_copy)
5831 edge e_copy;
5832 edge_iterator ei;
5834 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5836 add_phi_args_after_copy_edge (e_copy);
5840 /* Blocks in REGION_COPY array of length N_REGION were created by
5841 duplication of basic blocks. Add phi node arguments for edges
5842 going from these blocks. If E_COPY is not NULL, also add
5843 phi node arguments for its destination.*/
5845 void
5846 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5847 edge e_copy)
5849 unsigned i;
5851 for (i = 0; i < n_region; i++)
5852 region_copy[i]->flags |= BB_DUPLICATED;
5854 for (i = 0; i < n_region; i++)
5855 add_phi_args_after_copy_bb (region_copy[i]);
5856 if (e_copy)
5857 add_phi_args_after_copy_edge (e_copy);
5859 for (i = 0; i < n_region; i++)
5860 region_copy[i]->flags &= ~BB_DUPLICATED;
5863 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5864 important exit edge EXIT. By important we mean that no SSA name defined
5865 inside region is live over the other exit edges of the region. All entry
5866 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5867 to the duplicate of the region. Dominance and loop information is
5868 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
5869 UPDATE_DOMINANCE is false then we assume that the caller will update the
5870 dominance information after calling this function. The new basic
5871 blocks are stored to REGION_COPY in the same order as they had in REGION,
5872 provided that REGION_COPY is not NULL.
5873 The function returns false if it is unable to copy the region,
5874 true otherwise. */
5876 bool
5877 gimple_duplicate_sese_region (edge entry, edge exit,
5878 basic_block *region, unsigned n_region,
5879 basic_block *region_copy,
5880 bool update_dominance)
5882 unsigned i;
5883 bool free_region_copy = false, copying_header = false;
5884 struct loop *loop = entry->dest->loop_father;
5885 edge exit_copy;
5886 vec<basic_block> doms;
5887 edge redirected;
5888 int total_freq = 0, entry_freq = 0;
5889 gcov_type total_count = 0, entry_count = 0;
5891 if (!can_copy_bbs_p (region, n_region))
5892 return false;
5894 /* Some sanity checking. Note that we do not check for all possible
5895 missuses of the functions. I.e. if you ask to copy something weird,
5896 it will work, but the state of structures probably will not be
5897 correct. */
5898 for (i = 0; i < n_region; i++)
5900 /* We do not handle subloops, i.e. all the blocks must belong to the
5901 same loop. */
5902 if (region[i]->loop_father != loop)
5903 return false;
5905 if (region[i] != entry->dest
5906 && region[i] == loop->header)
5907 return false;
5910 /* In case the function is used for loop header copying (which is the primary
5911 use), ensure that EXIT and its copy will be new latch and entry edges. */
5912 if (loop->header == entry->dest)
5914 copying_header = true;
5916 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5917 return false;
5919 for (i = 0; i < n_region; i++)
5920 if (region[i] != exit->src
5921 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5922 return false;
5925 initialize_original_copy_tables ();
5927 if (copying_header)
5928 set_loop_copy (loop, loop_outer (loop));
5929 else
5930 set_loop_copy (loop, loop);
5932 if (!region_copy)
5934 region_copy = XNEWVEC (basic_block, n_region);
5935 free_region_copy = true;
5938 /* Record blocks outside the region that are dominated by something
5939 inside. */
5940 if (update_dominance)
5942 doms.create (0);
5943 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
     /* Compute profile counts/frequencies needed to rescale the region
        and its copy after duplication.  */
5946 if (entry->dest->count)
5948 total_count = entry->dest->count;
5949 entry_count = entry->count;
5950 /* Fix up corner cases, to avoid division by zero or creation of negative
5951 frequencies. */
5952 if (entry_count > total_count)
5953 entry_count = total_count;
5955 else
5957 total_freq = entry->dest->frequency;
5958 entry_freq = EDGE_FREQUENCY (entry);
5959 /* Fix up corner cases, to avoid division by zero or creation of negative
5960 frequencies. */
5961 if (total_freq == 0)
5962 total_freq = 1;
5963 else if (entry_freq > total_freq)
5964 entry_freq = total_freq;
5967 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5968 split_edge_bb_loc (entry), update_dominance);
5969 if (total_count)
5971 scale_bbs_frequencies_gcov_type (region, n_region,
5972 total_count - entry_count,
5973 total_count);
5974 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
5975 total_count);
5977 else
5979 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5980 total_freq);
5981 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
     /* When copying the loop header, the exit edge becomes the new latch
        and entry of the loop.  */
5984 if (copying_header)
5986 loop->header = exit->dest;
5987 loop->latch = exit->src;
5990 /* Redirect the entry and add the phi node arguments. */
5991 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
5992 gcc_assert (redirected != NULL);
5993 flush_pending_stmts (entry);
5995 /* Concerning updating of dominators: We must recount dominators
5996 for entry block and its copy. Anything that is outside of the
5997 region, but was dominated by something inside needs recounting as
5998 well. */
5999 if (update_dominance)
6001 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6002 doms.safe_push (get_bb_original (entry->dest));
6003 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6004 doms.release ();
6007 /* Add the other PHI node arguments. */
6008 add_phi_args_after_copy (region_copy, n_region, NULL);
6010 if (free_region_copy)
6011 free (region_copy);
6013 free_original_copy_tables ();
6014 return true;
6017 /* Checks if BB is part of the region defined by N_REGION BBS. */
6018 static bool
6019 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6021 unsigned int n;
6023 for (n = 0; n < n_region; n++)
6025 if (bb == bbs[n])
6026 return true;
6028 return false;
6031 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6032 are stored to REGION_COPY in the same order in that they appear
6033 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6034 the region, EXIT an exit from it. The condition guarding EXIT
6035 is moved to ENTRY. Returns true if duplication succeeds, false
6036 otherwise.
6038 For example,
6040 some_code;
6041 if (cond)
6043 else
6046 is transformed to
6048 if (cond)
6050 some_code;
6053 else
6055 some_code;
6060 bool
6061 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
6062 basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
6063 basic_block *region_copy ATTRIBUTE_UNUSED)
/* NOTE(review): the ATTRIBUTE_UNUSED markers on the parameters above look
   stale -- every parameter is in fact used below; consider removing them.  */
6065 unsigned i;
6066 bool free_region_copy = false;
6067 struct loop *loop = exit->dest->loop_father;
6068 struct loop *orig_loop = entry->dest->loop_father;
6069 basic_block switch_bb, entry_bb, nentry_bb;
6070 vec<basic_block> doms;
6071 int total_freq = 0, exit_freq = 0;
6072 gcov_type total_count = 0, exit_count = 0;
6073 edge exits[2], nexits[2], e;
6074 gimple_stmt_iterator gsi;
6075 gimple cond_stmt;
6076 edge sorig, snew;
6077 basic_block exit_bb;
6078 gimple_stmt_iterator psi;
6079 gimple phi;
6080 tree def;
6081 struct loop *target, *aloop, *cloop;
/* EXIT->src must end in a two-way conditional; exits[1] is the other
   (non-EXIT) successor edge of EXIT->src.  */
6083 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6084 exits[0] = exit;
6085 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6087 if (!can_copy_bbs_p (region, n_region))
6088 return false;
6090 initialize_original_copy_tables ();
6091 set_loop_copy (orig_loop, loop);
/* Duplicate the loop-tree nodes for any subloop of ORIG_LOOP that lives
   entirely inside the region being copied.  */
6093 target= loop;
6094 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6096 if (bb_part_of_region_p (aloop->header, region, n_region))
6098 cloop = duplicate_loop (aloop, target);
6099 duplicate_subloops (aloop, cloop);
6103 if (!region_copy)
6105 region_copy = XNEWVEC (basic_block, n_region);
6106 free_region_copy = true;
6109 gcc_assert (!need_ssa_update_p (cfun));
6111 /* Record blocks outside the region that are dominated by something
6112 inside. */
6113 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
/* Prefer profile counts when the source block has one; otherwise fall
   back to static frequencies for the scaling below.  */
6115 if (exit->src->count)
6117 total_count = exit->src->count;
6118 exit_count = exit->count;
6119 /* Fix up corner cases, to avoid division by zero or creation of negative
6120 frequencies. */
6121 if (exit_count > total_count)
6122 exit_count = total_count;
6124 else
6126 total_freq = exit->src->frequency;
6127 exit_freq = EDGE_FREQUENCY (exit);
6128 /* Fix up corner cases, to avoid division by zero or creation of negative
6129 frequencies. */
6130 if (total_freq == 0)
6131 total_freq = 1;
6132 if (exit_freq > total_freq)
6133 exit_freq = total_freq;
6136 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6137 split_edge_bb_loc (exit), true);
6138 if (total_count)
6140 scale_bbs_frequencies_gcov_type (region, n_region,
6141 total_count - exit_count,
6142 total_count);
6143 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6144 total_count);
6146 else
6148 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6149 total_freq);
6150 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6153 /* Create the switch block, and put the exit condition to it. */
6154 entry_bb = entry->dest;
6155 nentry_bb = get_bb_copy (entry_bb);
6156 if (!last_stmt (entry->src)
6157 || !stmt_ends_bb_p (last_stmt (entry->src)))
6158 switch_bb = entry->src;
6159 else
6160 switch_bb = split_edge (entry);
6161 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
/* Copy EXIT's guarding GIMPLE_COND to the end of SWITCH_BB; the two
   successors of SWITCH_BB then select between original and copy.  */
6163 gsi = gsi_last_bb (switch_bb);
6164 cond_stmt = last_stmt (exit->src);
6165 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6166 cond_stmt = gimple_copy (cond_stmt);
6168 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6170 sorig = single_succ_edge (switch_bb);
6171 sorig->flags = exits[1]->flags;
6172 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6174 /* Register the new edge from SWITCH_BB in loop exit lists. */
6175 rescan_loop_exit (snew, true, false);
6177 /* Add the PHI node arguments. */
6178 add_phi_args_after_copy (region_copy, n_region, snew);
6180 /* Get rid of now superfluous conditions and associated edges (and phi node
6181 arguments). */
6182 exit_bb = exit->dest;
6184 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6185 PENDING_STMT (e) = NULL;
6187 /* The latch of ORIG_LOOP was copied, and so was the backedge
6188 to the original header. We redirect this backedge to EXIT_BB. */
6189 for (i = 0; i < n_region; i++)
6190 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6192 gcc_assert (single_succ_edge (region_copy[i]));
6193 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6194 PENDING_STMT (e) = NULL;
6195 for (psi = gsi_start_phis (exit_bb);
6196 !gsi_end_p (psi);
6197 gsi_next (&psi))
6199 phi = gsi_stmt (psi);
6200 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6201 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6204 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6205 PENDING_STMT (e) = NULL;
6207 /* Anything that is outside of the region, but was dominated by something
6208 inside needs to update dominance info. */
6209 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6210 doms.release ();
6211 /* Update the SSA web. */
6212 update_ssa (TODO_update_ssa);
6214 if (free_region_copy)
6215 free (region_copy);
6217 free_original_copy_tables ();
6218 return true;
6221 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6222 adding blocks when the dominator traversal reaches EXIT. This
6223 function silently assumes that ENTRY strictly dominates EXIT. */
6225 void
6226 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6227 vec<basic_block> *bbs_p)
6229 basic_block son;
6231 for (son = first_dom_son (CDI_DOMINATORS, entry);
6232 son;
6233 son = next_dom_son (CDI_DOMINATORS, son))
6235 bbs_p->safe_push (son);
6236 if (son != exit)
6237 gather_blocks_in_sese_region (son, exit, bbs_p);
6241 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6242 The duplicates are recorded in VARS_MAP. */
6244 static void
6245 replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
6246 tree to_context)
6248 tree t = *tp, new_t;
6249 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6250 void **loc;
6252 if (DECL_CONTEXT (t) == to_context)
6253 return;
6255 loc = pointer_map_contains (vars_map, t);
6257 if (!loc)
6259 loc = pointer_map_insert (vars_map, t);
6261 if (SSA_VAR_P (t))
6263 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6264 add_local_decl (f, new_t);
6266 else
6268 gcc_assert (TREE_CODE (t) == CONST_DECL);
6269 new_t = copy_node (t);
6271 DECL_CONTEXT (new_t) = to_context;
6273 *loc = new_t;
6275 else
6276 new_t = (tree) *loc;
6278 *tp = new_t;
6282 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6283 VARS_MAP maps old ssa names and var_decls to the new ones. */
6285 static tree
6286 replace_ssa_name (tree name, struct pointer_map_t *vars_map,
6287 tree to_context)
6289 void **loc;
6290 tree new_name;
6292 gcc_assert (!virtual_operand_p (name));
6294 loc = pointer_map_contains (vars_map, name);
6296 if (!loc)
6298 tree decl = SSA_NAME_VAR (name);
6299 if (decl)
6301 replace_by_duplicate_decl (&decl, vars_map, to_context);
6302 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6303 decl, SSA_NAME_DEF_STMT (name));
6304 if (SSA_NAME_IS_DEFAULT_DEF (name))
6305 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6306 decl, new_name);
6308 else
6309 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6310 name, SSA_NAME_DEF_STMT (name));
6312 loc = pointer_map_insert (vars_map, name);
6313 *loc = new_name;
6315 else
6316 new_name = (tree) *loc;
6318 return new_name;
/* Bookkeeping shared by the move_stmt_* walkers while moving statements
   from one function to another (see move_block_to_fn).  */
6321 struct move_stmt_d
/* BLOCK statements belonged to in the source function (may be NULL).  */
6323 tree orig_block;
/* BLOCK they should belong to in the destination function.  */
6324 tree new_block;
/* FUNCTION_DECL of the source function.  */
6325 tree from_context;
/* FUNCTION_DECL of the destination function.  */
6326 tree to_context;
/* Maps old ssa names and var_decls to their duplicates.  */
6327 struct pointer_map_t *vars_map;
/* Maps old labels to replacement labels (may be NULL).  */
6328 htab_t new_label_map;
/* Maps source EH regions to their duplicates in the destination.  */
6329 struct pointer_map_t *eh_map;
/* Whether local decls should currently be remapped (cleared inside
   OMP directives -- see move_stmt_r).  */
6330 bool remap_decls_p;
6333 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6334 contained in *TP if it has been ORIG_BLOCK previously and change the
6335 DECL_CONTEXT of every local variable referenced in *TP. */
/* This is a walk_tree callback invoked via walk_gimple_stmt; DATA is a
   walk_stmt_info whose info field carries the move_stmt_d context.  */
6337 static tree
6338 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6340 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6341 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6342 tree t = *tp;
6344 if (EXPR_P (t))
6346 tree block = TREE_BLOCK (t);
6347 if (block == p->orig_block
6348 || (p->orig_block == NULL_TREE
6349 && block != NULL_TREE))
6350 TREE_SET_BLOCK (t, p->new_block);
/* With checking enabled, verify that any other block is nested inside
   ORIG_BLOCK -- otherwise the expression escaped the moved region.  */
6351 #ifdef ENABLE_CHECKING
6352 else if (block != NULL_TREE)
6354 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6355 block = BLOCK_SUPERCONTEXT (block);
6356 gcc_assert (block == p->orig_block);
6358 #endif
6360 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6362 if (TREE_CODE (t) == SSA_NAME)
6363 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6364 else if (TREE_CODE (t) == LABEL_DECL)
6366 if (p->new_label_map)
6368 struct tree_map in, *out;
6369 in.base.from = t;
6370 out = (struct tree_map *)
6371 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6372 if (out)
6373 *tp = t = out->to;
6376 DECL_CONTEXT (t) = p->to_context;
6378 else if (p->remap_decls_p)
6380 /* Replace T with its duplicate. T should no longer appear in the
6381 parent function, so this looks wasteful; however, it may appear
6382 in referenced_vars, and more importantly, as virtual operands of
6383 statements, and in alias lists of other variables. It would be
6384 quite difficult to expunge it from all those places. ??? It might
6385 suffice to do this for addressable variables. */
6386 if ((TREE_CODE (t) == VAR_DECL
6387 && !is_global_var (t))
6388 || TREE_CODE (t) == CONST_DECL)
6389 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
/* Decls and SSA names have no sub-expressions worth walking.  */
6391 *walk_subtrees = 0;
6393 else if (TYPE_P (t))
6394 *walk_subtrees = 0;
6396 return NULL_TREE;
6399 /* Helper for move_stmt_r. Given an EH region number for the source
6400 function, map that to the duplicate EH regio number in the dest. */
6402 static int
6403 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6405 eh_region old_r, new_r;
6406 void **slot;
6408 old_r = get_eh_region_from_number (old_nr);
6409 slot = pointer_map_contains (p->eh_map, old_r);
6410 new_r = (eh_region) *slot;
6412 return new_r->index;
6415 /* Similar, but operate on INTEGER_CSTs. */
6417 static tree
6418 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6420 int old_nr, new_nr;
6422 old_nr = tree_to_shwi (old_t_nr);
6423 new_nr = move_stmt_eh_region_nr (old_nr, p);
6425 return build_int_cst (integer_type_node, new_nr);
6428 /* Like move_stmt_op, but for gimple statements.
6430 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6431 contained in the current statement in *GSI_P and change the
6432 DECL_CONTEXT of every local variable referenced in the current
6433 statement. */
/* walk_gimple_stmt callback; WI->info carries the move_stmt_d context.  */
6435 static tree
6436 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6437 struct walk_stmt_info *wi)
6439 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6440 gimple stmt = gsi_stmt (*gsi_p);
6441 tree block = gimple_block (stmt);
6443 if (block == p->orig_block
6444 || (p->orig_block == NULL_TREE
6445 && block != NULL_TREE))
6446 gimple_set_block (stmt, p->new_block);
/* Statements that embed EH region numbers need those numbers remapped
   to the duplicated regions of the destination function.  */
6448 switch (gimple_code (stmt))
6450 case GIMPLE_CALL:
6451 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6453 tree r, fndecl = gimple_call_fndecl (stmt);
6454 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6455 switch (DECL_FUNCTION_CODE (fndecl))
6457 case BUILT_IN_EH_COPY_VALUES:
/* __builtin_eh_copy_values carries two region numbers; remap the
   second here, the first in the shared code below.  */
6458 r = gimple_call_arg (stmt, 1);
6459 r = move_stmt_eh_region_tree_nr (r, p);
6460 gimple_call_set_arg (stmt, 1, r);
6461 /* FALLTHRU */
6463 case BUILT_IN_EH_POINTER:
6464 case BUILT_IN_EH_FILTER:
6465 r = gimple_call_arg (stmt, 0);
6466 r = move_stmt_eh_region_tree_nr (r, p);
6467 gimple_call_set_arg (stmt, 0, r);
6468 break;
6470 default:
6471 break;
6474 break;
6476 case GIMPLE_RESX:
6478 int r = gimple_resx_region (stmt);
6479 r = move_stmt_eh_region_nr (r, p);
6480 gimple_resx_set_region (stmt, r);
6482 break;
6484 case GIMPLE_EH_DISPATCH:
6486 int r = gimple_eh_dispatch_region (stmt);
6487 r = move_stmt_eh_region_nr (r, p);
6488 gimple_eh_dispatch_set_region (stmt, r);
6490 break;
6492 case GIMPLE_OMP_RETURN:
6493 case GIMPLE_OMP_CONTINUE:
6494 break;
6495 default:
6496 if (is_gimple_omp (stmt))
6498 /* Do not remap variables inside OMP directives. Variables
6499 referenced in clauses and directive header belong to the
6500 parent function and should not be moved into the child
6501 function. */
6502 bool save_remap_decls_p = p->remap_decls_p;
6503 p->remap_decls_p = false;
6504 *handled_ops_p = true;
6506 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6507 move_stmt_op, wi);
/* Restore the flag for statements following the OMP body.  */
6509 p->remap_decls_p = save_remap_decls_p;
6511 break;
6514 return NULL_TREE;
6517 /* Move basic block BB from function CFUN to function DEST_FN. The
6518 block is moved out of the original linked list and placed after
6519 block AFTER in the new list. Also, the block is removed from the
6520 original array of blocks and placed in DEST_FN's array of blocks.
6521 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
6522 updated to reflect the moved edges.
6524 The local variables are remapped to new instances, VARS_MAP is used
6525 to record the mapping. */
6527 static void
6528 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6529 basic_block after, bool update_edge_count_p,
6530 struct move_stmt_d *d)
6532 struct control_flow_graph *cfg;
6533 edge_iterator ei;
6534 edge e;
6535 gimple_stmt_iterator si;
6536 unsigned old_len, new_len;
6538 /* Remove BB from dominance structures. */
6539 delete_from_dominance_info (CDI_DOMINATORS, bb);
6541 /* Move BB from its current loop to the copy in the new function. */
6542 if (current_loops)
/* The caller stored the destination loop in loop_father->aux (see
   move_sese_region_to_fn).  */
6544 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6545 if (new_loop)
6546 bb->loop_father = new_loop;
6549 /* Link BB to the new linked list. */
6550 move_block_after (bb, after);
6552 /* Update the edge count in the corresponding flowgraphs. */
6553 if (update_edge_count_p)
6554 FOR_EACH_EDGE (e, ei, bb->succs)
6556 cfun->cfg->x_n_edges--;
6557 dest_cfun->cfg->x_n_edges++;
6560 /* Remove BB from the original basic block array. */
6561 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6562 cfun->cfg->x_n_basic_blocks--;
6564 /* Grow DEST_CFUN's basic block array if needed. */
6565 cfg = dest_cfun->cfg;
6566 cfg->x_n_basic_blocks++;
6567 if (bb->index >= cfg->x_last_basic_block)
6568 cfg->x_last_basic_block = bb->index + 1;
6570 old_len = vec_safe_length (cfg->x_basic_block_info);
6571 if ((unsigned) cfg->x_last_basic_block >= old_len)
/* Grow with ~25% slack to amortize repeated insertions.  */
6573 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6574 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6577 (*cfg->x_basic_block_info)[bb->index] = bb;
6579 /* Remap the variables in phi nodes. */
6580 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
6582 gimple phi = gsi_stmt (si);
6583 use_operand_p use;
6584 tree op = PHI_RESULT (phi);
6585 ssa_op_iter oi;
6586 unsigned i;
6588 if (virtual_operand_p (op))
6590 /* Remove the phi nodes for virtual operands (alias analysis will be
6591 run for the new function, anyway). */
6592 remove_phi_node (&si, true);
6593 continue;
6596 SET_PHI_RESULT (phi,
6597 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6598 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6600 op = USE_FROM_PTR (use);
6601 if (TREE_CODE (op) == SSA_NAME)
6602 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
/* Retarget PHI argument locations from ORIG_BLOCK into NEW_BLOCK.  */
6605 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6607 location_t locus = gimple_phi_arg_location (phi, i);
6608 tree block = LOCATION_BLOCK (locus);
6610 if (locus == UNKNOWN_LOCATION)
6611 continue;
6612 if (d->orig_block == NULL_TREE || block == d->orig_block)
6614 if (d->new_block == NULL_TREE)
6615 locus = LOCATION_LOCUS (locus);
6616 else
6617 locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
6618 gimple_phi_arg_set_location (phi, i, locus);
6622 gsi_next (&si);
/* Remap operands, labels, EH info and histograms of every statement.  */
6625 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6627 gimple stmt = gsi_stmt (si);
6628 struct walk_stmt_info wi;
6630 memset (&wi, 0, sizeof (wi));
6631 wi.info = d;
6632 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6634 if (gimple_code (stmt) == GIMPLE_LABEL)
6636 tree label = gimple_label_label (stmt);
6637 int uid = LABEL_DECL_UID (label);
6639 gcc_assert (uid > -1);
6641 old_len = vec_safe_length (cfg->x_label_to_block_map);
6642 if (old_len <= (unsigned) uid)
6644 new_len = 3 * uid / 2 + 1;
6645 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6648 (*cfg->x_label_to_block_map)[uid] = bb;
6649 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6651 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6653 if (uid >= dest_cfun->cfg->last_label_uid)
6654 dest_cfun->cfg->last_label_uid = uid + 1;
6657 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6658 remove_stmt_from_eh_lp_fn (cfun, stmt);
6660 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6661 gimple_remove_stmt_histograms (cfun, stmt);
6663 /* We cannot leave any operands allocated from the operand caches of
6664 the current function. */
6665 free_stmt_operands (cfun, stmt);
6666 push_cfun (dest_cfun);
6667 update_stmt (stmt);
6668 pop_cfun ();
/* Outgoing goto locations get the same BLOCK retargeting as PHI args.  */
6671 FOR_EACH_EDGE (e, ei, bb->succs)
6672 if (e->goto_locus != UNKNOWN_LOCATION)
6674 tree block = LOCATION_BLOCK (e->goto_locus);
6675 if (d->orig_block == NULL_TREE
6676 || block == d->orig_block)
6677 e->goto_locus = d->new_block ?
6678 COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
6679 LOCATION_LOCUS (e->goto_locus);
6683 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6684 the outermost EH region. Use REGION as the incoming base EH region. */
6686 static eh_region
6687 find_outermost_region_in_block (struct function *src_cfun,
6688 basic_block bb, eh_region region)
6690 gimple_stmt_iterator si;
6692 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6694 gimple stmt = gsi_stmt (si);
6695 eh_region stmt_region;
6696 int lp_nr;
6698 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6699 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6700 if (stmt_region)
6702 if (region == NULL)
6703 region = stmt_region;
6704 else if (stmt_region != region)
6706 region = eh_region_outermost (src_cfun, stmt_region, region);
6707 gcc_assert (region != NULL);
6712 return region;
6715 static tree
6716 new_label_mapper (tree decl, void *data)
6718 htab_t hash = (htab_t) data;
6719 struct tree_map *m;
6720 void **slot;
6722 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6724 m = XNEW (struct tree_map);
6725 m->hash = DECL_UID (decl);
6726 m->base.from = decl;
6727 m->to = create_artificial_label (UNKNOWN_LOCATION);
6728 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6729 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6730 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6732 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6733 gcc_assert (*slot == NULL);
6735 *slot = m;
6737 return m->to;
6740 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6741 subblocks. */
6743 static void
6744 replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
6745 tree to_context)
6747 tree *tp, t;
/* TP points at the chain slot holding the current decl, so the duplicate
   can be spliced into BLOCK_VARS in place.  */
6749 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6751 t = *tp;
6752 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6753 continue;
6754 replace_by_duplicate_decl (&t, vars_map, to_context);
6755 if (t != *tp)
/* Carry any DECL_VALUE_EXPR over to the duplicate before splicing.  */
6757 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6759 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6760 DECL_HAS_VALUE_EXPR_P (t) = 1;
6762 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6763 *tp = t;
/* Recurse into nested lexical blocks.  */
6767 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6768 replace_block_vars_by_duplicates (block, vars_map, to_context);
6771 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6772 from FN1 to FN2. */
6774 static void
6775 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6776 struct loop *loop)
6778 /* Discard it from the old loop array. */
6779 (*get_loops (fn1))[loop->num] = NULL;
6781 /* Place it in the new loop array, assigning it a new number. */
6782 loop->num = number_of_loops (fn2);
6783 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6785 /* Recurse to children. */
6786 for (loop = loop->inner; loop; loop = loop->next)
6787 fixup_loop_arrays_after_move (fn1, fn2, loop);
6790 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6791 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6792 single basic block in the original CFG and the new basic block is
6793 returned. DEST_CFUN must not have a CFG yet.
6795 Note that the region need not be a pure SESE region. Blocks inside
6796 the region may contain calls to abort/exit. The only restriction
6797 is that ENTRY_BB should be the only entry point and it must
6798 dominate EXIT_BB.
6800 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6801 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
6802 to the new function.
6804 All local variables referenced in the region are assumed to be in
6805 the corresponding BLOCK_VARS and unexpanded variable lists
6806 associated with DEST_CFUN. */
6808 basic_block
6809 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6810 basic_block exit_bb, tree orig_block)
6812 vec<basic_block> bbs, dom_bbs;
6813 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6814 basic_block after, bb, *entry_pred, *exit_succ, abb;
6815 struct function *saved_cfun = cfun;
6816 int *entry_flag, *exit_flag;
6817 unsigned *entry_prob, *exit_prob;
6818 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
6819 edge e;
6820 edge_iterator ei;
6821 htab_t new_label_map;
6822 struct pointer_map_t *vars_map, *eh_map;
6823 struct loop *loop = entry_bb->loop_father;
6824 struct loop *loop0 = get_loop (saved_cfun, 0);
6825 struct move_stmt_d d;
6827 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6828 region. */
6829 gcc_assert (entry_bb != exit_bb
6830 && (!exit_bb
6831 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6833 /* Collect all the blocks in the region. Manually add ENTRY_BB
6834 because it won't be added by dfs_enumerate_from. */
6835 bbs.create (0);
6836 bbs.safe_push (entry_bb);
6837 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6839 /* The blocks that used to be dominated by something in BBS will now be
6840 dominated by the new block. */
6841 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6842 bbs.address (),
6843 bbs.length ());
6845 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6846 the predecessor edges to ENTRY_BB and the successor edges to
6847 EXIT_BB so that we can re-attach them to the new basic block that
6848 will replace the region. */
6849 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6850 entry_pred = XNEWVEC (basic_block, num_entry_edges);
6851 entry_flag = XNEWVEC (int, num_entry_edges);
6852 entry_prob = XNEWVEC (unsigned, num_entry_edges);
6853 i = 0;
6854 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6856 entry_prob[i] = e->probability;
6857 entry_flag[i] = e->flags;
6858 entry_pred[i++] = e->src;
6859 remove_edge (e);
6862 if (exit_bb)
6864 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6865 exit_succ = XNEWVEC (basic_block, num_exit_edges);
6866 exit_flag = XNEWVEC (int, num_exit_edges);
6867 exit_prob = XNEWVEC (unsigned, num_exit_edges);
6868 i = 0;
6869 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6871 exit_prob[i] = e->probability;
6872 exit_flag[i] = e->flags;
6873 exit_succ[i++] = e->dest;
6874 remove_edge (e);
6877 else
6879 num_exit_edges = 0;
6880 exit_succ = NULL;
6881 exit_flag = NULL;
6882 exit_prob = NULL;
6885 /* Switch context to the child function to initialize DEST_FN's CFG. */
6886 gcc_assert (dest_cfun->cfg == NULL);
6887 push_cfun (dest_cfun);
6889 init_empty_tree_cfg ();
6891 /* Initialize EH information for the new function. */
6892 eh_map = NULL;
6893 new_label_map = NULL;
6894 if (saved_cfun->eh)
6896 eh_region region = NULL;
6898 FOR_EACH_VEC_ELT (bbs, i, bb)
6899 region = find_outermost_region_in_block (saved_cfun, bb, region);
6901 init_eh_for_function ();
6902 if (region != NULL)
6904 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6905 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
6906 new_label_mapper, new_label_map);
6910 /* Initialize an empty loop tree. */
6911 struct loops *loops = ggc_cleared_alloc<struct loops> ();
6912 init_loops_structure (dest_cfun, loops, 1);
6913 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
6914 set_loops_for_fn (dest_cfun, loops);
6916 /* Move the outlined loop tree part. */
6917 num_nodes = bbs.length ();
6918 FOR_EACH_VEC_ELT (bbs, i, bb)
6920 if (bb->loop_father->header == bb)
6922 struct loop *this_loop = bb->loop_father;
6923 struct loop *outer = loop_outer (this_loop);
6924 if (outer == loop
6925 /* If the SESE region contains some bbs ending with
6926 a noreturn call, those are considered to belong
6927 to the outermost loop in saved_cfun, rather than
6928 the entry_bb's loop_father. */
6929 || outer == loop0)
6931 if (outer != loop)
6932 num_nodes -= this_loop->num_nodes;
6933 flow_loop_tree_node_remove (bb->loop_father);
6934 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
6935 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
6938 else if (bb->loop_father == loop0 && loop0 != loop)
6939 num_nodes--;
6941 /* Remove loop exits from the outlined region. */
6942 if (loops_for_fn (saved_cfun)->exits)
6943 FOR_EACH_EDGE (e, ei, bb->succs)
6945 void **slot = htab_find_slot_with_hash
6946 (loops_for_fn (saved_cfun)->exits, e,
6947 htab_hash_pointer (e), NO_INSERT);
6948 if (slot)
6949 htab_clear_slot (loops_for_fn (saved_cfun)->exits, slot);
6954 /* Adjust the number of blocks in the tree root of the outlined part. */
6955 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
6957 /* Setup a mapping to be used by move_block_to_fn. */
6958 loop->aux = current_loops->tree_root;
6959 loop0->aux = current_loops->tree_root;
6961 pop_cfun ();
6963 /* Move blocks from BBS into DEST_CFUN. */
6964 gcc_assert (bbs.length () >= 2);
6965 after = dest_cfun->cfg->x_entry_block_ptr;
6966 vars_map = pointer_map_create ();
6968 memset (&d, 0, sizeof (d));
6969 d.orig_block = orig_block;
6970 d.new_block = DECL_INITIAL (dest_cfun->decl);
6971 d.from_context = cfun->decl;
6972 d.to_context = dest_cfun->decl;
6973 d.vars_map = vars_map;
6974 d.new_label_map = new_label_map;
6975 d.eh_map = eh_map;
6976 d.remap_decls_p = true;
6978 FOR_EACH_VEC_ELT (bbs, i, bb)
6980 /* No need to update edge counts on the last block. It has
6981 already been updated earlier when we detached the region from
6982 the original CFG. */
6983 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
6984 after = bb;
/* Clear the aux mapping installed above for move_block_to_fn.  */
6987 loop->aux = NULL;
6988 loop0->aux = NULL;
6989 /* Loop sizes are no longer correct, fix them up. */
6990 loop->num_nodes -= num_nodes;
6991 for (struct loop *outer = loop_outer (loop);
6992 outer; outer = loop_outer (outer))
6993 outer->num_nodes -= num_nodes;
6994 loop0->num_nodes -= bbs.length () - num_nodes;
/* Propagate simduid/force_vectorize markers into the new function.  */
6996 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
6998 struct loop *aloop;
6999 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7000 if (aloop != NULL)
7002 if (aloop->simduid)
7004 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7005 d.to_context);
7006 dest_cfun->has_simduid_loops = true;
7008 if (aloop->force_vectorize)
7009 dest_cfun->has_force_vectorize_loops = true;
7013 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7014 if (orig_block)
7016 tree block;
7017 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7018 == NULL_TREE);
7019 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7020 = BLOCK_SUBBLOCKS (orig_block);
7021 for (block = BLOCK_SUBBLOCKS (orig_block);
7022 block; block = BLOCK_CHAIN (block))
7023 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7024 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7027 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7028 vars_map, dest_cfun->decl);
7030 if (new_label_map)
7031 htab_delete (new_label_map);
7032 if (eh_map)
7033 pointer_map_destroy (eh_map);
7034 pointer_map_destroy (vars_map);
7036 /* Rewire the entry and exit blocks. The successor to the entry
7037 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7038 the child function. Similarly, the predecessor of DEST_FN's
7039 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7040 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7041 various CFG manipulation function get to the right CFG.
7043 FIXME, this is silly. The CFG ought to become a parameter to
7044 these helpers. */
7045 push_cfun (dest_cfun);
7046 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7047 if (exit_bb)
7048 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7049 pop_cfun ();
7051 /* Back in the original function, the SESE region has disappeared,
7052 create a new basic block in its place. */
7053 bb = create_empty_bb (entry_pred[0]);
7054 if (current_loops)
7055 add_bb_to_loop (bb, loop);
7056 for (i = 0; i < num_entry_edges; i++)
7058 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7059 e->probability = entry_prob[i];
7062 for (i = 0; i < num_exit_edges; i++)
7064 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7065 e->probability = exit_prob[i];
7068 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7069 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7070 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7071 dom_bbs.release ();
7073 if (exit_bb)
7075 free (exit_prob);
7076 free (exit_flag);
7077 free (exit_succ);
7079 free (entry_prob);
7080 free (entry_flag);
7081 free (entry_pred);
7082 bbs.release ();
7084 return bb;
7088 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
7091 void
7092 dump_function_to_file (tree fndecl, FILE *file, int flags)
7094 tree arg, var, old_current_fndecl = current_function_decl;
7095 struct function *dsf;
7096 bool ignore_topmost_bind = false, any_var = false;
7097 basic_block bb;
7098 tree chain;
7099 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7100 && decl_is_tm_clone (fndecl));
7101 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7103 current_function_decl = fndecl;
7104 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7106 arg = DECL_ARGUMENTS (fndecl);
7107 while (arg)
7109 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7110 fprintf (file, " ");
7111 print_generic_expr (file, arg, dump_flags);
7112 if (flags & TDF_VERBOSE)
7113 print_node (file, "", arg, 4);
7114 if (DECL_CHAIN (arg))
7115 fprintf (file, ", ");
7116 arg = DECL_CHAIN (arg);
7118 fprintf (file, ")\n");
7120 if (flags & TDF_VERBOSE)
7121 print_node (file, "", fndecl, 2);
7123 dsf = DECL_STRUCT_FUNCTION (fndecl);
7124 if (dsf && (flags & TDF_EH))
7125 dump_eh_tree (file, dsf);
7127 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7129 dump_node (fndecl, TDF_SLIM | flags, file);
7130 current_function_decl = old_current_fndecl;
7131 return;
7134 /* When GIMPLE is lowered, the variables are no longer available in
7135 BIND_EXPRs, so display them separately. */
7136 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7138 unsigned ix;
7139 ignore_topmost_bind = true;
7141 fprintf (file, "{\n");
7142 if (!vec_safe_is_empty (fun->local_decls))
7143 FOR_EACH_LOCAL_DECL (fun, ix, var)
7145 print_generic_decl (file, var, flags);
7146 if (flags & TDF_VERBOSE)
7147 print_node (file, "", var, 4);
7148 fprintf (file, "\n");
7150 any_var = true;
7152 if (gimple_in_ssa_p (cfun))
7153 for (ix = 1; ix < num_ssa_names; ++ix)
7155 tree name = ssa_name (ix);
7156 if (name && !SSA_NAME_VAR (name))
7158 fprintf (file, " ");
7159 print_generic_expr (file, TREE_TYPE (name), flags);
7160 fprintf (file, " ");
7161 print_generic_expr (file, name, flags);
7162 fprintf (file, ";\n");
7164 any_var = true;
7169 if (fun && fun->decl == fndecl
7170 && fun->cfg
7171 && basic_block_info_for_fn (fun))
7173 /* If the CFG has been built, emit a CFG-based dump. */
7174 if (!ignore_topmost_bind)
7175 fprintf (file, "{\n");
7177 if (any_var && n_basic_blocks_for_fn (fun))
7178 fprintf (file, "\n");
7180 FOR_EACH_BB_FN (bb, fun)
7181 dump_bb (file, bb, 2, flags | TDF_COMMENT);
7183 fprintf (file, "}\n");
7185 else if (DECL_SAVED_TREE (fndecl) == NULL)
7187 /* The function is now in GIMPLE form but the CFG has not been
7188 built yet. Emit the single sequence of GIMPLE statements
7189 that make up its body. */
7190 gimple_seq body = gimple_body (fndecl);
7192 if (gimple_seq_first_stmt (body)
7193 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7194 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7195 print_gimple_seq (file, body, 0, flags);
7196 else
7198 if (!ignore_topmost_bind)
7199 fprintf (file, "{\n");
7201 if (any_var)
7202 fprintf (file, "\n");
7204 print_gimple_seq (file, body, 2, flags);
7205 fprintf (file, "}\n");
7208 else
7210 int indent;
7212 /* Make a tree based dump. */
7213 chain = DECL_SAVED_TREE (fndecl);
7214 if (chain && TREE_CODE (chain) == BIND_EXPR)
7216 if (ignore_topmost_bind)
7218 chain = BIND_EXPR_BODY (chain);
7219 indent = 2;
7221 else
7222 indent = 0;
7224 else
7226 if (!ignore_topmost_bind)
7227 fprintf (file, "{\n");
7228 indent = 2;
7231 if (any_var)
7232 fprintf (file, "\n");
7234 print_generic_stmt_indented (file, chain, flags, indent);
7235 if (ignore_topmost_bind)
7236 fprintf (file, "}\n");
7239 if (flags & TDF_ENUMERATE_LOCALS)
7240 dump_enumerated_decls (file, flags);
7241 fprintf (file, "\n\n");
7243 current_function_decl = old_current_fndecl;
7246 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
7248 DEBUG_FUNCTION void
7249 debug_function (tree fn, int flags)
/* Debugger convenience wrapper: route the function dump to stderr.  */
7251 dump_function_to_file (fn, stderr, flags);
7255 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7257 static void
7258 print_pred_bbs (FILE *file, basic_block bb)
7260 edge e;
7261 edge_iterator ei;
7263 FOR_EACH_EDGE (e, ei, bb->preds)
7264 fprintf (file, "bb_%d ", e->src->index);
7268 /* Print on FILE the indexes for the successors of basic_block BB. */
7270 static void
7271 print_succ_bbs (FILE *file, basic_block bb)
7273 edge e;
7274 edge_iterator ei;
7276 FOR_EACH_EDGE (e, ei, bb->succs)
7277 fprintf (file, "bb_%d ", e->dest->index);
7280 /* Print to FILE the basic block BB following the VERBOSITY level. */
7282 void
7283 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
/* Build an INDENT-character blank prefix on the stack.  */
7285 char *s_indent = (char *) alloca ((size_t) indent + 1);
7286 memset ((void *) s_indent, ' ', (size_t) indent);
7287 s_indent[indent] = '\0';
7289 /* Print basic_block's header: at VERBOSITY >= 2 list the block's
     predecessor and successor indexes.  */
7290 if (verbosity >= 2)
7292 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7293 print_pred_bbs (file, bb);
7294 fprintf (file, "}, succs = {");
7295 print_succ_bbs (file, bb);
7296 fprintf (file, "})\n");
7299 /* Print basic_block's body: at VERBOSITY >= 3 also dump the block's
     statements via dump_bb.  */
7300 if (verbosity >= 3)
7302 fprintf (file, "%s {\n", s_indent);
7303 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7304 fprintf (file, "%s }\n", s_indent);
7308 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7310 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
7311 VERBOSITY level this outputs the contents of the loop, or just its
7312 structure. */
7314 static void
7315 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7317 char *s_indent;
7318 basic_block bb;
7320 if (loop == NULL)
7321 return;
/* Build an INDENT-character blank prefix on the stack.  */
7323 s_indent = (char *) alloca ((size_t) indent + 1);
7324 memset ((void *) s_indent, ' ', (size_t) indent);
7325 s_indent[indent] = '\0';
7327 /* Print loop's header.  */
7328 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7329 if (loop->header)
7330 fprintf (file, "header = %d", loop->header->index);
7331 else
/* A loop with no header block has been deleted; nothing more to show.  */
7333 fprintf (file, "deleted)\n");
7334 return;
7336 if (loop->latch)
7337 fprintf (file, ", latch = %d", loop->latch->index);
7338 else
7339 fprintf (file, ", multiple latches");
7340 fprintf (file, ", niter = ");
7341 print_generic_expr (file, loop->nb_iterations, 0);
7343 if (loop->any_upper_bound)
7345 fprintf (file, ", upper_bound = ");
7346 print_decu (loop->nb_iterations_upper_bound, file);
7349 if (loop->any_estimate)
7351 fprintf (file, ", estimate = ");
7352 print_decu (loop->nb_iterations_estimate, file);
7354 fprintf (file, ")\n");
7356 /* Print loop's body: at VERBOSITY >= 1 list the member blocks of this
     loop and recurse into its children.  */
7357 if (verbosity >= 1)
7359 fprintf (file, "%s{\n", s_indent);
7360 FOR_EACH_BB_FN (bb, cfun)
7361 if (bb->loop_father == loop)
7362 print_loops_bb (file, bb, indent, verbosity);
7364 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7365 fprintf (file, "%s}\n", s_indent);
7369 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7370 spaces. Following VERBOSITY level this outputs the contents of the
7371 loop, or just its structure. */
7373 static void
7374 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7375 int verbosity)
7377 if (loop == NULL)
7378 return;
7380 print_loop (file, loop, indent, verbosity);
7381 print_loop_and_siblings (file, loop->next, indent, verbosity);
7384 /* Follow a CFG edge from the entry point of the program, and on entry
7385 of a loop, pretty print the loop structure on FILE. */
7387 void
7388 print_loops (FILE *file, int verbosity)
7390 basic_block bb;
7392 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7393 if (bb && bb->loop_father)
7394 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7397 /* Dump a loop. */
7399 DEBUG_FUNCTION void
7400 debug (struct loop &ref)
/* Structure-only dump: verbosity 0 prints just the loop header line.  */
7402 print_loop (stderr, &ref, 0, /*verbosity*/0);
7405 DEBUG_FUNCTION void
7406 debug (struct loop *ptr)
/* Pointer overload: guard against NULL, then forward to the reference
   overload above.  */
7408 if (ptr)
7409 debug (*ptr);
7410 else
7411 fprintf (stderr, "<nil>\n");
7414 /* Dump a loop verbosely. */
7416 DEBUG_FUNCTION void
7417 debug_verbose (struct loop &ref)
/* Verbosity 3 prints the loop structure, its blocks and their bodies.  */
7419 print_loop (stderr, &ref, 0, /*verbosity*/3);
7422 DEBUG_FUNCTION void
7423 debug_verbose (struct loop *ptr)
7425 if (ptr)
7426 debug (*ptr);
7427 else
7428 fprintf (stderr, "<nil>\n");
7432 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7434 DEBUG_FUNCTION void
7435 debug_loops (int verbosity)
/* Dump the whole loop tree of the current function to stderr.  */
7437 print_loops (stderr, verbosity);
7440 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7442 DEBUG_FUNCTION void
7443 debug_loop (struct loop *loop, int verbosity)
7445 print_loop (stderr, loop, 0, verbosity);
7448 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7449 level. */
7451 DEBUG_FUNCTION void
7452 debug_loop_num (unsigned num, int verbosity)
/* Look the loop up by index in the current function's loop array.  */
7454 debug_loop (get_loop (cfun, num), verbosity);
7457 /* Return true if BB ends with a call, possibly followed by some
7458 instructions that must stay with the call. Return false,
7459 otherwise. */
7461 static bool
7462 gimple_block_ends_with_call_p (basic_block bb)
7464 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7465 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7469 /* Return true if BB ends with a conditional branch. Return false,
7470 otherwise. */
7472 static bool
7473 gimple_block_ends_with_condjump_p (const_basic_block bb)
7475 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7476 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7480 /* Return true if we need to add fake edge to exit at statement T.
7481 Helper function for gimple_flow_call_edges_add. */
7483 static bool
7484 need_fake_edge_p (gimple t)
7486 tree fndecl = NULL_TREE;
7487 int call_flags = 0;
7489 /* NORETURN and LONGJMP calls already have an edge to exit.
7490 CONST and PURE calls do not need one.
7491 We don't currently check for CONST and PURE here, although
7492 it would be a good idea, because those attributes are
7493 figured out from the RTL in mark_constant_function, and
7494 the counter incrementation code from -fprofile-arcs
7495 leads to different results from -fbranch-probabilities. */
7496 if (is_gimple_call (t))
7498 fndecl = gimple_call_fndecl (t);
7499 call_flags = gimple_call_flags (t);
/* Nothrow built-ins that cannot return twice never need a fake edge,
   except fork -- see the comment below.  */
7502 if (is_gimple_call (t)
7503 && fndecl
7504 && DECL_BUILT_IN (fndecl)
7505 && (call_flags & ECF_NOTHROW)
7506 && !(call_flags & ECF_RETURNS_TWICE)
7507 /* fork() doesn't really return twice, but the effect of
7508 wrapping it in __gcov_fork() which calls __gcov_flush()
7509 and clears the counters before forking has the same
7510 effect as returning twice. Force a fake edge. */
7511 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7512 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7513 return false;
7515 if (is_gimple_call (t))
7517 edge_iterator ei;
7518 edge e;
7519 basic_block bb;
7521 if (!(call_flags & ECF_NORETURN))
7522 return true;
/* A noreturn call still needs a fake edge when it has any non-fake
   outgoing edge (e.g. EH or abnormal edges).  */
7524 bb = gimple_bb (t);
7525 FOR_EACH_EDGE (e, ei, bb->succs)
7526 if ((e->flags & EDGE_FAKE) == 0)
7527 return true;
/* Certain asm statements may also not return.  */
7530 if (gimple_code (t) == GIMPLE_ASM
7531 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
7532 return true;
7534 return false;
7538 /* Add fake edges to the function exit for any non constant and non
7539 noreturn calls (or noreturn calls with EH/abnormal edges),
7540 volatile inline assembly in the bitmap of blocks specified by BLOCKS
7541 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7542 that were split.
7544 The goal is to expose cases in which entering a basic block does
7545 not imply that all subsequent instructions must be executed. */
7547 static int
7548 gimple_flow_call_edges_add (sbitmap blocks)
7550 int i;
7551 int blocks_split = 0;
7552 int last_bb = last_basic_block_for_fn (cfun);
7553 bool check_last_block = false;
/* Nothing to do for an empty function (only entry/exit blocks).  */
7555 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
7556 return 0;
7558 if (! blocks)
7559 check_last_block = true;
7560 else
7561 check_last_block = bitmap_bit_p (blocks,
7562 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
7564 /* In the last basic block, before epilogue generation, there will be
7565 a fallthru edge to EXIT. Special care is required if the last insn
7566 of the last basic block is a call because make_edge folds duplicate
7567 edges, which would result in the fallthru edge also being marked
7568 fake, which would result in the fallthru edge being removed by
7569 remove_fake_edges, which would result in an invalid CFG.
7571 Moreover, we can't elide the outgoing fake edge, since the block
7572 profiler needs to take this into account in order to solve the minimal
7573 spanning tree in the case that the call doesn't return.
7575 Handle this by adding a dummy instruction in a new last basic block. */
7576 if (check_last_block)
7578 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
7579 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7580 gimple t = NULL;
7582 if (!gsi_end_p (gsi))
7583 t = gsi_stmt (gsi);
7585 if (t && need_fake_edge_p (t))
7587 edge e;
7589 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7590 if (e)
/* Inserting the nop on the exit edge forces a block split when the
   pending inserts are committed.  */
7592 gsi_insert_on_edge (e, gimple_build_nop ());
7593 gsi_commit_edge_inserts ();
7598 /* Now add fake edges to the function exit for any non constant
7599 calls since there is no way that we can determine if they will
7600 return or not... */
7601 for (i = 0; i < last_bb; i++)
7603 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7604 gimple_stmt_iterator gsi;
7605 gimple stmt, last_stmt;
7607 if (!bb)
7608 continue;
7610 if (blocks && !bitmap_bit_p (blocks, i))
7611 continue;
7613 gsi = gsi_last_nondebug_bb (bb);
7614 if (!gsi_end_p (gsi))
7616 last_stmt = gsi_stmt (gsi);
/* Scan the block backwards; every statement needing a fake edge
   (except the last) splits the block after itself.  */
7619 stmt = gsi_stmt (gsi);
7620 if (need_fake_edge_p (stmt))
7622 edge e;
7624 /* The handling above of the final block before the
7625 epilogue should be enough to verify that there is
7626 no edge to the exit block in CFG already.
7627 Calling make_edge in such case would cause us to
7628 mark that edge as fake and remove it later. */
7629 #ifdef ENABLE_CHECKING
7630 if (stmt == last_stmt)
7632 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7633 gcc_assert (e == NULL);
7635 #endif
7637 /* Note that the following may create a new basic block
7638 and renumber the existing basic blocks. */
7639 if (stmt != last_stmt)
7641 e = split_block (bb, stmt);
7642 if (e)
7643 blocks_split++;
7645 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
7647 gsi_prev (&gsi);
7649 while (!gsi_end_p (gsi));
/* Splitting blocks may have invalidated cached flow info; verify.  */
7653 if (blocks_split)
7654 verify_flow_info ();
7656 return blocks_split;
7659 /* Removes edge E and all the blocks dominated by it, and updates dominance
7660 information. The IL in E->src needs to be updated separately.
7661 If dominance info is not available, only the edge E is removed.*/
7663 void
7664 remove_edge_and_dominated_blocks (edge e)
7666 vec<basic_block> bbs_to_remove = vNULL;
7667 vec<basic_block> bbs_to_fix_dom = vNULL;
7668 bitmap df, df_idom;
7669 edge f;
7670 edge_iterator ei;
7671 bool none_removed = false;
7672 unsigned i;
7673 basic_block bb, dbb;
7674 bitmap_iterator bi;
/* Without dominator info we cannot compute which blocks become
   unreachable, so just remove the edge.  */
7676 if (!dom_info_available_p (CDI_DOMINATORS))
7678 remove_edge (e);
7679 return;
7682 /* No updating is needed for edges to exit. */
7683 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7685 if (cfgcleanup_altered_bbs)
7686 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7687 remove_edge (e);
7688 return;
7691 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7692 that is not dominated by E->dest, then this set is empty. Otherwise,
7693 all the basic blocks dominated by E->dest are removed.
7695 Also, to DF_IDOM we store the immediate dominators of the blocks in
7696 the dominance frontier of E (i.e., of the successors of the
7697 removed blocks, if there are any, and of E->dest otherwise). */
7698 FOR_EACH_EDGE (f, ei, e->dest->preds)
7700 if (f == e)
7701 continue;
7703 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7705 none_removed = true;
7706 break;
7710 df = BITMAP_ALLOC (NULL);
7711 df_idom = BITMAP_ALLOC (NULL);
7713 if (none_removed)
7714 bitmap_set_bit (df_idom,
7715 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7716 else
7718 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
/* DF collects the successors of the removed blocks that survive.  */
7719 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7721 FOR_EACH_EDGE (f, ei, bb->succs)
7723 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
7724 bitmap_set_bit (df, f->dest->index);
7727 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7728 bitmap_clear_bit (df, bb->index);
7730 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7732 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7733 bitmap_set_bit (df_idom,
7734 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7738 if (cfgcleanup_altered_bbs)
7740 /* Record the set of the altered basic blocks. */
7741 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7742 bitmap_ior_into (cfgcleanup_altered_bbs, df)
7745 /* Remove E and the cancelled blocks. */
7746 if (none_removed)
7747 remove_edge (e);
7748 else
7750 /* Walk backwards so as to get a chance to substitute all
7751 released DEFs into debug stmts. See
7752 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7753 details. */
7754 for (i = bbs_to_remove.length (); i-- > 0; )
7755 delete_basic_block (bbs_to_remove[i]);
7758 /* Update the dominance information. The immediate dominator may change only
7759 for blocks whose immediate dominator belongs to DF_IDOM:
7761 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7762 removal. Let Z the arbitrary block such that idom(Z) = Y and
7763 Z dominates X after the removal. Before removal, there exists a path P
7764 from Y to X that avoids Z. Let F be the last edge on P that is
7765 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7766 dominates W, and because of P, Z does not dominate W), and W belongs to
7767 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7768 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7770 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7771 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
7772 dbb;
7773 dbb = next_dom_son (CDI_DOMINATORS, dbb))
7774 bbs_to_fix_dom.safe_push (dbb);
7777 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
7779 BITMAP_FREE (df);
7780 BITMAP_FREE (df_idom);
7781 bbs_to_remove.release ();
7782 bbs_to_fix_dom.release ();
7785 /* Purge dead EH edges from basic block BB.  Return true if any edge
     was removed.  */
7787 bool
7788 gimple_purge_dead_eh_edges (basic_block bb)
7790 bool changed = false;
7791 edge e;
7792 edge_iterator ei;
7793 gimple stmt = last_stmt (bb);
/* If the last statement can still throw internally, its EH edges are
   live -- keep them all.  */
7795 if (stmt && stmt_can_throw_internal (stmt))
7796 return false;
/* ei_safe_edge with manual ei_next: removing an edge invalidates a
   plain iterator.  */
7798 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7800 if (e->flags & EDGE_EH)
7802 remove_edge_and_dominated_blocks (e);
7803 changed = true;
7805 else
7806 ei_next (&ei);
7809 return changed;
7812 /* Purge dead EH edges from basic block listed in BLOCKS.  Return true
     if any edge was removed.  */
7814 bool
7815 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7817 bool changed = false;
7818 unsigned i;
7819 bitmap_iterator bi;
7821 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7823 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7825 /* Earlier gimple_purge_dead_eh_edges could have removed
7826 this basic block already. */
7827 gcc_assert (bb || changed);
7828 if (bb != NULL)
7829 changed |= gimple_purge_dead_eh_edges (bb);
7832 return changed;
7835 /* Purge dead abnormal call edges from basic block BB.  Return true if
     anything changed.  */
7837 bool
7838 gimple_purge_dead_abnormal_call_edges (basic_block bb)
7840 bool changed = false;
7841 edge e;
7842 edge_iterator ei;
7843 gimple stmt = last_stmt (bb);
/* Abnormal edges only exist for nonlocal labels or setjmp.  */
7845 if (!cfun->has_nonlocal_label
7846 && !cfun->calls_setjmp)
7847 return false;
/* If the last statement can still make an abnormal goto, its
   abnormal edges are live.  */
7849 if (stmt && stmt_can_make_abnormal_goto (stmt))
7850 return false;
7852 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7854 if (e->flags & EDGE_ABNORMAL)
/* A fallthru edge merely loses its abnormal flag; other abnormal
   edges are removed together with dominated blocks.  */
7856 if (e->flags & EDGE_FALLTHRU)
7857 e->flags &= ~EDGE_ABNORMAL;
7858 else
7859 remove_edge_and_dominated_blocks (e);
7860 changed = true;
7862 else
7863 ei_next (&ei);
7866 return changed;
7869 /* Purge dead abnormal call edges from basic block listed in BLOCKS.
     Return true if anything changed.  */
7871 bool
7872 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
7874 bool changed = false;
7875 unsigned i;
7876 bitmap_iterator bi;
7878 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7880 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7882 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7883 this basic block already. */
7884 gcc_assert (bb || changed);
7885 if (bb != NULL)
7886 changed |= gimple_purge_dead_abnormal_call_edges (bb);
7889 return changed;
7892 /* This function is called whenever a new edge is created or
7893 redirected. */
7895 static void
7896 gimple_execute_on_growing_pred (edge e)
7898 basic_block bb = e->dest;
/* Make room for the new edge's argument in every PHI node of the
   destination block.  */
7900 if (!gimple_seq_empty_p (phi_nodes (bb)))
7901 reserve_phi_args_for_new_edge (bb);
7904 /* This function is called immediately before edge E is removed from
7905 the edge vector E->dest->preds. */
7907 static void
7908 gimple_execute_on_shrinking_pred (edge e)
/* Drop E's argument from every PHI node of the destination block.  */
7910 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
7911 remove_phi_args (e);
7914 /*---------------------------------------------------------------------------
7915 Helper functions for Loop versioning
7916 ---------------------------------------------------------------------------*/
7918 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
7919 of 'first'. Both of them are dominated by 'new_head' basic block. When
7920 'new_head' was created by 'second's incoming edge it received phi arguments
7921 on the edge by split_edge(). Later, additional edge 'e' was created to
7922 connect 'new_head' and 'first'. Now this routine adds phi args on this
7923 additional edge 'e' that new_head to second edge received as part of edge
7924 splitting. */
7926 static void
7927 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
7928 basic_block new_head, edge e)
7930 gimple phi1, phi2;
7931 gimple_stmt_iterator psi1, psi2;
7932 tree def;
7933 edge e2 = find_edge (new_head, second);
7935 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7936 edge, we should always have an edge from NEW_HEAD to SECOND. */
7937 gcc_assert (e2 != NULL);
7939 /* Browse all 'second' basic block phi nodes and add phi args to
7940 edge 'e' for 'first' head. PHI args are always in correct order. */
/* Walk the two PHI sequences in lockstep, copying SECOND's argument on
   E2 (with its location) onto FIRST's corresponding PHI for edge E.  */
7942 for (psi2 = gsi_start_phis (second),
7943 psi1 = gsi_start_phis (first);
7944 !gsi_end_p (psi2) && !gsi_end_p (psi1);
7945 gsi_next (&psi2), gsi_next (&psi1))
7947 phi1 = gsi_stmt (psi1);
7948 phi2 = gsi_stmt (psi2);
7949 def = PHI_ARG_DEF (phi2, e2->dest_idx);
7950 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
7955 /* Adds a if else statement to COND_BB with condition COND_EXPR.
7956 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
7957 the destination of the ELSE part. */
7959 static void
7960 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
7961 basic_block second_head ATTRIBUTE_UNUSED,
7962 basic_block cond_bb, void *cond_e)
7964 gimple_stmt_iterator gsi;
7965 gimple new_cond_expr;
/* COND_E arrives as an opaque pointer from the cfg-hooks interface.  */
7966 tree cond_expr = (tree) cond_e;
7967 edge e0;
7969 /* Build new conditional expr */
7970 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
7971 NULL_TREE, NULL_TREE);
7973 /* Add new cond in cond_bb. */
7974 gsi = gsi_last_bb (cond_bb);
7975 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
7977 /* Adjust edges appropriately to connect new head with first head
7978 as well as second head.  Turn the existing fallthru successor of
7979 COND_BB into the FALSE edge of the new condition. */
7979 e0 = single_succ_edge (cond_bb);
7980 e0->flags &= ~EDGE_FALLTHRU;
7981 e0->flags |= EDGE_FALSE_VALUE;
7985 /* Do book-keeping of basic block BB for the profile consistency checker.
7986 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
7987 then do post-pass accounting. Store the counting in RECORD. */
7988 static void
7989 gimple_account_profile_record (basic_block bb, int after_pass,
7990 struct profile_record *record)
7992 gimple_stmt_iterator i;
7993 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
/* Size is always accumulated; time is weighted by the profile count
   for read profiles or by the block frequency for guessed ones.  */
7995 record->size[after_pass]
7996 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
7997 if (profile_status_for_fn (cfun) == PROFILE_READ)
7998 record->time[after_pass]
7999 += estimate_num_insns (gsi_stmt (i),
8000 &eni_time_weights) * bb->count;
8001 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8002 record->time[after_pass]
8003 += estimate_num_insns (gsi_stmt (i),
8004 &eni_time_weights) * bb->frequency;
/* CFG hooks implementation for the GIMPLE representation; see cfghooks.h
   for the contract of each entry.  */
8008 struct cfg_hooks gimple_cfg_hooks = {
8009 "gimple",
8010 gimple_verify_flow_info, /* verify_flow_info */
8011 gimple_dump_bb, /* dump_bb */
8012 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8013 create_bb, /* create_basic_block */
8014 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8015 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8016 gimple_can_remove_branch_p, /* can_remove_branch_p */
8017 remove_bb, /* delete_basic_block */
8018 gimple_split_block, /* split_block */
8019 gimple_move_block_after, /* move_block_after */
8020 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8021 gimple_merge_blocks, /* merge_blocks */
8022 gimple_predict_edge, /* predict_edge */
8023 gimple_predicted_by_p, /* predicted_by_p */
8024 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8025 gimple_duplicate_bb, /* duplicate_block */
8026 gimple_split_edge, /* split_edge */
8027 gimple_make_forwarder_block, /* make_forward_block */
8028 NULL, /* tidy_fallthru_edge */
8029 NULL, /* force_nonfallthru */
8030 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8031 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8032 gimple_flow_call_edges_add, /* flow_call_edges_add */
8033 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8034 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8035 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8036 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8037 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
8038 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8039 flush_pending_stmts, /* flush_pending_stmts */
8040 gimple_empty_block_p, /* block_empty_p */
8041 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8042 gimple_account_profile_record, /* account_profile_record */
8046 /* Split all critical edges.  Returns 0 (pass todo flags).  */
8048 unsigned int
8049 split_critical_edges (void)
8051 basic_block bb;
8052 edge e;
8053 edge_iterator ei;
8055 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8056 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8057 mappings around the calls to split_edge. */
8058 start_recording_case_labels ();
8059 FOR_ALL_BB_FN (bb, cfun)
8061 FOR_EACH_EDGE (e, ei, bb->succs)
/* Abnormal edges cannot be split; skip them in both cases below.  */
8063 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8064 split_edge (e);
8065 /* PRE inserts statements to edges and expects that
8066 since split_critical_edges was done beforehand, committing edge
8067 insertions will not split more edges. In addition to critical
8068 edges we must split edges that have multiple successors and
8069 end by control flow statements, such as RESX.
8070 Go ahead and split them too. This matches the logic in
8071 gimple_find_edge_insert_loc. */
8072 else if ((!single_pred_p (e->dest)
8073 || !gimple_seq_empty_p (phi_nodes (e->dest))
8074 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8075 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8076 && !(e->flags & EDGE_ABNORMAL))
8078 gimple_stmt_iterator gsi;
8080 gsi = gsi_last_bb (e->src);
8081 if (!gsi_end_p (gsi)
8082 && stmt_ends_bb_p (gsi_stmt (gsi))
8083 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8084 && !gimple_call_builtin_p (gsi_stmt (gsi),
8085 BUILT_IN_RETURN)))
8086 split_edge (e);
8090 end_recording_case_labels ();
8091 return 0;
/* Pass wrapper exposing split_critical_edges as the "crited" GIMPLE
   pass; it requires a CFG and provides PROP_no_crit_edges.  */
8094 namespace {
8096 const pass_data pass_data_split_crit_edges =
8098 GIMPLE_PASS, /* type */
8099 "crited", /* name */
8100 OPTGROUP_NONE, /* optinfo_flags */
8101 true, /* has_execute */
8102 TV_TREE_SPLIT_EDGES, /* tv_id */
8103 PROP_cfg, /* properties_required */
8104 PROP_no_crit_edges, /* properties_provided */
8105 0, /* properties_destroyed */
8106 0, /* todo_flags_start */
8107 0, /* todo_flags_finish */
8110 class pass_split_crit_edges : public gimple_opt_pass
8112 public:
8113 pass_split_crit_edges (gcc::context *ctxt)
8114 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8117 /* opt_pass methods: */
8118 virtual unsigned int execute (function *) { return split_critical_edges (); }
8120 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8121 }; // class pass_split_crit_edges
8123 } // anon namespace
/* Factory used by the pass manager.  */
8125 gimple_opt_pass *
8126 make_pass_split_crit_edges (gcc::context *ctxt)
8128 return new pass_split_crit_edges (ctxt);
8132 /* Build a ternary operation and gimplify it. Emit code before GSI.
8133 Return the gimple_val holding the result. */
8135 tree
8136 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8137 tree type, tree a, tree b, tree c)
8139 tree ret;
8140 location_t loc = gimple_location (gsi_stmt (*gsi));
8142 ret = fold_build3_loc (loc, code, type, a, b, c);
8143 STRIP_NOPS (ret);
8145 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8146 GSI_SAME_STMT);
8149 /* Build a binary operation and gimplify it. Emit code before GSI.
8150 Return the gimple_val holding the result. */
8152 tree
8153 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8154 tree type, tree a, tree b)
8156 tree ret;
8158 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8159 STRIP_NOPS (ret);
8161 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8162 GSI_SAME_STMT);
8165 /* Build a unary operation and gimplify it. Emit code before GSI.
8166 Return the gimple_val holding the result. */
8168 tree
8169 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8170 tree a)
8172 tree ret;
8174 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8175 STRIP_NOPS (ret);
8177 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8178 GSI_SAME_STMT);
8183 /* Given a basic block B which ends with a conditional and has
8184 precisely two successors, determine which of the edges is taken if
8185 the conditional is true and which is taken if the conditional is
8186 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8188 void
8189 extract_true_false_edges_from_block (basic_block b,
8190 edge *true_edge,
8191 edge *false_edge)
8193 edge e = EDGE_SUCC (b, 0);
8195 if (e->flags & EDGE_TRUE_VALUE)
8197 *true_edge = e;
8198 *false_edge = EDGE_SUCC (b, 1);
8200 else
8202 *false_edge = e;
8203 *true_edge = EDGE_SUCC (b, 1);
8207 /* Emit return warnings. */
8209 namespace {
8211 const pass_data pass_data_warn_function_return =
8213 GIMPLE_PASS, /* type */
8214 "*warn_function_return", /* name */
8215 OPTGROUP_NONE, /* optinfo_flags */
8216 true, /* has_execute */
8217 TV_NONE, /* tv_id */
8218 PROP_cfg, /* properties_required */
8219 0, /* properties_provided */
8220 0, /* properties_destroyed */
8221 0, /* todo_flags_start */
8222 0, /* todo_flags_finish */
8225 class pass_warn_function_return : public gimple_opt_pass
8227 public:
8228 pass_warn_function_return (gcc::context *ctxt)
8229 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8232 /* opt_pass methods: */
8233 virtual unsigned int execute (function *);
8235 }; // class pass_warn_function_return
/* Warn when a noreturn function returns, or when control reaches the
   end of a non-void function (-Wreturn-type).  */
8237 unsigned int
8238 pass_warn_function_return::execute (function *fun)
8240 source_location location;
8241 gimple last;
8242 edge e;
8243 edge_iterator ei;
/* The target can suppress these warnings (e.g. for naked functions).  */
8245 if (!targetm.warn_func_return (fun->decl))
8246 return 0;
8248 /* If we have a path to EXIT, then we do return. */
8249 if (TREE_THIS_VOLATILE (fun->decl)
8250 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
/* Prefer the location of an explicit return; fall back to the end of
   the function.  */
8252 location = UNKNOWN_LOCATION;
8253 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8255 last = last_stmt (e->src);
8256 if ((gimple_code (last) == GIMPLE_RETURN
8257 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
8258 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
8259 break;
8261 if (location == UNKNOWN_LOCATION)
8262 location = cfun->function_end_locus;
8263 warning_at (location, 0, "%<noreturn%> function does return");
8266 /* If we see "return;" in some basic block, then we do reach the end
8267 without returning a value. */
8268 else if (warn_return_type
8269 && !TREE_NO_WARNING (fun->decl)
8270 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
8271 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
8273 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8275 gimple last = last_stmt (e->src);
8276 if (gimple_code (last) == GIMPLE_RETURN
8277 && gimple_return_retval (last) == NULL
8278 && !gimple_no_warning_p (last))
8280 location = gimple_location (last);
8281 if (location == UNKNOWN_LOCATION)
8282 location = fun->function_end_locus;
8283 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
/* Warn only once per function.  */
8284 TREE_NO_WARNING (fun->decl) = 1;
8285 break;
8289 return 0;
8292 } // anon namespace
/* Factory used by the pass manager.  */
8294 gimple_opt_pass *
8295 make_pass_warn_function_return (gcc::context *ctxt)
8297 return new pass_warn_function_return (ctxt);
8300 /* Walk a gimplified function and warn for functions whose return value is
8301 ignored and attribute((warn_unused_result)) is set. This is done before
8302 inlining, so we don't have to worry about that. */
8304 static void
8305 do_warn_unused_result (gimple_seq seq)
8307 tree fdecl, ftype;
8308 gimple_stmt_iterator i;
8310 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8312 gimple g = gsi_stmt (i);
/* Recurse into statement containers; only naked GIMPLE_CALLs are
   warning candidates.  */
8314 switch (gimple_code (g))
8316 case GIMPLE_BIND:
8317 do_warn_unused_result (gimple_bind_body (g));
8318 break;
8319 case GIMPLE_TRY:
8320 do_warn_unused_result (gimple_try_eval (g));
8321 do_warn_unused_result (gimple_try_cleanup (g));
8322 break;
8323 case GIMPLE_CATCH:
8324 do_warn_unused_result (gimple_catch_handler (g));
8325 break;
8326 case GIMPLE_EH_FILTER:
8327 do_warn_unused_result (gimple_eh_filter_failure (g));
8328 break;
8330 case GIMPLE_CALL:
8331 if (gimple_call_lhs (g))
8332 break;
8333 if (gimple_call_internal_p (g))
8334 break;
8336 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8337 LHS. All calls whose value is ignored should be
8338 represented like this. Look for the attribute. */
8339 fdecl = gimple_call_fndecl (g);
8340 ftype = gimple_call_fntype (g);
/* The attribute lives on the function TYPE, so indirect calls are
   covered too; FDECL may be NULL then.  */
8342 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8344 location_t loc = gimple_location (g);
8346 if (fdecl)
8347 warning_at (loc, OPT_Wunused_result,
8348 "ignoring return value of %qD, "
8349 "declared with attribute warn_unused_result",
8350 fdecl);
8351 else
8352 warning_at (loc, OPT_Wunused_result,
8353 "ignoring return value of function "
8354 "declared with attribute warn_unused_result");
8356 break;
8358 default:
8359 /* Not a container, not a call, or a call whose value is used. */
8360 break;
8365 namespace {
8367 const pass_data pass_data_warn_unused_result =
8369 GIMPLE_PASS, /* type */
8370 "*warn_unused_result", /* name */
8371 OPTGROUP_NONE, /* optinfo_flags */
8372 true, /* has_execute */
8373 TV_NONE, /* tv_id */
8374 PROP_gimple_any, /* properties_required */
8375 0, /* properties_provided */
8376 0, /* properties_destroyed */
8377 0, /* todo_flags_start */
8378 0, /* todo_flags_finish */
8381 class pass_warn_unused_result : public gimple_opt_pass
8383 public:
8384 pass_warn_unused_result (gcc::context *ctxt)
8385 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
8388 /* opt_pass methods: */
8389 virtual bool gate (function *) { return flag_warn_unused_result; }
8390 virtual unsigned int execute (function *)
8392 do_warn_unused_result (gimple_body (current_function_decl));
8393 return 0;
8396 }; // class pass_warn_unused_result
8398 } // anon namespace
8400 gimple_opt_pass *
8401 make_pass_warn_unused_result (gcc::context *ctxt)
8403 return new pass_warn_unused_result (ctxt);
8406 /* IPA passes, compilation of earlier functions or inlining
8407 might have changed some properties, such as marked functions nothrow,
8408 pure, const or noreturn.
8409 Remove redundant edges and basic blocks, and create new ones if necessary.
8411 This pass can't be executed as stand alone pass from pass manager, because
8412 in between inlining and this fixup the verify_flow_info would fail. */
8414 unsigned int
8415 execute_fixup_cfg (void)
8417 basic_block bb;
8418 gimple_stmt_iterator gsi;
8419 int todo = 0;
8420 gcov_type count_scale;
8421 edge e;
8422 edge_iterator ei;
8424 count_scale
8425 = GCOV_COMPUTE_SCALE (cgraph_get_node (current_function_decl)->count,
8426 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
8428 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
8429 cgraph_get_node (current_function_decl)->count;
8430 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
8431 apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
8432 count_scale);
8434 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
8435 e->count = apply_scale (e->count, count_scale);
8437 FOR_EACH_BB_FN (bb, cfun)
8439 bb->count = apply_scale (bb->count, count_scale);
8440 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
8442 gimple stmt = gsi_stmt (gsi);
8443 tree decl = is_gimple_call (stmt)
8444 ? gimple_call_fndecl (stmt)
8445 : NULL;
8446 if (decl)
8448 int flags = gimple_call_flags (stmt);
8449 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
8451 if (gimple_purge_dead_abnormal_call_edges (bb))
8452 todo |= TODO_cleanup_cfg;
8454 if (gimple_in_ssa_p (cfun))
8456 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8457 update_stmt (stmt);
8461 if (flags & ECF_NORETURN
8462 && fixup_noreturn_call (stmt))
8463 todo |= TODO_cleanup_cfg;
8466 /* Remove stores to variables we marked write-only.
8467 Keep access when store has side effect, i.e. in case when source
8468 is volatile. */
8469 if (gimple_store_p (stmt)
8470 && !gimple_has_side_effects (stmt))
8472 tree lhs = get_base_address (gimple_get_lhs (stmt));
8474 if (TREE_CODE (lhs) == VAR_DECL
8475 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8476 && varpool_get_node (lhs)->writeonly)
8478 unlink_stmt_vdef (stmt);
8479 gsi_remove (&gsi, true);
8480 release_defs (stmt);
8481 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8482 continue;
8485 /* For calls we can simply remove LHS when it is known
8486 to be write-only. */
8487 if (is_gimple_call (stmt)
8488 && gimple_get_lhs (stmt))
8490 tree lhs = get_base_address (gimple_get_lhs (stmt));
8492 if (TREE_CODE (lhs) == VAR_DECL
8493 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8494 && varpool_get_node (lhs)->writeonly)
8496 gimple_call_set_lhs (stmt, NULL);
8497 update_stmt (stmt);
8498 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8502 if (maybe_clean_eh_stmt (stmt)
8503 && gimple_purge_dead_eh_edges (bb))
8504 todo |= TODO_cleanup_cfg;
8505 gsi_next (&gsi);
8508 FOR_EACH_EDGE (e, ei, bb->succs)
8509 e->count = apply_scale (e->count, count_scale);
8511 /* If we have a basic block with no successors that does not
8512 end with a control statement or a noreturn call end it with
8513 a call to __builtin_unreachable. This situation can occur
8514 when inlining a noreturn call that does in fact return. */
8515 if (EDGE_COUNT (bb->succs) == 0)
8517 gimple stmt = last_stmt (bb);
8518 if (!stmt
8519 || (!is_ctrl_stmt (stmt)
8520 && (!is_gimple_call (stmt)
8521 || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
8523 stmt = gimple_build_call
8524 (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
8525 gimple_stmt_iterator gsi = gsi_last_bb (bb);
8526 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
8530 if (count_scale != REG_BR_PROB_BASE)
8531 compute_function_frequency ();
8533 /* We just processed all calls. */
8534 if (cfun->gimple_df)
8535 vec_free (MODIFIED_NORETURN_CALLS (cfun));
8537 /* Dump a textual representation of the flowgraph. */
8538 if (dump_file)
8539 gimple_dump_cfg (dump_file, dump_flags);
8541 if (current_loops
8542 && (todo & TODO_cleanup_cfg))
8543 loops_state_set (LOOPS_NEED_FIXUP);
8545 return todo;
8548 namespace {
8550 const pass_data pass_data_fixup_cfg =
8552 GIMPLE_PASS, /* type */
8553 "*free_cfg_annotations", /* name */
8554 OPTGROUP_NONE, /* optinfo_flags */
8555 true, /* has_execute */
8556 TV_NONE, /* tv_id */
8557 PROP_cfg, /* properties_required */
8558 0, /* properties_provided */
8559 0, /* properties_destroyed */
8560 0, /* todo_flags_start */
8561 0, /* todo_flags_finish */
8564 class pass_fixup_cfg : public gimple_opt_pass
8566 public:
8567 pass_fixup_cfg (gcc::context *ctxt)
8568 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
8571 /* opt_pass methods: */
8572 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
8573 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
8575 }; // class pass_fixup_cfg
8577 } // anon namespace
8579 gimple_opt_pass *
8580 make_pass_fixup_cfg (gcc::context *ctxt)
8582 return new pass_fixup_cfg (ctxt);
8585 /* Garbage collection support for edge_def. */
8587 extern void gt_ggc_mx (tree&);
8588 extern void gt_ggc_mx (gimple&);
8589 extern void gt_ggc_mx (rtx&);
8590 extern void gt_ggc_mx (basic_block&);
8592 void
8593 gt_ggc_mx (edge_def *e)
8595 tree block = LOCATION_BLOCK (e->goto_locus);
8596 gt_ggc_mx (e->src);
8597 gt_ggc_mx (e->dest);
8598 if (current_ir_type () == IR_GIMPLE)
8599 gt_ggc_mx (e->insns.g);
8600 else
8601 gt_ggc_mx (e->insns.r);
8602 gt_ggc_mx (block);
8605 /* PCH support for edge_def. */
8607 extern void gt_pch_nx (tree&);
8608 extern void gt_pch_nx (gimple&);
8609 extern void gt_pch_nx (rtx&);
8610 extern void gt_pch_nx (basic_block&);
8612 void
8613 gt_pch_nx (edge_def *e)
8615 tree block = LOCATION_BLOCK (e->goto_locus);
8616 gt_pch_nx (e->src);
8617 gt_pch_nx (e->dest);
8618 if (current_ir_type () == IR_GIMPLE)
8619 gt_pch_nx (e->insns.g);
8620 else
8621 gt_pch_nx (e->insns.r);
8622 gt_pch_nx (block);
8625 void
8626 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
8628 tree block = LOCATION_BLOCK (e->goto_locus);
8629 op (&(e->src), cookie);
8630 op (&(e->dest), cookie);
8631 if (current_ir_type () == IR_GIMPLE)
8632 op (&(e->insns.g), cookie);
8633 else
8634 op (&(e->insns.r), cookie);
8635 op (&(block), cookie);