/* Extracted from official-gcc.git: gcc/tree-cfg.c
   (blob abf09d5304d002641634ea45e68c7c8939825a1f, commit
   "PR preprocessor/60723 - missing system-ness marks for macro tokens").
   Web-viewer page header retained here as a comment for provenance.  */
/* Control flow functions for trees.
   Copyright (C) 2001-2014 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-table.h"
#include "tm.h"
#include "tree.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "flags.h"
#include "function.h"
#include "gimple-pretty-print.h"
#include "pointer-set.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic-core.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "target.h"
#include "tree-ssa-live.h"
#include "omp-low.h"
#include "tree-cfgcleanup.h"
#include "wide-int.h"
#include "wide-int-print.h"
74 /* This file contains functions for building the Control Flow Graph (CFG)
75 for a function tree. */
77 /* Local declarations. */
79 /* Initial capacity for the basic block array. */
80 static const int initial_cfg_capacity = 20;
82 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
83 which use a particular edge. The CASE_LABEL_EXPRs are chained together
84 via their CASE_CHAIN field, which we clear after we're done with the
85 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
87 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
88 update the case vector in response to edge redirections.
90 Right now this table is set up and torn down at key points in the
91 compilation process. It would be nice if we could make the table
92 more persistent. The key is getting notification of changes to
93 the CFG (particularly edge removal, creation and redirection). */
95 static struct pointer_map_t *edge_to_cases;
97 /* If we record edge_to_cases, this bitmap will hold indexes
98 of basic blocks that end in a GIMPLE_SWITCH which we touched
99 due to edge manipulations. */
101 static bitmap touched_switch_bbs;
/* CFG statistics.  */
struct cfg_stats_d
{
  /* Number of labels merged into their containing block's leading label.  */
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;
111 /* Hash table to store last discriminator assigned for each locus. */
112 struct locus_discrim_map
114 location_t locus;
115 int discriminator;
118 /* Hashtable helpers. */
120 struct locus_discrim_hasher : typed_free_remove <locus_discrim_map>
122 typedef locus_discrim_map value_type;
123 typedef locus_discrim_map compare_type;
124 static inline hashval_t hash (const value_type *);
125 static inline bool equal (const value_type *, const compare_type *);
128 /* Trivial hash function for a location_t. ITEM is a pointer to
129 a hash table entry that maps a location_t to a discriminator. */
131 inline hashval_t
132 locus_discrim_hasher::hash (const value_type *item)
134 return LOCATION_LINE (item->locus);
137 /* Equality function for the locus-to-discriminator map. A and B
138 point to the two hash table entries to compare. */
140 inline bool
141 locus_discrim_hasher::equal (const value_type *a, const compare_type *b)
143 return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
146 static hash_table<locus_discrim_hasher> *discriminator_per_locus;
148 /* Basic blocks and flowgraphs. */
149 static void make_blocks (gimple_seq);
151 /* Edges. */
152 static void make_edges (void);
153 static void assign_discriminators (void);
154 static void make_cond_expr_edges (basic_block);
155 static void make_gimple_switch_edges (basic_block);
156 static bool make_goto_expr_edges (basic_block);
157 static void make_gimple_asm_edges (basic_block);
158 static edge gimple_redirect_edge_and_branch (edge, basic_block);
159 static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
161 /* Various helpers. */
162 static inline bool stmt_starts_bb_p (gimple, gimple);
163 static int gimple_verify_flow_info (void);
164 static void gimple_make_forwarder_block (edge);
165 static gimple first_non_label_stmt (basic_block);
166 static bool verify_gimple_transaction (gimple);
168 /* Flowgraph optimization and cleanup. */
169 static void gimple_merge_blocks (basic_block, basic_block);
170 static bool gimple_can_merge_blocks_p (basic_block, basic_block);
171 static void remove_bb (basic_block);
172 static edge find_taken_edge_computed_goto (basic_block, tree);
173 static edge find_taken_edge_cond_expr (basic_block, tree);
174 static edge find_taken_edge_switch_expr (basic_block, tree);
175 static tree find_case_label_for_value (gimple, tree);
177 void
178 init_empty_tree_cfg_for_function (struct function *fn)
180 /* Initialize the basic block array. */
181 init_flow (fn);
182 profile_status_for_fn (fn) = PROFILE_ABSENT;
183 n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
184 last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
185 vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
186 vec_safe_grow_cleared (basic_block_info_for_fn (fn),
187 initial_cfg_capacity);
189 /* Build a mapping of labels to their associated blocks. */
190 vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
191 vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
192 initial_cfg_capacity);
194 SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
195 SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));
197 ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
198 = EXIT_BLOCK_PTR_FOR_FN (fn);
199 EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
200 = ENTRY_BLOCK_PTR_FOR_FN (fn);
203 void
204 init_empty_tree_cfg (void)
206 init_empty_tree_cfg_for_function (cfun);
209 /*---------------------------------------------------------------------------
210 Create basic blocks
211 ---------------------------------------------------------------------------*/
213 /* Entry point to the CFG builder for trees. SEQ is the sequence of
214 statements to be added to the flowgraph. */
216 static void
217 build_gimple_cfg (gimple_seq seq)
219 /* Register specific gimple functions. */
220 gimple_register_cfg_hooks ();
222 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
224 init_empty_tree_cfg ();
226 make_blocks (seq);
228 /* Make sure there is always at least one block, even if it's empty. */
229 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
230 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
232 /* Adjust the size of the array. */
233 if (basic_block_info_for_fn (cfun)->length ()
234 < (size_t) n_basic_blocks_for_fn (cfun))
235 vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
236 n_basic_blocks_for_fn (cfun));
238 /* To speed up statement iterator walks, we first purge dead labels. */
239 cleanup_dead_labels ();
241 /* Group case nodes to reduce the number of edges.
242 We do this after cleaning up dead labels because otherwise we miss
243 a lot of obvious case merging opportunities. */
244 group_case_labels ();
246 /* Create the edges of the flowgraph. */
247 discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
248 make_edges ();
249 assign_discriminators ();
250 cleanup_dead_labels ();
251 delete discriminator_per_locus;
252 discriminator_per_locus = NULL;
256 /* Look for ANNOTATE calls with loop annotation kind; if found, remove
257 them and propagate the information to the loop. We assume that the
258 annotations come immediately before the condition of the loop. */
260 static void
261 replace_loop_annotate ()
263 struct loop *loop;
264 basic_block bb;
265 gimple_stmt_iterator gsi;
266 gimple stmt;
268 FOR_EACH_LOOP (loop, 0)
270 gsi = gsi_last_bb (loop->header);
271 stmt = gsi_stmt (gsi);
272 if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
273 continue;
274 for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
276 stmt = gsi_stmt (gsi);
277 if (gimple_code (stmt) != GIMPLE_CALL)
278 break;
279 if (!gimple_call_internal_p (stmt)
280 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
281 break;
282 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
284 case annot_expr_ivdep_kind:
285 loop->safelen = INT_MAX;
286 break;
287 case annot_expr_no_vector_kind:
288 loop->dont_vectorize = true;
289 break;
290 case annot_expr_vector_kind:
291 loop->force_vectorize = true;
292 cfun->has_force_vectorize_loops = true;
293 break;
294 default:
295 gcc_unreachable ();
297 stmt = gimple_build_assign (gimple_call_lhs (stmt),
298 gimple_call_arg (stmt, 0));
299 gsi_replace (&gsi, stmt, true);
303 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
304 FOR_EACH_BB_FN (bb, cfun)
306 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
308 stmt = gsi_stmt (gsi);
309 if (gimple_code (stmt) != GIMPLE_CALL)
310 break;
311 if (!gimple_call_internal_p (stmt)
312 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
313 break;
314 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
316 case annot_expr_ivdep_kind:
317 case annot_expr_no_vector_kind:
318 case annot_expr_vector_kind:
319 break;
320 default:
321 gcc_unreachable ();
323 warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
324 stmt = gimple_build_assign (gimple_call_lhs (stmt),
325 gimple_call_arg (stmt, 0));
326 gsi_replace (&gsi, stmt, true);
332 static unsigned int
333 execute_build_cfg (void)
335 gimple_seq body = gimple_body (current_function_decl);
337 build_gimple_cfg (body);
338 gimple_set_body (current_function_decl, NULL);
339 if (dump_file && (dump_flags & TDF_DETAILS))
341 fprintf (dump_file, "Scope blocks:\n");
342 dump_scope_blocks (dump_file, dump_flags);
344 cleanup_tree_cfg ();
345 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
346 replace_loop_annotate ();
347 return 0;
350 namespace {
352 const pass_data pass_data_build_cfg =
354 GIMPLE_PASS, /* type */
355 "cfg", /* name */
356 OPTGROUP_NONE, /* optinfo_flags */
357 true, /* has_execute */
358 TV_TREE_CFG, /* tv_id */
359 PROP_gimple_leh, /* properties_required */
360 ( PROP_cfg | PROP_loops ), /* properties_provided */
361 0, /* properties_destroyed */
362 0, /* todo_flags_start */
363 0, /* todo_flags_finish */
366 class pass_build_cfg : public gimple_opt_pass
368 public:
369 pass_build_cfg (gcc::context *ctxt)
370 : gimple_opt_pass (pass_data_build_cfg, ctxt)
373 /* opt_pass methods: */
374 virtual unsigned int execute (function *) { return execute_build_cfg (); }
376 }; // class pass_build_cfg
378 } // anon namespace
380 gimple_opt_pass *
381 make_pass_build_cfg (gcc::context *ctxt)
383 return new pass_build_cfg (ctxt);
387 /* Return true if T is a computed goto. */
389 bool
390 computed_goto_p (gimple t)
392 return (gimple_code (t) == GIMPLE_GOTO
393 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
396 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
397 the other edge points to a bb with just __builtin_unreachable ().
398 I.e. return true for C->M edge in:
399 <bb C>:
401 if (something)
402 goto <bb N>;
403 else
404 goto <bb M>;
405 <bb N>:
406 __builtin_unreachable ();
407 <bb M>: */
409 bool
410 assert_unreachable_fallthru_edge_p (edge e)
412 basic_block pred_bb = e->src;
413 gimple last = last_stmt (pred_bb);
414 if (last && gimple_code (last) == GIMPLE_COND)
416 basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
417 if (other_bb == e->dest)
418 other_bb = EDGE_SUCC (pred_bb, 1)->dest;
419 if (EDGE_COUNT (other_bb->succs) == 0)
421 gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
422 gimple stmt;
424 if (gsi_end_p (gsi))
425 return false;
426 stmt = gsi_stmt (gsi);
427 while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
429 gsi_next (&gsi);
430 if (gsi_end_p (gsi))
431 return false;
432 stmt = gsi_stmt (gsi);
434 return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
437 return false;
441 /* Build a flowgraph for the sequence of stmts SEQ. */
443 static void
444 make_blocks (gimple_seq seq)
446 gimple_stmt_iterator i = gsi_start (seq);
447 gimple stmt = NULL;
448 bool start_new_block = true;
449 bool first_stmt_of_seq = true;
450 basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
452 while (!gsi_end_p (i))
454 gimple prev_stmt;
456 prev_stmt = stmt;
457 stmt = gsi_stmt (i);
459 /* If the statement starts a new basic block or if we have determined
460 in a previous pass that we need to create a new block for STMT, do
461 so now. */
462 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
464 if (!first_stmt_of_seq)
465 gsi_split_seq_before (&i, &seq);
466 bb = create_basic_block (seq, NULL, bb);
467 start_new_block = false;
470 /* Now add STMT to BB and create the subgraphs for special statement
471 codes. */
472 gimple_set_bb (stmt, bb);
474 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
475 next iteration. */
476 if (stmt_ends_bb_p (stmt))
478 /* If the stmt can make abnormal goto use a new temporary
479 for the assignment to the LHS. This makes sure the old value
480 of the LHS is available on the abnormal edge. Otherwise
481 we will end up with overlapping life-ranges for abnormal
482 SSA names. */
483 if (gimple_has_lhs (stmt)
484 && stmt_can_make_abnormal_goto (stmt)
485 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
487 tree lhs = gimple_get_lhs (stmt);
488 tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
489 gimple s = gimple_build_assign (lhs, tmp);
490 gimple_set_location (s, gimple_location (stmt));
491 gimple_set_block (s, gimple_block (stmt));
492 gimple_set_lhs (stmt, tmp);
493 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
494 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
495 DECL_GIMPLE_REG_P (tmp) = 1;
496 gsi_insert_after (&i, s, GSI_SAME_STMT);
498 start_new_block = true;
501 gsi_next (&i);
502 first_stmt_of_seq = false;
507 /* Create and return a new empty basic block after bb AFTER. */
509 static basic_block
510 create_bb (void *h, void *e, basic_block after)
512 basic_block bb;
514 gcc_assert (!e);
516 /* Create and initialize a new basic block. Since alloc_block uses
517 GC allocation that clears memory to allocate a basic block, we do
518 not have to clear the newly allocated basic block here. */
519 bb = alloc_block ();
521 bb->index = last_basic_block_for_fn (cfun);
522 bb->flags = BB_NEW;
523 set_bb_seq (bb, h ? (gimple_seq) h : NULL);
525 /* Add the new block to the linked list of blocks. */
526 link_block (bb, after);
528 /* Grow the basic block array if needed. */
529 if ((size_t) last_basic_block_for_fn (cfun)
530 == basic_block_info_for_fn (cfun)->length ())
532 size_t new_size =
533 (last_basic_block_for_fn (cfun)
534 + (last_basic_block_for_fn (cfun) + 3) / 4);
535 vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
538 /* Add the newly created block to the array. */
539 SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);
541 n_basic_blocks_for_fn (cfun)++;
542 last_basic_block_for_fn (cfun)++;
544 return bb;
548 /*---------------------------------------------------------------------------
549 Edge creation
550 ---------------------------------------------------------------------------*/
552 /* Fold COND_EXPR_COND of each COND_EXPR. */
554 void
555 fold_cond_expr_cond (void)
557 basic_block bb;
559 FOR_EACH_BB_FN (bb, cfun)
561 gimple stmt = last_stmt (bb);
563 if (stmt && gimple_code (stmt) == GIMPLE_COND)
565 location_t loc = gimple_location (stmt);
566 tree cond;
567 bool zerop, onep;
569 fold_defer_overflow_warnings ();
570 cond = fold_binary_loc (loc, gimple_cond_code (stmt), boolean_type_node,
571 gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
572 if (cond)
574 zerop = integer_zerop (cond);
575 onep = integer_onep (cond);
577 else
578 zerop = onep = false;
580 fold_undefer_overflow_warnings (zerop || onep,
581 stmt,
582 WARN_STRICT_OVERFLOW_CONDITIONAL);
583 if (zerop)
584 gimple_cond_make_false (stmt);
585 else if (onep)
586 gimple_cond_make_true (stmt);
591 /* If basic block BB has an abnormal edge to a basic block
592 containing IFN_ABNORMAL_DISPATCHER internal call, return
593 that the dispatcher's basic block, otherwise return NULL. */
595 basic_block
596 get_abnormal_succ_dispatcher (basic_block bb)
598 edge e;
599 edge_iterator ei;
601 FOR_EACH_EDGE (e, ei, bb->succs)
602 if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
604 gimple_stmt_iterator gsi
605 = gsi_start_nondebug_after_labels_bb (e->dest);
606 gimple g = gsi_stmt (gsi);
607 if (g
608 && is_gimple_call (g)
609 && gimple_call_internal_p (g)
610 && gimple_call_internal_fn (g) == IFN_ABNORMAL_DISPATCHER)
611 return e->dest;
613 return NULL;
616 /* Helper function for make_edges. Create a basic block with
617 with ABNORMAL_DISPATCHER internal call in it if needed, and
618 create abnormal edges from BBS to it and from it to FOR_BB
619 if COMPUTED_GOTO is false, otherwise factor the computed gotos. */
621 static void
622 handle_abnormal_edges (basic_block *dispatcher_bbs,
623 basic_block for_bb, int *bb_to_omp_idx,
624 auto_vec<basic_block> *bbs, bool computed_goto)
626 basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
627 unsigned int idx = 0;
628 basic_block bb;
629 bool inner = false;
631 if (bb_to_omp_idx)
633 dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
634 if (bb_to_omp_idx[for_bb->index] != 0)
635 inner = true;
638 /* If the dispatcher has been created already, then there are basic
639 blocks with abnormal edges to it, so just make a new edge to
640 for_bb. */
641 if (*dispatcher == NULL)
643 /* Check if there are any basic blocks that need to have
644 abnormal edges to this dispatcher. If there are none, return
645 early. */
646 if (bb_to_omp_idx == NULL)
648 if (bbs->is_empty ())
649 return;
651 else
653 FOR_EACH_VEC_ELT (*bbs, idx, bb)
654 if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
655 break;
656 if (bb == NULL)
657 return;
660 /* Create the dispatcher bb. */
661 *dispatcher = create_basic_block (NULL, NULL, for_bb);
662 if (computed_goto)
664 /* Factor computed gotos into a common computed goto site. Also
665 record the location of that site so that we can un-factor the
666 gotos after we have converted back to normal form. */
667 gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);
669 /* Create the destination of the factored goto. Each original
670 computed goto will put its desired destination into this
671 variable and jump to the label we create immediately below. */
672 tree var = create_tmp_var (ptr_type_node, "gotovar");
674 /* Build a label for the new block which will contain the
675 factored computed goto. */
676 tree factored_label_decl
677 = create_artificial_label (UNKNOWN_LOCATION);
678 gimple factored_computed_goto_label
679 = gimple_build_label (factored_label_decl);
680 gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);
682 /* Build our new computed goto. */
683 gimple factored_computed_goto = gimple_build_goto (var);
684 gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);
686 FOR_EACH_VEC_ELT (*bbs, idx, bb)
688 if (bb_to_omp_idx
689 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
690 continue;
692 gsi = gsi_last_bb (bb);
693 gimple last = gsi_stmt (gsi);
695 gcc_assert (computed_goto_p (last));
697 /* Copy the original computed goto's destination into VAR. */
698 gimple assignment
699 = gimple_build_assign (var, gimple_goto_dest (last));
700 gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
702 edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
703 e->goto_locus = gimple_location (last);
704 gsi_remove (&gsi, true);
707 else
709 tree arg = inner ? boolean_true_node : boolean_false_node;
710 gimple g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
711 1, arg);
712 gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
713 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
715 /* Create predecessor edges of the dispatcher. */
716 FOR_EACH_VEC_ELT (*bbs, idx, bb)
718 if (bb_to_omp_idx
719 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
720 continue;
721 make_edge (bb, *dispatcher, EDGE_ABNORMAL);
726 make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
729 /* Join all the blocks in the flowgraph. */
731 static void
732 make_edges (void)
734 basic_block bb;
735 struct omp_region *cur_region = NULL;
736 auto_vec<basic_block> ab_edge_goto;
737 auto_vec<basic_block> ab_edge_call;
738 int *bb_to_omp_idx = NULL;
739 int cur_omp_region_idx = 0;
741 /* Create an edge from entry to the first block with executable
742 statements in it. */
743 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
744 BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
745 EDGE_FALLTHRU);
747 /* Traverse the basic block array placing edges. */
748 FOR_EACH_BB_FN (bb, cfun)
750 gimple last = last_stmt (bb);
751 bool fallthru;
753 if (bb_to_omp_idx)
754 bb_to_omp_idx[bb->index] = cur_omp_region_idx;
756 if (last)
758 enum gimple_code code = gimple_code (last);
759 switch (code)
761 case GIMPLE_GOTO:
762 if (make_goto_expr_edges (bb))
763 ab_edge_goto.safe_push (bb);
764 fallthru = false;
765 break;
766 case GIMPLE_RETURN:
768 edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
769 e->goto_locus = gimple_location (last);
770 fallthru = false;
772 break;
773 case GIMPLE_COND:
774 make_cond_expr_edges (bb);
775 fallthru = false;
776 break;
777 case GIMPLE_SWITCH:
778 make_gimple_switch_edges (bb);
779 fallthru = false;
780 break;
781 case GIMPLE_RESX:
782 make_eh_edges (last);
783 fallthru = false;
784 break;
785 case GIMPLE_EH_DISPATCH:
786 fallthru = make_eh_dispatch_edges (last);
787 break;
789 case GIMPLE_CALL:
790 /* If this function receives a nonlocal goto, then we need to
791 make edges from this call site to all the nonlocal goto
792 handlers. */
793 if (stmt_can_make_abnormal_goto (last))
794 ab_edge_call.safe_push (bb);
796 /* If this statement has reachable exception handlers, then
797 create abnormal edges to them. */
798 make_eh_edges (last);
800 /* BUILTIN_RETURN is really a return statement. */
801 if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
803 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
804 fallthru = false;
806 /* Some calls are known not to return. */
807 else
808 fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
809 break;
811 case GIMPLE_ASSIGN:
812 /* A GIMPLE_ASSIGN may throw internally and thus be considered
813 control-altering. */
814 if (is_ctrl_altering_stmt (last))
815 make_eh_edges (last);
816 fallthru = true;
817 break;
819 case GIMPLE_ASM:
820 make_gimple_asm_edges (bb);
821 fallthru = true;
822 break;
824 CASE_GIMPLE_OMP:
825 fallthru = make_gimple_omp_edges (bb, &cur_region,
826 &cur_omp_region_idx);
827 if (cur_region && bb_to_omp_idx == NULL)
828 bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
829 break;
831 case GIMPLE_TRANSACTION:
833 tree abort_label = gimple_transaction_label (last);
834 if (abort_label)
835 make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
836 fallthru = true;
838 break;
840 default:
841 gcc_assert (!stmt_ends_bb_p (last));
842 fallthru = true;
845 else
846 fallthru = true;
848 if (fallthru)
849 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
852 /* Computed gotos are hell to deal with, especially if there are
853 lots of them with a large number of destinations. So we factor
854 them to a common computed goto location before we build the
855 edge list. After we convert back to normal form, we will un-factor
856 the computed gotos since factoring introduces an unwanted jump.
857 For non-local gotos and abnormal edges from calls to calls that return
858 twice or forced labels, factor the abnormal edges too, by having all
859 abnormal edges from the calls go to a common artificial basic block
860 with ABNORMAL_DISPATCHER internal call and abnormal edges from that
861 basic block to all forced labels and calls returning twice.
862 We do this per-OpenMP structured block, because those regions
863 are guaranteed to be single entry single exit by the standard,
864 so it is not allowed to enter or exit such regions abnormally this way,
865 thus all computed gotos, non-local gotos and setjmp/longjmp calls
866 must not transfer control across SESE region boundaries. */
867 if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
869 gimple_stmt_iterator gsi;
870 basic_block dispatcher_bb_array[2] = { NULL, NULL };
871 basic_block *dispatcher_bbs = dispatcher_bb_array;
872 int count = n_basic_blocks_for_fn (cfun);
874 if (bb_to_omp_idx)
875 dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);
877 FOR_EACH_BB_FN (bb, cfun)
879 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
881 gimple label_stmt = gsi_stmt (gsi);
882 tree target;
884 if (gimple_code (label_stmt) != GIMPLE_LABEL)
885 break;
887 target = gimple_label_label (label_stmt);
889 /* Make an edge to every label block that has been marked as a
890 potential target for a computed goto or a non-local goto. */
891 if (FORCED_LABEL (target))
892 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
893 &ab_edge_goto, true);
894 if (DECL_NONLOCAL (target))
896 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
897 &ab_edge_call, false);
898 break;
902 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
903 gsi_next_nondebug (&gsi);
904 if (!gsi_end_p (gsi))
906 /* Make an edge to every setjmp-like call. */
907 gimple call_stmt = gsi_stmt (gsi);
908 if (is_gimple_call (call_stmt)
909 && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
910 || gimple_call_builtin_p (call_stmt,
911 BUILT_IN_SETJMP_RECEIVER)))
912 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
913 &ab_edge_call, false);
917 if (bb_to_omp_idx)
918 XDELETE (dispatcher_bbs);
921 XDELETE (bb_to_omp_idx);
923 free_omp_regions ();
925 /* Fold COND_EXPR_COND of each COND_EXPR. */
926 fold_cond_expr_cond ();
929 /* Find the next available discriminator value for LOCUS. The
930 discriminator distinguishes among several basic blocks that
931 share a common locus, allowing for more accurate sample-based
932 profiling. */
934 static int
935 next_discriminator_for_locus (location_t locus)
937 struct locus_discrim_map item;
938 struct locus_discrim_map **slot;
940 item.locus = locus;
941 item.discriminator = 0;
942 slot = discriminator_per_locus->find_slot_with_hash (
943 &item, LOCATION_LINE (locus), INSERT);
944 gcc_assert (slot);
945 if (*slot == HTAB_EMPTY_ENTRY)
947 *slot = XNEW (struct locus_discrim_map);
948 gcc_assert (*slot);
949 (*slot)->locus = locus;
950 (*slot)->discriminator = 0;
952 (*slot)->discriminator++;
953 return (*slot)->discriminator;
956 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
958 static bool
959 same_line_p (location_t locus1, location_t locus2)
961 expanded_location from, to;
963 if (locus1 == locus2)
964 return true;
966 from = expand_location (locus1);
967 to = expand_location (locus2);
969 if (from.line != to.line)
970 return false;
971 if (from.file == to.file)
972 return true;
973 return (from.file != NULL
974 && to.file != NULL
975 && filename_cmp (from.file, to.file) == 0);
978 /* Assign discriminators to each basic block. */
980 static void
981 assign_discriminators (void)
983 basic_block bb;
985 FOR_EACH_BB_FN (bb, cfun)
987 edge e;
988 edge_iterator ei;
989 gimple last = last_stmt (bb);
990 location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;
992 if (locus == UNKNOWN_LOCATION)
993 continue;
995 FOR_EACH_EDGE (e, ei, bb->succs)
997 gimple first = first_non_label_stmt (e->dest);
998 gimple last = last_stmt (e->dest);
999 if ((first && same_line_p (locus, gimple_location (first)))
1000 || (last && same_line_p (locus, gimple_location (last))))
1002 if (e->dest->discriminator != 0 && bb->discriminator == 0)
1003 bb->discriminator = next_discriminator_for_locus (locus);
1004 else
1005 e->dest->discriminator = next_discriminator_for_locus (locus);
1011 /* Create the edges for a GIMPLE_COND starting at block BB. */
1013 static void
1014 make_cond_expr_edges (basic_block bb)
1016 gimple entry = last_stmt (bb);
1017 gimple then_stmt, else_stmt;
1018 basic_block then_bb, else_bb;
1019 tree then_label, else_label;
1020 edge e;
1022 gcc_assert (entry);
1023 gcc_assert (gimple_code (entry) == GIMPLE_COND);
1025 /* Entry basic blocks for each component. */
1026 then_label = gimple_cond_true_label (entry);
1027 else_label = gimple_cond_false_label (entry);
1028 then_bb = label_to_block (then_label);
1029 else_bb = label_to_block (else_label);
1030 then_stmt = first_stmt (then_bb);
1031 else_stmt = first_stmt (else_bb);
1033 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1034 e->goto_locus = gimple_location (then_stmt);
1035 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1036 if (e)
1037 e->goto_locus = gimple_location (else_stmt);
1039 /* We do not need the labels anymore. */
1040 gimple_cond_set_true_label (entry, NULL_TREE);
1041 gimple_cond_set_false_label (entry, NULL_TREE);
1045 /* Called for each element in the hash table (P) as we delete the
1046 edge to cases hash table.
1048 Clear all the TREE_CHAINs to prevent problems with copying of
1049 SWITCH_EXPRs and structure sharing rules, then free the hash table
1050 element. */
1052 static bool
1053 edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
1054 void *data ATTRIBUTE_UNUSED)
1056 tree t, next;
1058 for (t = (tree) *value; t; t = next)
1060 next = CASE_CHAIN (t);
1061 CASE_CHAIN (t) = NULL;
1064 *value = NULL;
1065 return true;
1068 /* Start recording information mapping edges to case labels. */
1070 void
1071 start_recording_case_labels (void)
1073 gcc_assert (edge_to_cases == NULL);
1074 edge_to_cases = pointer_map_create ();
1075 touched_switch_bbs = BITMAP_ALLOC (NULL);
1078 /* Return nonzero if we are recording information for case labels. */
1080 static bool
1081 recording_case_labels_p (void)
1083 return (edge_to_cases != NULL);
1086 /* Stop recording information mapping edges to case labels and
1087 remove any information we have recorded. */
1088 void
1089 end_recording_case_labels (void)
1091 bitmap_iterator bi;
1092 unsigned i;
1093 pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
1094 pointer_map_destroy (edge_to_cases);
1095 edge_to_cases = NULL;
1096 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
1098 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
1099 if (bb)
1101 gimple stmt = last_stmt (bb);
1102 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1103 group_case_labels_stmt (stmt);
1106 BITMAP_FREE (touched_switch_bbs);
/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gimple t)
{
  void **slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = pointer_map_contains (edge_to_cases, e);
  if (slot)
    return (tree) *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  The chain is threaded through CASE_CHAIN with the
	 slot holding the chain head.  */
      slot = pointer_map_insert (edge_to_cases, this_edge);
      CASE_CHAIN (elt) = (tree) *slot;
      *slot = elt;
    }

  /* The loop above inserted every edge of T, so E is now present.  */
  return (tree) *pointer_map_contains (edge_to_cases, e);
}
1151 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1153 static void
1154 make_gimple_switch_edges (basic_block bb)
1156 gimple entry = last_stmt (bb);
1157 size_t i, n;
1159 n = gimple_switch_num_labels (entry);
1161 for (i = 0; i < n; ++i)
1163 tree lab = CASE_LABEL (gimple_switch_label (entry, i));
1164 basic_block label_bb = label_to_block (lab);
1165 make_edge (bb, label_bb, 0);
/* Return the basic block holding label DEST in function IFUN, or NULL
   if DEST's UID is outside the label-to-block map.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  Only done after an
     error was already reported, so no valid program pays for it.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      /* Building the label assigned DEST a valid UID; reload it.  */
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}
1195 /* Create edges for a goto statement at block BB. Returns true
1196 if abnormal edges should be created. */
1198 static bool
1199 make_goto_expr_edges (basic_block bb)
1201 gimple_stmt_iterator last = gsi_last_bb (bb);
1202 gimple goto_t = gsi_stmt (last);
1204 /* A simple GOTO creates normal edges. */
1205 if (simple_goto_p (goto_t))
1207 tree dest = gimple_goto_dest (goto_t);
1208 basic_block label_bb = label_to_block (dest);
1209 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
1210 e->goto_locus = gimple_location (goto_t);
1211 gsi_remove (&last, true);
1212 return false;
1215 /* A computed GOTO creates abnormal edges. */
1216 return true;
1219 /* Create edges for an asm statement with labels at block BB. */
1221 static void
1222 make_gimple_asm_edges (basic_block bb)
1224 gimple stmt = last_stmt (bb);
1225 int i, n = gimple_asm_nlabels (stmt);
1227 for (i = 0; i < n; ++i)
1229 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1230 basic_block label_bb = label_to_block (label);
1231 make_edge (bb, label_bb, 0);
1235 /*---------------------------------------------------------------------------
1236 Flowgraph analysis
1237 ---------------------------------------------------------------------------*/
/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After then we do not run it any more, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.
   Allocated in cleanup_dead_labels and freed there as well.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;
1257 /* Given LABEL return the first label in the same basic block. */
1259 static tree
1260 main_block_label (tree label)
1262 basic_block bb = label_to_block (label);
1263 tree main_label = label_for_bb[bb->index].label;
1265 /* label_to_block possibly inserted undefined label into the chain. */
1266 if (!main_label)
1268 label_for_bb[bb->index].label = label;
1269 main_label = label;
1272 label_for_bb[bb->index].used = true;
1273 return main_label;
/* Clean up redundant labels within the exception tree: redirect every
   landing-pad and handler label to the leading label of its block.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  /* Redirect each landing pad's post-landing-pad label, keeping the
     EH_LANDING_PAD_NR bookkeeping in sync.  */
  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  /* Redirect region labels, per region kind.  */
  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	/* These region kinds carry no label of their own.  */
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}
/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  gimple stmt = gsi_stmt (i);

	  /* Labels can only appear at the start of a block; stop at
	     the first non-label.  */
	  if (gimple_code (stmt) != GIMPLE_LABEL)
	    break;

	  label = gimple_label_label (stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  /* Both arms of the conditional may carry a label.  */
	  label = gimple_cond_true_label (stmt);
	  if (label)
	    {
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_cond_set_true_label (stmt, new_label);
	    }

	  label = gimple_cond_false_label (stmt);
	  if (label)
	    {
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_cond_set_false_label (stmt, new_label);
	    }
	  break;

	case GIMPLE_SWITCH:
	  {
	    size_t i, n = gimple_switch_num_labels (stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    int i, n = gimple_asm_nlabels (stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      label = gimple_goto_dest (stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    tree label = gimple_transaction_label (stmt);
	    if (label)
	      {
		tree new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label (stmt, new_label);
	      }
	    break;
	  }

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  gimple stmt = gsi_stmt (i);

	  if (gimple_code (stmt) != GIMPLE_LABEL)
	    break;

	  label = gimple_label_label (stmt);

	  /* Keep the leading label, user labels, and labels whose
	     address is taken or that are non-local goto targets.  */
	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gimple stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.
     Index 0 is the default label, so start at 1.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
	 default case.  Slots are nulled here and compacted below.  */
      if (base_bb == default_bb)
	{
	  gimple_switch_set_label (stmt, i, NULL_TREE);
	  i++;
	  new_size--;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (i < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, i);
	  basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
	  wide_int bhp1 = wi::add (base_high, 1);

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && wi::eq_p (CASE_LOW (merge_case), bhp1))
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      gimple_switch_set_label (stmt, i, NULL_TREE);
	      new_size--;
	      i++;
	    }
	  else
	    break;
	}
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  J trails I, skipping the nulled slots.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
	j++;
      gimple_switch_set_label (stmt, i,
			       gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}
1584 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1585 and scan the sorted vector of cases. Combine the ones jumping to the
1586 same label. */
1588 void
1589 group_case_labels (void)
1591 basic_block bb;
1593 FOR_EACH_BB_FN (bb, cfun)
1595 gimple stmt = last_stmt (bb);
1596 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1597 group_case_labels_stmt (stmt);
/* Checks whether we can merge block B into block A.  A series of
   ordered predicates; each early return names a reason the merge is
   not possible.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;
  gimple_stmt_iterator gsi;

  /* A must flow unconditionally and only into B.  */
  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt
      && gimple_code (stmt) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (stmt)))
    return false;

  /* Examine the labels at the beginning of B.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree lab;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
	break;
      lab = gimple_label_label (stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gsi = gsi_start_phis (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
	return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      /* The locus survives only if a neighboring statement carries it.  */
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}
/* Replaces all uses of NAME by VAL, folding and updating every touched
   statement.  Afterwards NAME must have zero uses.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Mark the block if we change the last stmt in it.  */
      if (cfgcleanup_altered_bbs
	  && stmt_ends_bb_p (stmt))
	bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
	{
	  replace_exp (use, val);

	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL)
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}

      /* PHIs need no folding/updating; everything else does.  */
      if (gimple_code (stmt) != GIMPLE_PHI)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gimple orig_stmt = stmt;
	  size_t i;

	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change sth from non-invariant to invariant, and only
	     when propagating constants.  */
	  if (is_gimple_min_invariant (val))
	    for (i = 0; i < gimple_num_ops (stmt); i++)
	      {
		tree op = gimple_op (stmt, i);
		/* Operands may be empty here.  For example, the labels
		   of a GIMPLE_COND are nulled out following the creation
		   of the corresponding CFG edges.  */
		if (op && TREE_CODE (op) == ADDR_EXPR)
		  recompute_tree_invariant_for_addr_expr (op);
	      }

	  if (fold_stmt (&gsi))
	    stmt = gsi_stmt (gsi);

	  /* Folding may have removed an EH side effect; purge dead
	     EH edges if so.  */
	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    gimple_purge_dead_eh_edges (gimple_bb (stmt));

	  update_stmt (stmt);
	}
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;

      FOR_EACH_LOOP (loop, 0)
	{
	  substitute_in_loop_info (loop, name, val);
	}
    }
}
/* Merge block B into block A.  Caller must have checked
   gimple_can_merge_blocks_p; A is B's single predecessor.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi, psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = (virtual_operand_p (def)
			       || may_propagate_copy (def, use));

      /* In case we maintain loop closed ssa form, do not propagate arguments
	 of loop exit phi nodes.  */
      if (current_loops
	  && loops_state_satisfies_p (LOOP_CLOSED_SSA)
	  && !virtual_operand_p (def)
	  && TREE_CODE (use) == SSA_NAME
	  && a->loop_father != b->loop_father)
	may_replace_uses = false;

      if (!may_replace_uses)
	{
	  gcc_assert (!virtual_operand_p (def));

	  /* Note that just emitting the copies is fine -- there is no problem
	     with ordering of phi nodes.  This is because A is the single
	     predecessor of B, therefore results of the phi nodes cannot
	     appear as arguments of the phi nodes.  */
	  copy = gimple_build_assign (def, use);
	  gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
	  remove_phi_node (&psi, false);
	}
      else
	{
	  /* If we deal with a PHI for virtual operands, we can simply
	     propagate these without fussing with folding or updating
	     the stmt.  */
	  if (virtual_operand_p (def))
	    {
	      imm_use_iterator iter;
	      use_operand_p use_p;
	      gimple stmt;

	      FOR_EACH_IMM_USE_STMT (stmt, iter, def)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, use);

	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
	    }
	  else
	    replace_uses_by (def, use);

	  remove_phi_node (&psi, true);
	}
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_LABEL)
	{
	  tree label = gimple_label_label (stmt);
	  int lp_nr;

	  gsi_remove (&gsi, false);

	  /* Now that we can thread computed gotos, we might have
	     a situation where we have a forced label in block B
	     However, the label at the start of block B might still be
	     used in other ways (think about the runtime checking for
	     Fortran assigned gotos).  So we can not just delete the
	     label.  Instead we move the label to the start of block A.  */
	  if (FORCED_LABEL (label))
	    {
	      gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
	      gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
	    }
	  /* Other user labels keep around in a form of a debug stmt.  */
	  else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
	    {
	      gimple dbg = gimple_build_debug_bind (label,
						    integer_zero_node,
						    stmt);
	      gimple_debug_bind_reset_value (dbg);
	      gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
	    }

	  /* If the label was a landing pad, the pad is gone now.  */
	  lp_nr = EH_LANDING_PAD_NR (label);
	  if (lp_nr)
	    {
	      eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
	      lp->post_landing_pad = NULL;
	    }
	}
      else
	{
	  gimple_set_bb (stmt, a);
	  gsi_next (&gsi);
	}
    }

  /* When merging two BBs, if their counts are different, the larger count
     is selected as the new bb count. This is to handle inconsistent
     profiles.  */
  if (a->loop_father == b->loop_father)
    {
      a->count = MAX (a->count, b->count);
      a->frequency = MAX (a->frequency, b->frequency);
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}
1903 /* Return the one of two successors of BB that is not reachable by a
1904 complex edge, if there is one. Else, return BB. We use
1905 this in optimizations that use post-dominators for their heuristics,
1906 to catch the cases in C++ where function calls are involved. */
1908 basic_block
1909 single_noncomplex_succ (basic_block bb)
1911 edge e0, e1;
1912 if (EDGE_COUNT (bb->succs) != 2)
1913 return bb;
1915 e0 = EDGE_SUCC (bb, 0);
1916 e1 = EDGE_SUCC (bb, 1);
1917 if (e0->flags & EDGE_COMPLEX)
1918 return e1->dest;
1919 if (e1->flags & EDGE_COMPLEX)
1920 return e0->dest;
1922 return bb;
1925 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1927 void
1928 notice_special_calls (gimple call)
1930 int flags = gimple_call_flags (call);
1932 if (flags & ECF_MAY_BE_ALLOCA)
1933 cfun->calls_alloca = true;
1934 if (flags & ECF_RETURNS_TWICE)
1935 cfun->calls_setjmp = true;
1939 /* Clear flags set by notice_special_calls. Used by dead code removal
1940 to update the flags. */
1942 void
1943 clear_special_calls (void)
1945 cfun->calls_alloca = false;
1946 cfun->calls_setjmp = false;
1949 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1951 static void
1952 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1954 /* Since this block is no longer reachable, we can just delete all
1955 of its PHI nodes. */
1956 remove_phi_nodes (bb);
1958 /* Remove edges to BB's successors. */
1959 while (EDGE_COUNT (bb->succs) > 0)
1960 remove_edge (EDGE_SUCC (bb, 0));
/* Remove statements of basic block BB, then its PHIs and edges.  */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
	{
	  dump_bb (dump_file, bb, 0, TDF_BLOCKS);
	  fprintf (dump_file, "\n");
	}
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
	 with it.  */
      if (loop->latch == bb
	  || loop->header == bb)
	free_numbers_of_iterations_estimates_loop (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
	{
	  gimple stmt = gsi_stmt (i);
	  if (gimple_code (stmt) == GIMPLE_LABEL
	      && (FORCED_LABEL (gimple_label_label (stmt))
		  || DECL_NONLOCAL (gimple_label_label (stmt))))
	    {
	      basic_block new_bb;
	      gimple_stmt_iterator new_gsi;

	      /* A non-reachable non-local label may still be referenced.
		 But it no longer needs to carry the extra semantics of
		 non-locality.  */
	      if (DECL_NONLOCAL (gimple_label_label (stmt)))
		{
		  DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
		  FORCED_LABEL (gimple_label_label (stmt)) = 1;
		}

	      /* Rescue the label by moving it to the previous block.  */
	      new_bb = bb->prev_bb;
	      new_gsi = gsi_start_bb (new_bb);
	      gsi_remove (&i, false);
	      gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
	    }
	  else
	    {
	      /* Release SSA definitions if we are in SSA.  Note that we
		 may be called when not in SSA.  For example,
		 final_cleanup calls this function via
		 cleanup_tree_cfg.  */
	      if (gimple_in_ssa_p (cfun))
		release_defs (stmt);
	      gsi_remove (&i, true);
	    }

	  /* The removal invalidated the iterator position; restart
	     from the (new) last statement or step backwards.  */
	  if (gsi_end_p (i))
	    i = gsi_last_bb (bb);
	  else
	    gsi_prev (&i);
	}
    }

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
}
/* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
   predicate VAL, return the edge that will be taken out of the block.
   If VAL does not match a unique edge, NULL is returned.  */

edge
find_taken_edge (basic_block bb, tree val)
{
  gimple stmt;

  stmt = last_stmt (bb);

  gcc_assert (stmt);
  gcc_assert (is_ctrl_stmt (stmt));

  if (val == NULL)
    return NULL;

  /* Only a constant predicate determines the edge statically.  */
  if (!is_gimple_min_invariant (val))
    return NULL;

  if (gimple_code (stmt) == GIMPLE_COND)
    return find_taken_edge_cond_expr (bb, val);

  if (gimple_code (stmt) == GIMPLE_SWITCH)
    return find_taken_edge_switch_expr (bb, val);

  if (computed_goto_p (stmt))
    {
      /* Only optimize if the argument is a label, if the argument is
	 not a label then we can not construct a proper CFG.

	 It may be the case that we only need to allow the LABEL_REF to
	 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
	 appear inside a LABEL_EXPR just to be safe.  */
      if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
	  && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
	return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
      return NULL;
    }

  /* is_ctrl_stmt guaranteed one of the cases above.  */
  gcc_unreachable ();
}
2091 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2092 statement, determine which of the outgoing edges will be taken out of the
2093 block. Return NULL if either edge may be taken. */
2095 static edge
2096 find_taken_edge_computed_goto (basic_block bb, tree val)
2098 basic_block dest;
2099 edge e = NULL;
2101 dest = label_to_block (val);
2102 if (dest)
2104 e = find_edge (bb, dest);
2105 gcc_assert (e != NULL);
2108 return e;
2111 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2112 statement, determine which of the two edges will be taken out of the
2113 block. Return NULL if either edge may be taken. */
2115 static edge
2116 find_taken_edge_cond_expr (basic_block bb, tree val)
2118 edge true_edge, false_edge;
2120 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2122 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2123 return (integer_zerop (val) ? false_edge : true_edge);
2126 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2127 statement, determine which edge will be taken out of the block. Return
2128 NULL if any edge may be taken. */
2130 static edge
2131 find_taken_edge_switch_expr (basic_block bb, tree val)
2133 basic_block dest_bb;
2134 edge e;
2135 gimple switch_stmt;
2136 tree taken_case;
2138 switch_stmt = last_stmt (bb);
2139 taken_case = find_case_label_for_value (switch_stmt, val);
2140 dest_bb = label_to_block (CASE_LABEL (taken_case));
2142 e = find_edge (bb, dest_bb);
2143 gcc_assert (e);
2144 return e;
/* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
   We can make optimal use here of the fact that the case labels are
   sorted: We can do a binary search for a case matching VAL.  */

static tree
find_case_label_for_value (gimple switch_stmt, tree val)
{
  size_t low, high, n = gimple_switch_num_labels (switch_stmt);
  tree default_case = gimple_switch_default_label (switch_stmt);

  /* Binary search, skipping the default label at index 0.  */
  for (low = 0, high = n; high - low > 1; )
    {
      size_t i = (high + low) / 2;
      tree t = gimple_switch_label (switch_stmt, i);
      int cmp;

      /* Cache the result of comparing CASE_LOW and val.  */
      cmp = tree_int_cst_compare (CASE_LOW (t), val);

      if (cmp > 0)
	high = i;
      else
	low = i;

      if (CASE_HIGH (t) == NULL)
	{
	  /* A single-valued case label.  */
	  if (cmp == 0)
	    return t;
	}
      else
	{
	  /* A case range.  We can only handle integer ranges.  */
	  if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
	    return t;
	}
    }

  /* No case matched; the default label is taken.  */
  return default_case;
}
2190 /* Dump a basic block on stderr. */
2192 void
2193 gimple_debug_bb (basic_block bb)
2195 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2199 /* Dump basic block with index N on stderr. */
2201 basic_block
2202 gimple_debug_bb_n (int n)
2204 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2205 return BASIC_BLOCK_FOR_FN (cfun, n);
2209 /* Dump the CFG on stderr.
2211 FLAGS are the same used by the tree dumping functions
2212 (see TDF_* in dumpfile.h). */
2214 void
2215 gimple_debug_cfg (int flags)
2217 gimple_dump_cfg (stderr, flags);
/* Dump the program showing basic block boundaries on the given FILE.

   FLAGS are the same used by the tree dumping functions (see TDF_* in
   tree.h).  */

void
gimple_dump_cfg (FILE *file, int flags)
{
  if (flags & TDF_DETAILS)
    {
      dump_function_header (file, current_function_decl, flags);
      fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
	       n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
	       last_basic_block_for_fn (cfun));

      brief_dump_cfg (file, flags | TDF_COMMENT);
      fprintf (file, "\n");
    }

  if (flags & TDF_STATS)
    dump_cfg_stats (file);

  dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
}
/* Dump CFG statistics (block/edge counts and memory usage) on FILE.  */

void
dump_cfg_stats (FILE *file)
{
  /* High-water mark of merged labels across all dumped functions.  */
  static long max_num_merged_labels = 0;
  unsigned long size, total = 0;
  long num_edges;
  basic_block bb;
  const char * const fmt_str = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
  const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
  const char * const fmt_str_3 = "%-43s%11lu%c\n";
  const char *funcname = current_function_name ();

  fprintf (file, "\nCFG Statistics for %s\n\n", funcname);

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str, "", "  Number of  ", "Memory");
  fprintf (file, fmt_str, "", "  instances  ", "used ");
  fprintf (file, "---------------------------------------------------------\n");

  size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
  total += size;
  fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
	   SCALE (size), LABEL (size));

  num_edges = 0;
  FOR_EACH_BB_FN (bb, cfun)
    num_edges += EDGE_COUNT (bb->succs);
  size = num_edges * sizeof (struct edge_def);
  total += size;
  fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
	   LABEL (total));
  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, "\n");

  if (cfg_stats.num_merged_labels > max_num_merged_labels)
    max_num_merged_labels = cfg_stats.num_merged_labels;

  fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
	   cfg_stats.num_merged_labels, max_num_merged_labels);

  fprintf (file, "\n");
}
2297 /* Dump CFG statistics on stderr. Keep extern so that it's always
2298 linked in the final executable. */
2300 DEBUG_FUNCTION void
2301 debug_cfg_stats (void)
2303 dump_cfg_stats (stderr);
2306 /*---------------------------------------------------------------------------
2307 Miscellaneous helpers
2308 ---------------------------------------------------------------------------*/
2310 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2311 flow. Transfers of control flow associated with EH are excluded. */
2313 static bool
2314 call_can_make_abnormal_goto (gimple t)
2316 /* If the function has no non-local labels, then a call cannot make an
2317 abnormal transfer of control. */
2318 if (!cfun->has_nonlocal_label
2319 && !cfun->calls_setjmp)
2320 return false;
2322 /* Likewise if the call has no side effects. */
2323 if (!gimple_has_side_effects (t))
2324 return false;
2326 /* Likewise if the called function is leaf. */
2327 if (gimple_call_flags (t) & ECF_LEAF)
2328 return false;
2330 return true;
2334 /* Return true if T can make an abnormal transfer of control flow.
2335 Transfers of control flow associated with EH are excluded. */
2337 bool
2338 stmt_can_make_abnormal_goto (gimple t)
2340 if (computed_goto_p (t))
2341 return true;
2342 if (is_gimple_call (t))
2343 return call_can_make_abnormal_goto (t);
2344 return false;
2348 /* Return true if T represents a stmt that always transfers control. */
2350 bool
2351 is_ctrl_stmt (gimple t)
2353 switch (gimple_code (t))
2355 case GIMPLE_COND:
2356 case GIMPLE_SWITCH:
2357 case GIMPLE_GOTO:
2358 case GIMPLE_RETURN:
2359 case GIMPLE_RESX:
2360 return true;
2361 default:
2362 return false;
2367 /* Return true if T is a statement that may alter the flow of control
2368 (e.g., a call to a non-returning function). */
2370 bool
2371 is_ctrl_altering_stmt (gimple t)
2373 gcc_assert (t);
2375 switch (gimple_code (t))
2377 case GIMPLE_CALL:
2379 int flags = gimple_call_flags (t);
2381 /* A call alters control flow if it can make an abnormal goto. */
2382 if (call_can_make_abnormal_goto (t))
2383 return true;
2385 /* A call also alters control flow if it does not return. */
2386 if (flags & ECF_NORETURN)
2387 return true;
2389 /* TM ending statements have backedges out of the transaction.
2390 Return true so we split the basic block containing them.
2391 Note that the TM_BUILTIN test is merely an optimization. */
2392 if ((flags & ECF_TM_BUILTIN)
2393 && is_tm_ending_fndecl (gimple_call_fndecl (t)))
2394 return true;
2396 /* BUILT_IN_RETURN call is same as return statement. */
2397 if (gimple_call_builtin_p (t, BUILT_IN_RETURN))
2398 return true;
2400 break;
2402 case GIMPLE_EH_DISPATCH:
2403 /* EH_DISPATCH branches to the individual catch handlers at
2404 this level of a try or allowed-exceptions region. It can
2405 fallthru to the next statement as well. */
2406 return true;
2408 case GIMPLE_ASM:
2409 if (gimple_asm_nlabels (t) > 0)
2410 return true;
2411 break;
2413 CASE_GIMPLE_OMP:
2414 /* OpenMP directives alter control flow. */
2415 return true;
2417 case GIMPLE_TRANSACTION:
2418 /* A transaction start alters control flow. */
2419 return true;
2421 default:
2422 break;
2425 /* If a statement can throw, it alters control flow. */
2426 return stmt_can_throw_internal (t);
2430 /* Return true if T is a simple local goto. */
2432 bool
2433 simple_goto_p (gimple t)
2435 return (gimple_code (t) == GIMPLE_GOTO
2436 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2440 /* Return true if STMT should start a new basic block. PREV_STMT is
2441 the statement preceding STMT. It is used when STMT is a label or a
2442 case label. Labels should only start a new basic block if their
2443 previous statement wasn't a label. Otherwise, sequence of labels
2444 would generate unnecessary basic blocks that only contain a single
2445 label. */
2447 static inline bool
2448 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2450 if (stmt == NULL)
2451 return false;
2453 /* Labels start a new basic block only if the preceding statement
2454 wasn't a label of the same type. This prevents the creation of
2455 consecutive blocks that have nothing but a single label. */
2456 if (gimple_code (stmt) == GIMPLE_LABEL)
2458 /* Nonlocal and computed GOTO targets always start a new block. */
2459 if (DECL_NONLOCAL (gimple_label_label (stmt))
2460 || FORCED_LABEL (gimple_label_label (stmt)))
2461 return true;
2463 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2465 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
2466 return true;
2468 cfg_stats.num_merged_labels++;
2469 return false;
2471 else
2472 return true;
2474 else if (gimple_code (stmt) == GIMPLE_CALL
2475 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2476 /* setjmp acts similar to a nonlocal GOTO target and thus should
2477 start a new block. */
2478 return true;
2480 return false;
2484 /* Return true if T should end a basic block. */
2486 bool
2487 stmt_ends_bb_p (gimple t)
2489 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2492 /* Remove block annotations and other data structures. */
2494 void
2495 delete_tree_cfg_annotations (void)
2497 vec_free (label_to_block_map_for_fn (cfun));
2501 /* Return the first statement in basic block BB. */
2503 gimple
2504 first_stmt (basic_block bb)
2506 gimple_stmt_iterator i = gsi_start_bb (bb);
2507 gimple stmt = NULL;
2509 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2511 gsi_next (&i);
2512 stmt = NULL;
2514 return stmt;
2517 /* Return the first non-label statement in basic block BB. */
2519 static gimple
2520 first_non_label_stmt (basic_block bb)
2522 gimple_stmt_iterator i = gsi_start_bb (bb);
2523 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2524 gsi_next (&i);
2525 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2528 /* Return the last statement in basic block BB. */
2530 gimple
2531 last_stmt (basic_block bb)
2533 gimple_stmt_iterator i = gsi_last_bb (bb);
2534 gimple stmt = NULL;
2536 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2538 gsi_prev (&i);
2539 stmt = NULL;
2541 return stmt;
2544 /* Return the last statement of an otherwise empty block. Return NULL
2545 if the block is totally empty, or if it contains more than one
2546 statement. */
2548 gimple
2549 last_and_only_stmt (basic_block bb)
2551 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2552 gimple last, prev;
2554 if (gsi_end_p (i))
2555 return NULL;
2557 last = gsi_stmt (i);
2558 gsi_prev_nondebug (&i);
2559 if (gsi_end_p (i))
2560 return last;
2562 /* Empty statements should no longer appear in the instruction stream.
2563 Everything that might have appeared before should be deleted by
2564 remove_useless_stmts, and the optimizers should just gsi_remove
2565 instead of smashing with build_empty_stmt.
2567 Thus the only thing that should appear here in a block containing
2568 one executable statement is a label. */
2569 prev = gsi_stmt (i);
2570 if (gimple_code (prev) == GIMPLE_LABEL)
2571 return last;
2572 else
2573 return NULL;
2576 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2578 static void
2579 reinstall_phi_args (edge new_edge, edge old_edge)
2581 edge_var_map_vector *v;
2582 edge_var_map *vm;
2583 int i;
2584 gimple_stmt_iterator phis;
2586 v = redirect_edge_var_map_vector (old_edge);
2587 if (!v)
2588 return;
2590 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2591 v->iterate (i, &vm) && !gsi_end_p (phis);
2592 i++, gsi_next (&phis))
2594 gimple phi = gsi_stmt (phis);
2595 tree result = redirect_edge_var_map_result (vm);
2596 tree arg = redirect_edge_var_map_def (vm);
2598 gcc_assert (result == gimple_phi_result (phi));
2600 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2603 redirect_edge_var_map_clear (old_edge);
2606 /* Returns the basic block after which the new basic block created
2607 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2608 near its "logical" location. This is of most help to humans looking
2609 at debugging dumps. */
2611 static basic_block
2612 split_edge_bb_loc (edge edge_in)
2614 basic_block dest = edge_in->dest;
2615 basic_block dest_prev = dest->prev_bb;
2617 if (dest_prev)
2619 edge e = find_edge (dest_prev, dest);
2620 if (e && !(e->flags & EDGE_COMPLEX))
2621 return edge_in->src;
2623 return dest_prev;
2626 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2627 Abort on abnormal edges. */
2629 static basic_block
2630 gimple_split_edge (edge edge_in)
2632 basic_block new_bb, after_bb, dest;
2633 edge new_edge, e;
2635 /* Abnormal edges cannot be split. */
2636 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2638 dest = edge_in->dest;
2640 after_bb = split_edge_bb_loc (edge_in);
2642 new_bb = create_empty_bb (after_bb);
2643 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2644 new_bb->count = edge_in->count;
2645 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2646 new_edge->probability = REG_BR_PROB_BASE;
2647 new_edge->count = edge_in->count;
2649 e = redirect_edge_and_branch (edge_in, new_bb);
2650 gcc_assert (e == edge_in);
2651 reinstall_phi_args (new_edge, e);
2653 return new_bb;
2657 /* Verify properties of the address expression T with base object BASE. */
2659 static tree
2660 verify_address (tree t, tree base)
2662 bool old_constant;
2663 bool old_side_effects;
2664 bool new_constant;
2665 bool new_side_effects;
2667 old_constant = TREE_CONSTANT (t);
2668 old_side_effects = TREE_SIDE_EFFECTS (t);
2670 recompute_tree_invariant_for_addr_expr (t);
2671 new_side_effects = TREE_SIDE_EFFECTS (t);
2672 new_constant = TREE_CONSTANT (t);
2674 if (old_constant != new_constant)
2676 error ("constant not recomputed when ADDR_EXPR changed");
2677 return t;
2679 if (old_side_effects != new_side_effects)
2681 error ("side effects not recomputed when ADDR_EXPR changed");
2682 return t;
2685 if (!(TREE_CODE (base) == VAR_DECL
2686 || TREE_CODE (base) == PARM_DECL
2687 || TREE_CODE (base) == RESULT_DECL))
2688 return NULL_TREE;
2690 if (DECL_GIMPLE_REG_P (base))
2692 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2693 return base;
2696 return NULL_TREE;
2699 /* Callback for walk_tree, check that all elements with address taken are
2700 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2701 inside a PHI node. */
/* (review) On failure this reports with error () and returns the offending
   subtree so the walk stops; returning NULL lets the walk continue.  */
2703 static tree
2704 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2706 tree t = *tp, x;
2708 if (TYPE_P (t))
2709 *walk_subtrees = 0;
2711 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2712 #define CHECK_OP(N, MSG) \
2713 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2714 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2716 switch (TREE_CODE (t))
2718 case SSA_NAME:
2719 if (SSA_NAME_IN_FREE_LIST (t))
2721 error ("SSA name in freelist but still referenced");
2722 return *tp;
2724 break;
2726 case INDIRECT_REF:
2727 error ("INDIRECT_REF in gimple IL");
2728 return t;
/* A MEM_REF needs a valid gimple mem-ref address as operand 0 and a
   pointer-typed INTEGER_CST as its offset operand 1.  */
2730 case MEM_REF:
2731 x = TREE_OPERAND (t, 0);
2732 if (!POINTER_TYPE_P (TREE_TYPE (x))
2733 || !is_gimple_mem_ref_addr (x))
2735 error ("invalid first operand of MEM_REF");
2736 return x;
2738 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2739 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2741 error ("invalid offset operand of MEM_REF");
2742 return TREE_OPERAND (t, 1);
2744 if (TREE_CODE (x) == ADDR_EXPR
2745 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2746 return x;
2747 *walk_subtrees = 0;
2748 break;
2750 case ASSERT_EXPR:
2751 x = fold (ASSERT_EXPR_COND (t));
2752 if (x == boolean_false_node)
2754 error ("ASSERT_EXPR with an always-false condition");
2755 return *tp;
2757 break;
2759 case MODIFY_EXPR:
2760 error ("MODIFY_EXPR not expected while having tuples");
2761 return *tp;
2763 case ADDR_EXPR:
2765 tree tem;
2767 gcc_assert (is_gimple_address (t));
2769 /* Skip any references (they will be checked when we recurse down the
2770 tree) and ensure that any variable used as a prefix is marked
2771 addressable. */
2772 for (x = TREE_OPERAND (t, 0);
2773 handled_component_p (x);
2774 x = TREE_OPERAND (x, 0))
2777 if ((tem = verify_address (t, x)))
2778 return tem;
2780 if (!(TREE_CODE (x) == VAR_DECL
2781 || TREE_CODE (x) == PARM_DECL
2782 || TREE_CODE (x) == RESULT_DECL))
2783 return NULL;
2785 if (!TREE_ADDRESSABLE (x))
2787 error ("address taken, but ADDRESSABLE bit not set");
2788 return x;
2791 break;
2794 case COND_EXPR:
2795 x = COND_EXPR_COND (t);
2796 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2798 error ("non-integral used in condition");
2799 return x;
2801 if (!is_gimple_condexpr (x))
2803 error ("invalid conditional operand");
2804 return x;
2806 break;
2808 case NON_LVALUE_EXPR:
2809 case TRUTH_NOT_EXPR:
2810 gcc_unreachable ();
2812 CASE_CONVERT:
2813 case FIX_TRUNC_EXPR:
2814 case FLOAT_EXPR:
2815 case NEGATE_EXPR:
2816 case ABS_EXPR:
2817 case BIT_NOT_EXPR:
2818 CHECK_OP (0, "invalid operand to unary operator");
2819 break;
2821 case REALPART_EXPR:
2822 case IMAGPART_EXPR:
2823 case BIT_FIELD_REF:
2824 if (!is_gimple_reg_type (TREE_TYPE (t)))
2826 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2827 return t;
/* BIT_FIELD_REF: position/size operands must be unsigned-HWI constants,
   the size must match the result type's precision, and for non-aggregate
   bases position + size must stay within the base object.  */
2830 if (TREE_CODE (t) == BIT_FIELD_REF)
2832 tree t0 = TREE_OPERAND (t, 0);
2833 tree t1 = TREE_OPERAND (t, 1);
2834 tree t2 = TREE_OPERAND (t, 2);
2835 if (!tree_fits_uhwi_p (t1)
2836 || !tree_fits_uhwi_p (t2))
2838 error ("invalid position or size operand to BIT_FIELD_REF");
2839 return t;
2841 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2842 && (TYPE_PRECISION (TREE_TYPE (t))
2843 != tree_to_uhwi (t1)))
2845 error ("integral result type precision does not match "
2846 "field size of BIT_FIELD_REF");
2847 return t;
2849 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2850 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2851 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2852 != tree_to_uhwi (t1)))
2854 error ("mode precision of non-integral result does not "
2855 "match field size of BIT_FIELD_REF");
2856 return t;
2858 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
2859 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
2860 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
2862 error ("position plus size exceeds size of referenced object in "
2863 "BIT_FIELD_REF");
2864 return t;
2867 t = TREE_OPERAND (t, 0);
2869 /* Fall-through. */
2870 case COMPONENT_REF:
2871 case ARRAY_REF:
2872 case ARRAY_RANGE_REF:
2873 case VIEW_CONVERT_EXPR:
2874 /* We have a nest of references. Verify that each of the operands
2875 that determine where to reference is either a constant or a variable,
2876 verify that the base is valid, and then show we've already checked
2877 the subtrees. */
2878 while (handled_component_p (t))
2880 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2881 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2882 else if (TREE_CODE (t) == ARRAY_REF
2883 || TREE_CODE (t) == ARRAY_RANGE_REF)
2885 CHECK_OP (1, "invalid array index");
2886 if (TREE_OPERAND (t, 2))
2887 CHECK_OP (2, "invalid array lower bound");
2888 if (TREE_OPERAND (t, 3))
2889 CHECK_OP (3, "invalid array stride");
2891 else if (TREE_CODE (t) == BIT_FIELD_REF
2892 || TREE_CODE (t) == REALPART_EXPR
2893 || TREE_CODE (t) == IMAGPART_EXPR)
2895 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
2896 "REALPART_EXPR");
2897 return t;
2900 t = TREE_OPERAND (t, 0);
2903 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2905 error ("invalid reference prefix");
2906 return t;
2908 *walk_subtrees = 0;
2909 break;
2910 case PLUS_EXPR:
2911 case MINUS_EXPR:
2912 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
2913 POINTER_PLUS_EXPR. */
2914 if (POINTER_TYPE_P (TREE_TYPE (t)))
2916 error ("invalid operand to plus/minus, type is a pointer");
2917 return t;
2919 CHECK_OP (0, "invalid operand to binary operator");
2920 CHECK_OP (1, "invalid operand to binary operator");
2921 break;
2923 case POINTER_PLUS_EXPR:
2924 /* Check to make sure the first operand is a pointer or reference type. */
2925 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
2927 error ("invalid operand to pointer plus, first operand is not a pointer");
2928 return t;
2930 /* Check to make sure the second operand is a ptrofftype. */
2931 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
2933 error ("invalid operand to pointer plus, second operand is not an "
2934 "integer type of appropriate width");
2935 return t;
2937 /* FALLTHROUGH */
2938 case LT_EXPR:
2939 case LE_EXPR:
2940 case GT_EXPR:
2941 case GE_EXPR:
2942 case EQ_EXPR:
2943 case NE_EXPR:
2944 case UNORDERED_EXPR:
2945 case ORDERED_EXPR:
2946 case UNLT_EXPR:
2947 case UNLE_EXPR:
2948 case UNGT_EXPR:
2949 case UNGE_EXPR:
2950 case UNEQ_EXPR:
2951 case LTGT_EXPR:
2952 case MULT_EXPR:
2953 case TRUNC_DIV_EXPR:
2954 case CEIL_DIV_EXPR:
2955 case FLOOR_DIV_EXPR:
2956 case ROUND_DIV_EXPR:
2957 case TRUNC_MOD_EXPR:
2958 case CEIL_MOD_EXPR:
2959 case FLOOR_MOD_EXPR:
2960 case ROUND_MOD_EXPR:
2961 case RDIV_EXPR:
2962 case EXACT_DIV_EXPR:
2963 case MIN_EXPR:
2964 case MAX_EXPR:
2965 case LSHIFT_EXPR:
2966 case RSHIFT_EXPR:
2967 case LROTATE_EXPR:
2968 case RROTATE_EXPR:
2969 case BIT_IOR_EXPR:
2970 case BIT_XOR_EXPR:
2971 case BIT_AND_EXPR:
2972 CHECK_OP (0, "invalid operand to binary operator");
2973 CHECK_OP (1, "invalid operand to binary operator");
2974 break;
/* Do not walk into the elements of constant vector CTORs.  */
2976 case CONSTRUCTOR:
2977 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2978 *walk_subtrees = 0;
2979 break;
2981 case CASE_LABEL_EXPR:
2982 if (CASE_CHAIN (t))
2984 error ("invalid CASE_CHAIN");
2985 return t;
2987 break;
2989 default:
2990 break;
2992 return NULL;
2994 #undef CHECK_OP
2998 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2999 Returns true if there is an error, otherwise false. */
3001 static bool
3002 verify_types_in_gimple_min_lval (tree expr)
3004 tree op;
3006 if (is_gimple_id (expr))
3007 return false;
3009 if (TREE_CODE (expr) != TARGET_MEM_REF
3010 && TREE_CODE (expr) != MEM_REF)
3012 error ("invalid expression for min lvalue");
3013 return true;
3016 /* TARGET_MEM_REFs are strange beasts. */
3017 if (TREE_CODE (expr) == TARGET_MEM_REF)
3018 return false;
3020 op = TREE_OPERAND (expr, 0);
3021 if (!is_gimple_val (op))
3023 error ("invalid operand in indirect reference");
3024 debug_generic_stmt (op);
3025 return true;
3027 /* Memory references now generally can involve a value conversion. */
3029 return false;
3032 /* Verify if EXPR is a valid GIMPLE reference expression. If
3033 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3034 if there is an error, otherwise false. */
/* (review) Peels handled components from the outside in, type-checking
   each level, then validates the innermost base (MEM_REF, TARGET_MEM_REF
   or a minimal lvalue).  */
3036 static bool
3037 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3039 while (handled_component_p (expr))
3041 tree op = TREE_OPERAND (expr, 0);
/* Array index, lower bound and stride operands must be gimple values.  */
3043 if (TREE_CODE (expr) == ARRAY_REF
3044 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3046 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3047 || (TREE_OPERAND (expr, 2)
3048 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3049 || (TREE_OPERAND (expr, 3)
3050 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3052 error ("invalid operands to array reference");
3053 debug_generic_stmt (expr);
3054 return true;
3058 /* Verify if the reference array element types are compatible. */
3059 if (TREE_CODE (expr) == ARRAY_REF
3060 && !useless_type_conversion_p (TREE_TYPE (expr),
3061 TREE_TYPE (TREE_TYPE (op))))
3063 error ("type mismatch in array reference");
3064 debug_generic_stmt (TREE_TYPE (expr));
3065 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3066 return true;
3068 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3069 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3070 TREE_TYPE (TREE_TYPE (op))))
3072 error ("type mismatch in array range reference");
3073 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3074 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3075 return true;
3078 if ((TREE_CODE (expr) == REALPART_EXPR
3079 || TREE_CODE (expr) == IMAGPART_EXPR)
3080 && !useless_type_conversion_p (TREE_TYPE (expr),
3081 TREE_TYPE (TREE_TYPE (op))))
3083 error ("type mismatch in real/imagpart reference");
3084 debug_generic_stmt (TREE_TYPE (expr));
3085 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3086 return true;
/* A COMPONENT_REF's type must match its FIELD_DECL (operand 1).  */
3089 if (TREE_CODE (expr) == COMPONENT_REF
3090 && !useless_type_conversion_p (TREE_TYPE (expr),
3091 TREE_TYPE (TREE_OPERAND (expr, 1))))
3093 error ("type mismatch in component reference");
3094 debug_generic_stmt (TREE_TYPE (expr));
3095 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3096 return true;
3099 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3101 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3102 that their operand is not an SSA name or an invariant when
3103 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3104 bug). Otherwise there is nothing to verify, gross mismatches at
3105 most invoke undefined behavior. */
3106 if (require_lvalue
3107 && (TREE_CODE (op) == SSA_NAME
3108 || is_gimple_min_invariant (op)))
3110 error ("conversion of an SSA_NAME on the left hand side");
3111 debug_generic_stmt (expr);
3112 return true;
3114 else if (TREE_CODE (op) == SSA_NAME
3115 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3117 error ("conversion of register to a different size");
3118 debug_generic_stmt (expr);
3119 return true;
3121 else if (!handled_component_p (op))
3122 return false;
3125 expr = op;
/* Now check the innermost base expression.  */
3128 if (TREE_CODE (expr) == MEM_REF)
3130 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3132 error ("invalid address operand in MEM_REF");
3133 debug_generic_stmt (expr);
3134 return true;
3136 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3137 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3139 error ("invalid offset operand in MEM_REF");
3140 debug_generic_stmt (expr);
3141 return true;
3144 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3146 if (!TMR_BASE (expr)
3147 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3149 error ("invalid address operand in TARGET_MEM_REF");
3150 return true;
3152 if (!TMR_OFFSET (expr)
3153 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3154 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3156 error ("invalid offset operand in TARGET_MEM_REF");
3157 debug_generic_stmt (expr);
3158 return true;
3162 return ((require_lvalue || !is_gimple_min_invariant (expr))
3163 && verify_types_in_gimple_min_lval (expr));
3166 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3167 list of pointer-to types that is trivially convertible to DEST. */
3169 static bool
3170 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3172 tree src;
3174 if (!TYPE_POINTER_TO (src_obj))
3175 return true;
3177 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3178 if (useless_type_conversion_p (dest, src))
3179 return true;
3181 return false;
3184 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3185 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3187 static bool
3188 valid_fixed_convert_types_p (tree type1, tree type2)
3190 return (FIXED_POINT_TYPE_P (type1)
3191 && (INTEGRAL_TYPE_P (type2)
3192 || SCALAR_FLOAT_TYPE_P (type2)
3193 || FIXED_POINT_TYPE_P (type2)));
3196 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3197 is a problem, otherwise false. */
/* (review) Validates in order: call target shape, pure/const decl state,
   LHS, return-type conversion, static chain, and argument forms.  */
3199 static bool
3200 verify_gimple_call (gimple stmt)
3202 tree fn = gimple_call_fn (stmt);
3203 tree fntype, fndecl;
3204 unsigned i;
/* Internal calls must have no FN; regular calls must have one.  */
3206 if (gimple_call_internal_p (stmt))
3208 if (fn)
3210 error ("gimple call has two targets");
3211 debug_generic_stmt (fn);
3212 return true;
3215 else
3217 if (!fn)
3219 error ("gimple call has no target");
3220 return true;
3224 if (fn && !is_gimple_call_addr (fn))
3226 error ("invalid function in gimple call");
3227 debug_generic_stmt (fn);
3228 return true;
/* FN must be a pointer to a FUNCTION_TYPE or METHOD_TYPE.  */
3231 if (fn
3232 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3233 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3234 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3236 error ("non-function in gimple call");
3237 return true;
/* Looping-const-or-pure implies the decl is pure or readonly.  */
3240 fndecl = gimple_call_fndecl (stmt);
3241 if (fndecl
3242 && TREE_CODE (fndecl) == FUNCTION_DECL
3243 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3244 && !DECL_PURE_P (fndecl)
3245 && !TREE_READONLY (fndecl))
3247 error ("invalid pure const state for function");
3248 return true;
3251 if (gimple_call_lhs (stmt)
3252 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
3253 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
3255 error ("invalid LHS in gimple call");
3256 return true;
3259 if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
3261 error ("LHS in noreturn call");
3262 return true;
3265 fntype = gimple_call_fntype (stmt);
3266 if (fntype
3267 && gimple_call_lhs (stmt)
3268 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3269 TREE_TYPE (fntype))
3270 /* ??? At least C++ misses conversions at assignments from
3271 void * call results.
3272 ??? Java is completely off. Especially with functions
3273 returning java.lang.Object.
3274 For now simply allow arbitrary pointer type conversions. */
3275 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3276 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3278 error ("invalid conversion in gimple call");
3279 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3280 debug_generic_stmt (TREE_TYPE (fntype));
3281 return true;
3284 if (gimple_call_chain (stmt)
3285 && !is_gimple_val (gimple_call_chain (stmt)))
3287 error ("invalid static chain in gimple call");
3288 debug_generic_stmt (gimple_call_chain (stmt));
3289 return true;
3292 /* If there is a static chain argument, this should not be an indirect
3293 call, and the decl should have DECL_STATIC_CHAIN set. */
3294 if (gimple_call_chain (stmt))
3296 if (!gimple_call_fndecl (stmt))
3298 error ("static chain in indirect gimple call");
3299 return true;
3301 fn = TREE_OPERAND (fn, 0);
3303 if (!DECL_STATIC_CHAIN (fn))
3305 error ("static chain with function that doesn%'t use one");
3306 return true;
3310 /* ??? The C frontend passes unpromoted arguments in case it
3311 didn't see a function declaration before the call. So for now
3312 leave the call arguments mostly unverified. Once we gimplify
3313 unit-at-a-time we have a chance to fix this. */
/* Register-typed arguments must be gimple values; aggregate-typed
   arguments must at least be lvalues.  */
3315 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3317 tree arg = gimple_call_arg (stmt, i);
3318 if ((is_gimple_reg_type (TREE_TYPE (arg))
3319 && !is_gimple_val (arg))
3320 || (!is_gimple_reg_type (TREE_TYPE (arg))
3321 && !is_gimple_lvalue (arg)))
3323 error ("invalid argument to gimple call");
3324 debug_generic_expr (arg);
3325 return true;
3329 return false;
3332 /* Verifies the gimple comparison with the result type TYPE and
3333 the operands OP0 and OP1. */
/* (review) Returns true (after reporting with error ()) on malformed
   operands or result type, false when the comparison is well-formed.  */
3335 static bool
3336 verify_gimple_comparison (tree type, tree op0, tree op1)
3338 tree op0_type = TREE_TYPE (op0);
3339 tree op1_type = TREE_TYPE (op1);
3341 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3343 error ("invalid operands in gimple comparison");
3344 return true;
3347 /* For comparisons we do not have the operations type as the
3348 effective type the comparison is carried out in. Instead
3349 we require that either the first operand is trivially
3350 convertible into the second, or the other way around.
3351 Because we special-case pointers to void we allow
3352 comparisons of pointers with the same mode as well. */
3353 if (!useless_type_conversion_p (op0_type, op1_type)
3354 && !useless_type_conversion_p (op1_type, op0_type)
3355 && (!POINTER_TYPE_P (op0_type)
3356 || !POINTER_TYPE_P (op1_type)
3357 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3359 error ("mismatching comparison operand types");
3360 debug_generic_expr (op0_type);
3361 debug_generic_expr (op1_type);
3362 return true;
3365 /* The resulting type of a comparison may be an effective boolean type. */
3366 if (INTEGRAL_TYPE_P (type)
3367 && (TREE_CODE (type) == BOOLEAN_TYPE
3368 || TYPE_PRECISION (type) == 1))
/* A boolean result is only valid for scalar (non-vector) operands.  */
3370 if (TREE_CODE (op0_type) == VECTOR_TYPE
3371 || TREE_CODE (op1_type) == VECTOR_TYPE)
3373 error ("vector comparison returning a boolean");
3374 debug_generic_expr (op0_type);
3375 debug_generic_expr (op1_type);
3376 return true;
3379 /* Or an integer vector type with the same size and element count
3380 as the comparison operand types. */
3381 else if (TREE_CODE (type) == VECTOR_TYPE
3382 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3384 if (TREE_CODE (op0_type) != VECTOR_TYPE
3385 || TREE_CODE (op1_type) != VECTOR_TYPE)
3387 error ("non-vector operands in vector comparison");
3388 debug_generic_expr (op0_type);
3389 debug_generic_expr (op1_type);
3390 return true;
3393 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3394 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3395 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
3396 /* The result of a vector comparison is of signed
3397 integral type. */
3398 || TYPE_UNSIGNED (TREE_TYPE (type)))
3400 error ("invalid vector comparison resulting type");
3401 debug_generic_expr (type);
3402 return true;
3405 else
3407 error ("bogus comparison result type");
3408 debug_generic_expr (type);
3409 return true;
3412 return false;
3415 /* Verify a gimple assignment statement STMT with an unary rhs.
3416 Returns true if anything is wrong. */
/* (review) The LHS must be a register and RHS1 a gimple value.
   Conversion codes get per-code type checks first; the remaining unary
   codes must be exactly type-preserving (no implicit conversion).  */
3418 static bool
3419 verify_gimple_assign_unary (gimple stmt)
3421 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3422 tree lhs = gimple_assign_lhs (stmt);
3423 tree lhs_type = TREE_TYPE (lhs);
3424 tree rhs1 = gimple_assign_rhs1 (stmt);
3425 tree rhs1_type = TREE_TYPE (rhs1);
3427 if (!is_gimple_reg (lhs))
3429 error ("non-register as LHS of unary operation");
3430 return true;
3433 if (!is_gimple_val (rhs1))
3435 error ("invalid operand in unary operation");
3436 return true;
3439 /* First handle conversions. */
3440 switch (rhs_code)
3442 CASE_CONVERT:
3444 /* Allow conversions from pointer type to integral type only if
3445 there is no sign or zero extension involved.
3446 For targets were the precision of ptrofftype doesn't match that
3447 of pointers we need to allow arbitrary conversions to ptrofftype. */
3448 if ((POINTER_TYPE_P (lhs_type)
3449 && INTEGRAL_TYPE_P (rhs1_type))
3450 || (POINTER_TYPE_P (rhs1_type)
3451 && INTEGRAL_TYPE_P (lhs_type)
3452 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3453 || ptrofftype_p (sizetype))))
3454 return false;
3456 /* Allow conversion from integral to offset type and vice versa. */
3457 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3458 && INTEGRAL_TYPE_P (rhs1_type))
3459 || (INTEGRAL_TYPE_P (lhs_type)
3460 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3461 return false;
3463 /* Otherwise assert we are converting between types of the
3464 same kind. */
3465 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3467 error ("invalid types in nop conversion")
3468 debug_generic_expr (lhs_type);
3469 debug_generic_expr (rhs1_type);
3470 return true;
3473 return false;
3476 case ADDR_SPACE_CONVERT_EXPR:
/* Both sides must be pointers into distinct address spaces.  */
3478 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3479 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3480 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3482 error ("invalid types in address space conversion");
3483 debug_generic_expr (lhs_type);
3484 debug_generic_expr (rhs1_type);
3485 return true;
3488 return false;
3491 case FIXED_CONVERT_EXPR:
3493 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3494 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3496 error ("invalid types in fixed-point conversion");
3497 debug_generic_expr (lhs_type);
3498 debug_generic_expr (rhs1_type);
3499 return true;
3502 return false;
3505 case FLOAT_EXPR:
/* Integer (or integer vector) to float (or float vector).  */
3507 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3508 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3509 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3511 error ("invalid types in conversion to floating point");
3512 debug_generic_expr (lhs_type);
3513 debug_generic_expr (rhs1_type);
3514 return true;
3517 return false;
3520 case FIX_TRUNC_EXPR:
/* Float (or float vector) to integer (or integer vector).  */
3522 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3523 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3524 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3526 error ("invalid types in conversion to integer");
3527 debug_generic_expr (lhs_type);
3528 debug_generic_expr (rhs1_type);
3529 return true;
3532 return false;
3535 case VEC_UNPACK_HI_EXPR:
3536 case VEC_UNPACK_LO_EXPR:
3537 case REDUC_MAX_EXPR:
3538 case REDUC_MIN_EXPR:
3539 case REDUC_PLUS_EXPR:
3540 case VEC_UNPACK_FLOAT_HI_EXPR:
3541 case VEC_UNPACK_FLOAT_LO_EXPR:
3542 /* FIXME. */
3543 return false;
3545 case NEGATE_EXPR:
3546 case ABS_EXPR:
3547 case BIT_NOT_EXPR:
3548 case PAREN_EXPR:
3549 case NON_LVALUE_EXPR:
3550 case CONJ_EXPR:
3551 break;
3553 default:
3554 gcc_unreachable ();
3557 /* For the remaining codes assert there is no conversion involved. */
3558 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3560 error ("non-trivial conversion in unary operation");
3561 debug_generic_expr (lhs_type);
3562 debug_generic_expr (rhs1_type);
3563 return true;
3566 return false;
3569 /* Verify a gimple assignment statement STMT with a binary rhs.
3570 Returns true if anything is wrong. */
3572 static bool
3573 verify_gimple_assign_binary (gimple stmt)
3575 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3576 tree lhs = gimple_assign_lhs (stmt);
3577 tree lhs_type = TREE_TYPE (lhs);
3578 tree rhs1 = gimple_assign_rhs1 (stmt);
3579 tree rhs1_type = TREE_TYPE (rhs1);
3580 tree rhs2 = gimple_assign_rhs2 (stmt);
3581 tree rhs2_type = TREE_TYPE (rhs2);
3583 if (!is_gimple_reg (lhs))
3585 error ("non-register as LHS of binary operation");
3586 return true;
3589 if (!is_gimple_val (rhs1)
3590 || !is_gimple_val (rhs2))
3592 error ("invalid operands in binary operation");
3593 return true;
3596 /* First handle operations that involve different types. */
3597 switch (rhs_code)
3599 case COMPLEX_EXPR:
3601 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3602 || !(INTEGRAL_TYPE_P (rhs1_type)
3603 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3604 || !(INTEGRAL_TYPE_P (rhs2_type)
3605 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3607 error ("type mismatch in complex expression");
3608 debug_generic_expr (lhs_type);
3609 debug_generic_expr (rhs1_type);
3610 debug_generic_expr (rhs2_type);
3611 return true;
3614 return false;
3617 case LSHIFT_EXPR:
3618 case RSHIFT_EXPR:
3619 case LROTATE_EXPR:
3620 case RROTATE_EXPR:
3622 /* Shifts and rotates are ok on integral types, fixed point
3623 types and integer vector types. */
3624 if ((!INTEGRAL_TYPE_P (rhs1_type)
3625 && !FIXED_POINT_TYPE_P (rhs1_type)
3626 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3627 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3628 || (!INTEGRAL_TYPE_P (rhs2_type)
3629 /* Vector shifts of vectors are also ok. */
3630 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3631 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3632 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3633 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3634 || !useless_type_conversion_p (lhs_type, rhs1_type))
3636 error ("type mismatch in shift expression");
3637 debug_generic_expr (lhs_type);
3638 debug_generic_expr (rhs1_type);
3639 debug_generic_expr (rhs2_type);
3640 return true;
3643 return false;
3646 case VEC_LSHIFT_EXPR:
3647 case VEC_RSHIFT_EXPR:
3649 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3650 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3651 || POINTER_TYPE_P (TREE_TYPE (rhs1_type))
3652 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3653 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3654 || (!INTEGRAL_TYPE_P (rhs2_type)
3655 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3656 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3657 || !useless_type_conversion_p (lhs_type, rhs1_type))
3659 error ("type mismatch in vector shift expression");
3660 debug_generic_expr (lhs_type);
3661 debug_generic_expr (rhs1_type);
3662 debug_generic_expr (rhs2_type);
3663 return true;
3665 /* For shifting a vector of non-integral components we
3666 only allow shifting by a constant multiple of the element size. */
3667 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3668 && (TREE_CODE (rhs2) != INTEGER_CST
3669 || !div_if_zero_remainder (rhs2,
3670 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3672 error ("non-element sized vector shift of floating point vector");
3673 return true;
3676 return false;
3679 case WIDEN_LSHIFT_EXPR:
3681 if (!INTEGRAL_TYPE_P (lhs_type)
3682 || !INTEGRAL_TYPE_P (rhs1_type)
3683 || TREE_CODE (rhs2) != INTEGER_CST
3684 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3686 error ("type mismatch in widening vector shift expression");
3687 debug_generic_expr (lhs_type);
3688 debug_generic_expr (rhs1_type);
3689 debug_generic_expr (rhs2_type);
3690 return true;
3693 return false;
3696 case VEC_WIDEN_LSHIFT_HI_EXPR:
3697 case VEC_WIDEN_LSHIFT_LO_EXPR:
3699 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3700 || TREE_CODE (lhs_type) != VECTOR_TYPE
3701 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3702 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3703 || TREE_CODE (rhs2) != INTEGER_CST
3704 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3705 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3707 error ("type mismatch in widening vector shift expression");
3708 debug_generic_expr (lhs_type);
3709 debug_generic_expr (rhs1_type);
3710 debug_generic_expr (rhs2_type);
3711 return true;
3714 return false;
3717 case PLUS_EXPR:
3718 case MINUS_EXPR:
3720 tree lhs_etype = lhs_type;
3721 tree rhs1_etype = rhs1_type;
3722 tree rhs2_etype = rhs2_type;
3723 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3725 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3726 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3728 error ("invalid non-vector operands to vector valued plus");
3729 return true;
3731 lhs_etype = TREE_TYPE (lhs_type);
3732 rhs1_etype = TREE_TYPE (rhs1_type);
3733 rhs2_etype = TREE_TYPE (rhs2_type);
3735 if (POINTER_TYPE_P (lhs_etype)
3736 || POINTER_TYPE_P (rhs1_etype)
3737 || POINTER_TYPE_P (rhs2_etype))
3739 error ("invalid (pointer) operands to plus/minus");
3740 return true;
3743 /* Continue with generic binary expression handling. */
3744 break;
3747 case POINTER_PLUS_EXPR:
3749 if (!POINTER_TYPE_P (rhs1_type)
3750 || !useless_type_conversion_p (lhs_type, rhs1_type)
3751 || !ptrofftype_p (rhs2_type))
3753 error ("type mismatch in pointer plus expression");
3754 debug_generic_stmt (lhs_type);
3755 debug_generic_stmt (rhs1_type);
3756 debug_generic_stmt (rhs2_type);
3757 return true;
3760 return false;
3763 case TRUTH_ANDIF_EXPR:
3764 case TRUTH_ORIF_EXPR:
3765 case TRUTH_AND_EXPR:
3766 case TRUTH_OR_EXPR:
3767 case TRUTH_XOR_EXPR:
3769 gcc_unreachable ();
3771 case LT_EXPR:
3772 case LE_EXPR:
3773 case GT_EXPR:
3774 case GE_EXPR:
3775 case EQ_EXPR:
3776 case NE_EXPR:
3777 case UNORDERED_EXPR:
3778 case ORDERED_EXPR:
3779 case UNLT_EXPR:
3780 case UNLE_EXPR:
3781 case UNGT_EXPR:
3782 case UNGE_EXPR:
3783 case UNEQ_EXPR:
3784 case LTGT_EXPR:
3785 /* Comparisons are also binary, but the result type is not
3786 connected to the operand types. */
3787 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3789 case WIDEN_MULT_EXPR:
3790 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3791 return true;
3792 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3793 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3795 case WIDEN_SUM_EXPR:
3796 case VEC_WIDEN_MULT_HI_EXPR:
3797 case VEC_WIDEN_MULT_LO_EXPR:
3798 case VEC_WIDEN_MULT_EVEN_EXPR:
3799 case VEC_WIDEN_MULT_ODD_EXPR:
3800 case VEC_PACK_TRUNC_EXPR:
3801 case VEC_PACK_SAT_EXPR:
3802 case VEC_PACK_FIX_TRUNC_EXPR:
3803 /* FIXME. */
3804 return false;
3806 case MULT_EXPR:
3807 case MULT_HIGHPART_EXPR:
3808 case TRUNC_DIV_EXPR:
3809 case CEIL_DIV_EXPR:
3810 case FLOOR_DIV_EXPR:
3811 case ROUND_DIV_EXPR:
3812 case TRUNC_MOD_EXPR:
3813 case CEIL_MOD_EXPR:
3814 case FLOOR_MOD_EXPR:
3815 case ROUND_MOD_EXPR:
3816 case RDIV_EXPR:
3817 case EXACT_DIV_EXPR:
3818 case MIN_EXPR:
3819 case MAX_EXPR:
3820 case BIT_IOR_EXPR:
3821 case BIT_XOR_EXPR:
3822 case BIT_AND_EXPR:
3823 /* Continue with generic binary expression handling. */
3824 break;
3826 default:
3827 gcc_unreachable ();
3830 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3831 || !useless_type_conversion_p (lhs_type, rhs2_type))
3833 error ("type mismatch in binary expression");
3834 debug_generic_stmt (lhs_type);
3835 debug_generic_stmt (rhs1_type);
3836 debug_generic_stmt (rhs2_type);
3837 return true;
3840 return false;
3843 /* Verify a gimple assignment statement STMT with a ternary rhs.
3844 Returns true if anything is wrong. */
3846 static bool
3847 verify_gimple_assign_ternary (gimple stmt)
3849 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3850 tree lhs = gimple_assign_lhs (stmt);
3851 tree lhs_type = TREE_TYPE (lhs);
3852 tree rhs1 = gimple_assign_rhs1 (stmt);
3853 tree rhs1_type = TREE_TYPE (rhs1);
3854 tree rhs2 = gimple_assign_rhs2 (stmt);
3855 tree rhs2_type = TREE_TYPE (rhs2);
3856 tree rhs3 = gimple_assign_rhs3 (stmt);
3857 tree rhs3_type = TREE_TYPE (rhs3);
3859 if (!is_gimple_reg (lhs))
3861 error ("non-register as LHS of ternary operation");
3862 return true;
3865 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3866 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3867 || !is_gimple_val (rhs2)
3868 || !is_gimple_val (rhs3))
3870 error ("invalid operands in ternary operation");
3871 return true;
3874 /* First handle operations that involve different types. */
3875 switch (rhs_code)
3877 case WIDEN_MULT_PLUS_EXPR:
3878 case WIDEN_MULT_MINUS_EXPR:
3879 if ((!INTEGRAL_TYPE_P (rhs1_type)
3880 && !FIXED_POINT_TYPE_P (rhs1_type))
3881 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3882 || !useless_type_conversion_p (lhs_type, rhs3_type)
3883 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3884 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3886 error ("type mismatch in widening multiply-accumulate expression");
3887 debug_generic_expr (lhs_type);
3888 debug_generic_expr (rhs1_type);
3889 debug_generic_expr (rhs2_type);
3890 debug_generic_expr (rhs3_type);
3891 return true;
3893 break;
3895 case FMA_EXPR:
3896 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3897 || !useless_type_conversion_p (lhs_type, rhs2_type)
3898 || !useless_type_conversion_p (lhs_type, rhs3_type))
3900 error ("type mismatch in fused multiply-add expression");
3901 debug_generic_expr (lhs_type);
3902 debug_generic_expr (rhs1_type);
3903 debug_generic_expr (rhs2_type);
3904 debug_generic_expr (rhs3_type);
3905 return true;
3907 break;
3909 case COND_EXPR:
3910 case VEC_COND_EXPR:
3911 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3912 || !useless_type_conversion_p (lhs_type, rhs3_type))
3914 error ("type mismatch in conditional expression");
3915 debug_generic_expr (lhs_type);
3916 debug_generic_expr (rhs2_type);
3917 debug_generic_expr (rhs3_type);
3918 return true;
3920 break;
3922 case VEC_PERM_EXPR:
3923 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3924 || !useless_type_conversion_p (lhs_type, rhs2_type))
3926 error ("type mismatch in vector permute expression");
3927 debug_generic_expr (lhs_type);
3928 debug_generic_expr (rhs1_type);
3929 debug_generic_expr (rhs2_type);
3930 debug_generic_expr (rhs3_type);
3931 return true;
3934 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3935 || TREE_CODE (rhs2_type) != VECTOR_TYPE
3936 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
3938 error ("vector types expected in vector permute expression");
3939 debug_generic_expr (lhs_type);
3940 debug_generic_expr (rhs1_type);
3941 debug_generic_expr (rhs2_type);
3942 debug_generic_expr (rhs3_type);
3943 return true;
3946 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
3947 || TYPE_VECTOR_SUBPARTS (rhs2_type)
3948 != TYPE_VECTOR_SUBPARTS (rhs3_type)
3949 || TYPE_VECTOR_SUBPARTS (rhs3_type)
3950 != TYPE_VECTOR_SUBPARTS (lhs_type))
3952 error ("vectors with different element number found "
3953 "in vector permute expression");
3954 debug_generic_expr (lhs_type);
3955 debug_generic_expr (rhs1_type);
3956 debug_generic_expr (rhs2_type);
3957 debug_generic_expr (rhs3_type);
3958 return true;
3961 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
3962 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
3963 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
3965 error ("invalid mask type in vector permute expression");
3966 debug_generic_expr (lhs_type);
3967 debug_generic_expr (rhs1_type);
3968 debug_generic_expr (rhs2_type);
3969 debug_generic_expr (rhs3_type);
3970 return true;
3973 return false;
3975 case SAD_EXPR:
3976 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
3977 || !useless_type_conversion_p (lhs_type, rhs3_type)
3978 || 2 * GET_MODE_BITSIZE (GET_MODE_INNER
3979 (TYPE_MODE (TREE_TYPE (rhs1_type))))
3980 > GET_MODE_BITSIZE (GET_MODE_INNER
3981 (TYPE_MODE (TREE_TYPE (lhs_type)))))
3983 error ("type mismatch in sad expression");
3984 debug_generic_expr (lhs_type);
3985 debug_generic_expr (rhs1_type);
3986 debug_generic_expr (rhs2_type);
3987 debug_generic_expr (rhs3_type);
3988 return true;
3991 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3992 || TREE_CODE (rhs2_type) != VECTOR_TYPE
3993 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
3995 error ("vector types expected in sad expression");
3996 debug_generic_expr (lhs_type);
3997 debug_generic_expr (rhs1_type);
3998 debug_generic_expr (rhs2_type);
3999 debug_generic_expr (rhs3_type);
4000 return true;
4003 return false;
4005 case DOT_PROD_EXPR:
4006 case REALIGN_LOAD_EXPR:
4007 /* FIXME. */
4008 return false;
4010 default:
4011 gcc_unreachable ();
4013 return false;
4016 /* Verify a gimple assignment statement STMT with a single rhs.
4017 Returns true if anything is wrong. */
4019 static bool
4020 verify_gimple_assign_single (gimple stmt)
4022 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4023 tree lhs = gimple_assign_lhs (stmt);
4024 tree lhs_type = TREE_TYPE (lhs);
4025 tree rhs1 = gimple_assign_rhs1 (stmt);
4026 tree rhs1_type = TREE_TYPE (rhs1);
4027 bool res = false;
4029 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4031 error ("non-trivial conversion at assignment");
4032 debug_generic_expr (lhs_type);
4033 debug_generic_expr (rhs1_type);
4034 return true;
4037 if (gimple_clobber_p (stmt)
4038 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4040 error ("non-decl/MEM_REF LHS in clobber statement");
4041 debug_generic_expr (lhs);
4042 return true;
4045 if (handled_component_p (lhs)
4046 || TREE_CODE (lhs) == MEM_REF
4047 || TREE_CODE (lhs) == TARGET_MEM_REF)
4048 res |= verify_types_in_gimple_reference (lhs, true);
4050 /* Special codes we cannot handle via their class. */
4051 switch (rhs_code)
4053 case ADDR_EXPR:
4055 tree op = TREE_OPERAND (rhs1, 0);
4056 if (!is_gimple_addressable (op))
4058 error ("invalid operand in unary expression");
4059 return true;
4062 /* Technically there is no longer a need for matching types, but
4063 gimple hygiene asks for this check. In LTO we can end up
4064 combining incompatible units and thus end up with addresses
4065 of globals that change their type to a common one. */
4066 if (!in_lto_p
4067 && !types_compatible_p (TREE_TYPE (op),
4068 TREE_TYPE (TREE_TYPE (rhs1)))
4069 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4070 TREE_TYPE (op)))
4072 error ("type mismatch in address expression");
4073 debug_generic_stmt (TREE_TYPE (rhs1));
4074 debug_generic_stmt (TREE_TYPE (op));
4075 return true;
4078 return verify_types_in_gimple_reference (op, true);
4081 /* tcc_reference */
4082 case INDIRECT_REF:
4083 error ("INDIRECT_REF in gimple IL");
4084 return true;
4086 case COMPONENT_REF:
4087 case BIT_FIELD_REF:
4088 case ARRAY_REF:
4089 case ARRAY_RANGE_REF:
4090 case VIEW_CONVERT_EXPR:
4091 case REALPART_EXPR:
4092 case IMAGPART_EXPR:
4093 case TARGET_MEM_REF:
4094 case MEM_REF:
4095 if (!is_gimple_reg (lhs)
4096 && is_gimple_reg_type (TREE_TYPE (lhs)))
4098 error ("invalid rhs for gimple memory store");
4099 debug_generic_stmt (lhs);
4100 debug_generic_stmt (rhs1);
4101 return true;
4103 return res || verify_types_in_gimple_reference (rhs1, false);
4105 /* tcc_constant */
4106 case SSA_NAME:
4107 case INTEGER_CST:
4108 case REAL_CST:
4109 case FIXED_CST:
4110 case COMPLEX_CST:
4111 case VECTOR_CST:
4112 case STRING_CST:
4113 return res;
4115 /* tcc_declaration */
4116 case CONST_DECL:
4117 return res;
4118 case VAR_DECL:
4119 case PARM_DECL:
4120 if (!is_gimple_reg (lhs)
4121 && !is_gimple_reg (rhs1)
4122 && is_gimple_reg_type (TREE_TYPE (lhs)))
4124 error ("invalid rhs for gimple memory store");
4125 debug_generic_stmt (lhs);
4126 debug_generic_stmt (rhs1);
4127 return true;
4129 return res;
4131 case CONSTRUCTOR:
4132 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4134 unsigned int i;
4135 tree elt_i, elt_v, elt_t = NULL_TREE;
4137 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4138 return res;
4139 /* For vector CONSTRUCTORs we require that either it is empty
4140 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4141 (then the element count must be correct to cover the whole
4142 outer vector and index must be NULL on all elements, or it is
4143 a CONSTRUCTOR of scalar elements, where we as an exception allow
4144 smaller number of elements (assuming zero filling) and
4145 consecutive indexes as compared to NULL indexes (such
4146 CONSTRUCTORs can appear in the IL from FEs). */
4147 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4149 if (elt_t == NULL_TREE)
4151 elt_t = TREE_TYPE (elt_v);
4152 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4154 tree elt_t = TREE_TYPE (elt_v);
4155 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4156 TREE_TYPE (elt_t)))
4158 error ("incorrect type of vector CONSTRUCTOR"
4159 " elements");
4160 debug_generic_stmt (rhs1);
4161 return true;
4163 else if (CONSTRUCTOR_NELTS (rhs1)
4164 * TYPE_VECTOR_SUBPARTS (elt_t)
4165 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4167 error ("incorrect number of vector CONSTRUCTOR"
4168 " elements");
4169 debug_generic_stmt (rhs1);
4170 return true;
4173 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4174 elt_t))
4176 error ("incorrect type of vector CONSTRUCTOR elements");
4177 debug_generic_stmt (rhs1);
4178 return true;
4180 else if (CONSTRUCTOR_NELTS (rhs1)
4181 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4183 error ("incorrect number of vector CONSTRUCTOR elements");
4184 debug_generic_stmt (rhs1);
4185 return true;
4188 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4190 error ("incorrect type of vector CONSTRUCTOR elements");
4191 debug_generic_stmt (rhs1);
4192 return true;
4194 if (elt_i != NULL_TREE
4195 && (TREE_CODE (elt_t) == VECTOR_TYPE
4196 || TREE_CODE (elt_i) != INTEGER_CST
4197 || compare_tree_int (elt_i, i) != 0))
4199 error ("vector CONSTRUCTOR with non-NULL element index");
4200 debug_generic_stmt (rhs1);
4201 return true;
4205 return res;
4206 case OBJ_TYPE_REF:
4207 case ASSERT_EXPR:
4208 case WITH_SIZE_EXPR:
4209 /* FIXME. */
4210 return res;
4212 default:;
4215 return res;
4218 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4219 is a problem, otherwise false. */
4221 static bool
4222 verify_gimple_assign (gimple stmt)
4224 switch (gimple_assign_rhs_class (stmt))
4226 case GIMPLE_SINGLE_RHS:
4227 return verify_gimple_assign_single (stmt);
4229 case GIMPLE_UNARY_RHS:
4230 return verify_gimple_assign_unary (stmt);
4232 case GIMPLE_BINARY_RHS:
4233 return verify_gimple_assign_binary (stmt);
4235 case GIMPLE_TERNARY_RHS:
4236 return verify_gimple_assign_ternary (stmt);
4238 default:
4239 gcc_unreachable ();
4243 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4244 is a problem, otherwise false. */
4246 static bool
4247 verify_gimple_return (gimple stmt)
4249 tree op = gimple_return_retval (stmt);
4250 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4252 /* We cannot test for present return values as we do not fix up missing
4253 return values from the original source. */
4254 if (op == NULL)
4255 return false;
4257 if (!is_gimple_val (op)
4258 && TREE_CODE (op) != RESULT_DECL)
4260 error ("invalid operand in return statement");
4261 debug_generic_stmt (op);
4262 return true;
4265 if ((TREE_CODE (op) == RESULT_DECL
4266 && DECL_BY_REFERENCE (op))
4267 || (TREE_CODE (op) == SSA_NAME
4268 && SSA_NAME_VAR (op)
4269 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4270 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4271 op = TREE_TYPE (op);
4273 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4275 error ("invalid conversion in return statement");
4276 debug_generic_stmt (restype);
4277 debug_generic_stmt (TREE_TYPE (op));
4278 return true;
4281 return false;
4285 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4286 is a problem, otherwise false. */
4288 static bool
4289 verify_gimple_goto (gimple stmt)
4291 tree dest = gimple_goto_dest (stmt);
4293 /* ??? We have two canonical forms of direct goto destinations, a
4294 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4295 if (TREE_CODE (dest) != LABEL_DECL
4296 && (!is_gimple_val (dest)
4297 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4299 error ("goto destination is neither a label nor a pointer");
4300 return true;
4303 return false;
4306 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4307 is a problem, otherwise false. */
4309 static bool
4310 verify_gimple_switch (gimple stmt)
4312 unsigned int i, n;
4313 tree elt, prev_upper_bound = NULL_TREE;
4314 tree index_type, elt_type = NULL_TREE;
4316 if (!is_gimple_val (gimple_switch_index (stmt)))
4318 error ("invalid operand to switch statement");
4319 debug_generic_stmt (gimple_switch_index (stmt));
4320 return true;
4323 index_type = TREE_TYPE (gimple_switch_index (stmt));
4324 if (! INTEGRAL_TYPE_P (index_type))
4326 error ("non-integral type switch statement");
4327 debug_generic_expr (index_type);
4328 return true;
4331 elt = gimple_switch_label (stmt, 0);
4332 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4334 error ("invalid default case label in switch statement");
4335 debug_generic_expr (elt);
4336 return true;
4339 n = gimple_switch_num_labels (stmt);
4340 for (i = 1; i < n; i++)
4342 elt = gimple_switch_label (stmt, i);
4344 if (! CASE_LOW (elt))
4346 error ("invalid case label in switch statement");
4347 debug_generic_expr (elt);
4348 return true;
4350 if (CASE_HIGH (elt)
4351 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4353 error ("invalid case range in switch statement");
4354 debug_generic_expr (elt);
4355 return true;
4358 if (elt_type)
4360 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4361 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4363 error ("type mismatch for case label in switch statement");
4364 debug_generic_expr (elt);
4365 return true;
4368 else
4370 elt_type = TREE_TYPE (CASE_LOW (elt));
4371 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4373 error ("type precision mismatch in switch statement");
4374 return true;
4378 if (prev_upper_bound)
4380 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4382 error ("case labels not sorted in switch statement");
4383 return true;
4387 prev_upper_bound = CASE_HIGH (elt);
4388 if (! prev_upper_bound)
4389 prev_upper_bound = CASE_LOW (elt);
4392 return false;
4395 /* Verify a gimple debug statement STMT.
4396 Returns true if anything is wrong. */
4398 static bool
4399 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4401 /* There isn't much that could be wrong in a gimple debug stmt. A
4402 gimple debug bind stmt, for example, maps a tree, that's usually
4403 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4404 component or member of an aggregate type, to another tree, that
4405 can be an arbitrary expression. These stmts expand into debug
4406 insns, and are converted to debug notes by var-tracking.c. */
4407 return false;
4410 /* Verify a gimple label statement STMT.
4411 Returns true if anything is wrong. */
4413 static bool
4414 verify_gimple_label (gimple stmt)
4416 tree decl = gimple_label_label (stmt);
4417 int uid;
4418 bool err = false;
4420 if (TREE_CODE (decl) != LABEL_DECL)
4421 return true;
4422 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4423 && DECL_CONTEXT (decl) != current_function_decl)
4425 error ("label's context is not the current function decl");
4426 err |= true;
4429 uid = LABEL_DECL_UID (decl);
4430 if (cfun->cfg
4431 && (uid == -1
4432 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4434 error ("incorrect entry in label_to_block_map");
4435 err |= true;
4438 uid = EH_LANDING_PAD_NR (decl);
4439 if (uid)
4441 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4442 if (decl != lp->post_landing_pad)
4444 error ("incorrect setting of landing pad number");
4445 err |= true;
4449 return err;
4452 /* Verify the GIMPLE statement STMT. Returns true if there is an
4453 error, otherwise false. */
4455 static bool
4456 verify_gimple_stmt (gimple stmt)
4458 switch (gimple_code (stmt))
4460 case GIMPLE_ASSIGN:
4461 return verify_gimple_assign (stmt);
4463 case GIMPLE_LABEL:
4464 return verify_gimple_label (stmt);
4466 case GIMPLE_CALL:
4467 return verify_gimple_call (stmt);
4469 case GIMPLE_COND:
4470 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4472 error ("invalid comparison code in gimple cond");
4473 return true;
4475 if (!(!gimple_cond_true_label (stmt)
4476 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4477 || !(!gimple_cond_false_label (stmt)
4478 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4480 error ("invalid labels in gimple cond");
4481 return true;
4484 return verify_gimple_comparison (boolean_type_node,
4485 gimple_cond_lhs (stmt),
4486 gimple_cond_rhs (stmt));
4488 case GIMPLE_GOTO:
4489 return verify_gimple_goto (stmt);
4491 case GIMPLE_SWITCH:
4492 return verify_gimple_switch (stmt);
4494 case GIMPLE_RETURN:
4495 return verify_gimple_return (stmt);
4497 case GIMPLE_ASM:
4498 return false;
4500 case GIMPLE_TRANSACTION:
4501 return verify_gimple_transaction (stmt);
4503 /* Tuples that do not have tree operands. */
4504 case GIMPLE_NOP:
4505 case GIMPLE_PREDICT:
4506 case GIMPLE_RESX:
4507 case GIMPLE_EH_DISPATCH:
4508 case GIMPLE_EH_MUST_NOT_THROW:
4509 return false;
4511 CASE_GIMPLE_OMP:
4512 /* OpenMP directives are validated by the FE and never operated
4513 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4514 non-gimple expressions when the main index variable has had
4515 its address taken. This does not affect the loop itself
4516 because the header of an GIMPLE_OMP_FOR is merely used to determine
4517 how to setup the parallel iteration. */
4518 return false;
4520 case GIMPLE_DEBUG:
4521 return verify_gimple_debug (stmt);
4523 default:
4524 gcc_unreachable ();
4528 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4529 and false otherwise. */
4531 static bool
4532 verify_gimple_phi (gimple phi)
4534 bool err = false;
4535 unsigned i;
4536 tree phi_result = gimple_phi_result (phi);
4537 bool virtual_p;
4539 if (!phi_result)
4541 error ("invalid PHI result");
4542 return true;
4545 virtual_p = virtual_operand_p (phi_result);
4546 if (TREE_CODE (phi_result) != SSA_NAME
4547 || (virtual_p
4548 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4550 error ("invalid PHI result");
4551 err = true;
4554 for (i = 0; i < gimple_phi_num_args (phi); i++)
4556 tree t = gimple_phi_arg_def (phi, i);
4558 if (!t)
4560 error ("missing PHI def");
4561 err |= true;
4562 continue;
4564 /* Addressable variables do have SSA_NAMEs but they
4565 are not considered gimple values. */
4566 else if ((TREE_CODE (t) == SSA_NAME
4567 && virtual_p != virtual_operand_p (t))
4568 || (virtual_p
4569 && (TREE_CODE (t) != SSA_NAME
4570 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4571 || (!virtual_p
4572 && !is_gimple_val (t)))
4574 error ("invalid PHI argument");
4575 debug_generic_expr (t);
4576 err |= true;
4578 #ifdef ENABLE_TYPES_CHECKING
4579 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4581 error ("incompatible types in PHI argument %u", i);
4582 debug_generic_stmt (TREE_TYPE (phi_result));
4583 debug_generic_stmt (TREE_TYPE (t));
4584 err |= true;
4586 #endif
4589 return err;
4592 /* Verify the GIMPLE statements inside the sequence STMTS. */
4594 static bool
4595 verify_gimple_in_seq_2 (gimple_seq stmts)
4597 gimple_stmt_iterator ittr;
4598 bool err = false;
4600 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4602 gimple stmt = gsi_stmt (ittr);
4604 switch (gimple_code (stmt))
4606 case GIMPLE_BIND:
4607 err |= verify_gimple_in_seq_2 (gimple_bind_body (stmt));
4608 break;
4610 case GIMPLE_TRY:
4611 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4612 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4613 break;
4615 case GIMPLE_EH_FILTER:
4616 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4617 break;
4619 case GIMPLE_EH_ELSE:
4620 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt));
4621 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt));
4622 break;
4624 case GIMPLE_CATCH:
4625 err |= verify_gimple_in_seq_2 (gimple_catch_handler (stmt));
4626 break;
4628 case GIMPLE_TRANSACTION:
4629 err |= verify_gimple_transaction (stmt);
4630 break;
4632 default:
4634 bool err2 = verify_gimple_stmt (stmt);
4635 if (err2)
4636 debug_gimple_stmt (stmt);
4637 err |= err2;
4642 return err;
4645 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4646 is a problem, otherwise false. */
4648 static bool
4649 verify_gimple_transaction (gimple stmt)
4651 tree lab = gimple_transaction_label (stmt);
4652 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4653 return true;
4654 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4658 /* Verify the GIMPLE statements inside the statement list STMTS. */
4660 DEBUG_FUNCTION void
4661 verify_gimple_in_seq (gimple_seq stmts)
4663 timevar_push (TV_TREE_STMT_VERIFY);
4664 if (verify_gimple_in_seq_2 (stmts))
4665 internal_error ("verify_gimple failed");
4666 timevar_pop (TV_TREE_STMT_VERIFY);
4669 /* Return true when the T can be shared. */
4671 static bool
4672 tree_node_can_be_shared (tree t)
4674 if (IS_TYPE_OR_DECL_P (t)
4675 || is_gimple_min_invariant (t)
4676 || TREE_CODE (t) == SSA_NAME
4677 || t == error_mark_node
4678 || TREE_CODE (t) == IDENTIFIER_NODE)
4679 return true;
4681 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4682 return true;
4684 if (DECL_P (t))
4685 return true;
4687 return false;
4690 /* Called via walk_tree. Verify tree sharing. */
4692 static tree
4693 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4695 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4697 if (tree_node_can_be_shared (*tp))
4699 *walk_subtrees = false;
4700 return NULL;
4703 if (pointer_set_insert (visited, *tp))
4704 return *tp;
4706 return NULL;
4709 /* Called via walk_gimple_stmt. Verify tree sharing. */
4711 static tree
4712 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4714 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4715 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4718 static bool eh_error_found;
4719 static int
4720 verify_eh_throw_stmt_node (void **slot, void *data)
4722 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4723 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4725 if (!pointer_set_contains (visited, node->stmt))
4727 error ("dead STMT in EH table");
4728 debug_gimple_stmt (node->stmt);
4729 eh_error_found = true;
4731 return 1;
4734 /* Verify if the location LOCs block is in BLOCKS. */
4736 static bool
4737 verify_location (pointer_set_t *blocks, location_t loc)
4739 tree block = LOCATION_BLOCK (loc);
4740 if (block != NULL_TREE
4741 && !pointer_set_contains (blocks, block))
4743 error ("location references block not in block tree");
4744 return true;
4746 if (block != NULL_TREE)
4747 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4748 return false;
4751 /* Called via walk_tree. Verify that expressions have no blocks. */
4753 static tree
4754 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4756 if (!EXPR_P (*tp))
4758 *walk_subtrees = false;
4759 return NULL;
4762 location_t loc = EXPR_LOCATION (*tp);
4763 if (LOCATION_BLOCK (loc) != NULL)
4764 return *tp;
4766 return NULL;
4769 /* Called via walk_tree. Verify locations of expressions. */
4771 static tree
4772 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4774 struct pointer_set_t *blocks = (struct pointer_set_t *) data;
4776 if (TREE_CODE (*tp) == VAR_DECL
4777 && DECL_HAS_DEBUG_EXPR_P (*tp))
4779 tree t = DECL_DEBUG_EXPR (*tp);
4780 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4781 if (addr)
4782 return addr;
4784 if ((TREE_CODE (*tp) == VAR_DECL
4785 || TREE_CODE (*tp) == PARM_DECL
4786 || TREE_CODE (*tp) == RESULT_DECL)
4787 && DECL_HAS_VALUE_EXPR_P (*tp))
4789 tree t = DECL_VALUE_EXPR (*tp);
4790 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4791 if (addr)
4792 return addr;
4795 if (!EXPR_P (*tp))
4797 *walk_subtrees = false;
4798 return NULL;
4801 location_t loc = EXPR_LOCATION (*tp);
4802 if (verify_location (blocks, loc))
4803 return *tp;
4805 return NULL;
4808 /* Called via walk_gimple_op. Verify locations of expressions. */
4810 static tree
4811 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4813 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4814 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4817 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4819 static void
4820 collect_subblocks (pointer_set_t *blocks, tree block)
4822 tree t;
4823 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4825 pointer_set_insert (blocks, t);
4826 collect_subblocks (blocks, t);
4830 /* Verify the GIMPLE statements in the CFG of FN. */
4832 DEBUG_FUNCTION void
4833 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
4835 basic_block bb;
4836 bool err = false;
4837 struct pointer_set_t *visited, *visited_stmts, *blocks;
4839 timevar_push (TV_TREE_STMT_VERIFY);
4840 visited = pointer_set_create ();
4841 visited_stmts = pointer_set_create ();
4843 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4844 blocks = pointer_set_create ();
4845 if (DECL_INITIAL (fn->decl))
4847 pointer_set_insert (blocks, DECL_INITIAL (fn->decl));
4848 collect_subblocks (blocks, DECL_INITIAL (fn->decl));
4851 FOR_EACH_BB_FN (bb, fn)
4853 gimple_stmt_iterator gsi;
4855 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4857 gimple phi = gsi_stmt (gsi);
4858 bool err2 = false;
4859 unsigned i;
4861 pointer_set_insert (visited_stmts, phi);
4863 if (gimple_bb (phi) != bb)
4865 error ("gimple_bb (phi) is set to a wrong basic block");
4866 err2 = true;
4869 err2 |= verify_gimple_phi (phi);
4871 /* Only PHI arguments have locations. */
4872 if (gimple_location (phi) != UNKNOWN_LOCATION)
4874 error ("PHI node with location");
4875 err2 = true;
4878 for (i = 0; i < gimple_phi_num_args (phi); i++)
4880 tree arg = gimple_phi_arg_def (phi, i);
4881 tree addr = walk_tree (&arg, verify_node_sharing_1,
4882 visited, NULL);
4883 if (addr)
4885 error ("incorrect sharing of tree nodes");
4886 debug_generic_expr (addr);
4887 err2 |= true;
4889 location_t loc = gimple_phi_arg_location (phi, i);
4890 if (virtual_operand_p (gimple_phi_result (phi))
4891 && loc != UNKNOWN_LOCATION)
4893 error ("virtual PHI with argument locations");
4894 err2 = true;
4896 addr = walk_tree (&arg, verify_expr_location_1, blocks, NULL);
4897 if (addr)
4899 debug_generic_expr (addr);
4900 err2 = true;
4902 err2 |= verify_location (blocks, loc);
4905 if (err2)
4906 debug_gimple_stmt (phi);
4907 err |= err2;
4910 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4912 gimple stmt = gsi_stmt (gsi);
4913 bool err2 = false;
4914 struct walk_stmt_info wi;
4915 tree addr;
4916 int lp_nr;
4918 pointer_set_insert (visited_stmts, stmt);
4920 if (gimple_bb (stmt) != bb)
4922 error ("gimple_bb (stmt) is set to a wrong basic block");
4923 err2 = true;
4926 err2 |= verify_gimple_stmt (stmt);
4927 err2 |= verify_location (blocks, gimple_location (stmt));
4929 memset (&wi, 0, sizeof (wi));
4930 wi.info = (void *) visited;
4931 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
4932 if (addr)
4934 error ("incorrect sharing of tree nodes");
4935 debug_generic_expr (addr);
4936 err2 |= true;
4939 memset (&wi, 0, sizeof (wi));
4940 wi.info = (void *) blocks;
4941 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
4942 if (addr)
4944 debug_generic_expr (addr);
4945 err2 |= true;
4948 /* ??? Instead of not checking these stmts at all the walker
4949 should know its context via wi. */
4950 if (!is_gimple_debug (stmt)
4951 && !is_gimple_omp (stmt))
4953 memset (&wi, 0, sizeof (wi));
4954 addr = walk_gimple_op (stmt, verify_expr, &wi);
4955 if (addr)
4957 debug_generic_expr (addr);
4958 inform (gimple_location (stmt), "in statement");
4959 err2 |= true;
4963 /* If the statement is marked as part of an EH region, then it is
4964 expected that the statement could throw. Verify that when we
4965 have optimizations that simplify statements such that we prove
4966 that they cannot throw, that we update other data structures
4967 to match. */
4968 lp_nr = lookup_stmt_eh_lp (stmt);
4969 if (lp_nr > 0)
4971 if (!stmt_could_throw_p (stmt))
4973 if (verify_nothrow)
4975 error ("statement marked for throw, but doesn%'t");
4976 err2 |= true;
4979 else if (!gsi_one_before_end_p (gsi))
4981 error ("statement marked for throw in middle of block");
4982 err2 |= true;
4986 if (err2)
4987 debug_gimple_stmt (stmt);
4988 err |= err2;
4992 eh_error_found = false;
4993 if (get_eh_throw_stmt_table (cfun))
4994 htab_traverse (get_eh_throw_stmt_table (cfun),
4995 verify_eh_throw_stmt_node,
4996 visited_stmts);
4998 if (err || eh_error_found)
4999 internal_error ("verify_gimple failed");
5001 pointer_set_destroy (visited);
5002 pointer_set_destroy (visited_stmts);
5003 pointer_set_destroy (blocks);
5004 verify_histograms ();
5005 timevar_pop (TV_TREE_STMT_VERIFY);
5009 /* Verifies that the flow information is OK. */
5011 static int
5012 gimple_verify_flow_info (void)
5014 int err = 0;
5015 basic_block bb;
5016 gimple_stmt_iterator gsi;
5017 gimple stmt;
5018 edge e;
5019 edge_iterator ei;
5021 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5022 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5024 error ("ENTRY_BLOCK has IL associated with it");
5025 err = 1;
5028 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5029 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5031 error ("EXIT_BLOCK has IL associated with it");
5032 err = 1;
5035 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5036 if (e->flags & EDGE_FALLTHRU)
5038 error ("fallthru to exit from bb %d", e->src->index);
5039 err = 1;
5042 FOR_EACH_BB_FN (bb, cfun)
5044 bool found_ctrl_stmt = false;
5046 stmt = NULL;
5048 /* Skip labels on the start of basic block. */
5049 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5051 tree label;
5052 gimple prev_stmt = stmt;
5054 stmt = gsi_stmt (gsi);
5056 if (gimple_code (stmt) != GIMPLE_LABEL)
5057 break;
5059 label = gimple_label_label (stmt);
5060 if (prev_stmt && DECL_NONLOCAL (label))
5062 error ("nonlocal label ");
5063 print_generic_expr (stderr, label, 0);
5064 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5065 bb->index);
5066 err = 1;
5069 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5071 error ("EH landing pad label ");
5072 print_generic_expr (stderr, label, 0);
5073 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5074 bb->index);
5075 err = 1;
5078 if (label_to_block (label) != bb)
5080 error ("label ");
5081 print_generic_expr (stderr, label, 0);
5082 fprintf (stderr, " to block does not match in bb %d",
5083 bb->index);
5084 err = 1;
5087 if (decl_function_context (label) != current_function_decl)
5089 error ("label ");
5090 print_generic_expr (stderr, label, 0);
5091 fprintf (stderr, " has incorrect context in bb %d",
5092 bb->index);
5093 err = 1;
5097 /* Verify that body of basic block BB is free of control flow. */
5098 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5100 gimple stmt = gsi_stmt (gsi);
5102 if (found_ctrl_stmt)
5104 error ("control flow in the middle of basic block %d",
5105 bb->index);
5106 err = 1;
5109 if (stmt_ends_bb_p (stmt))
5110 found_ctrl_stmt = true;
5112 if (gimple_code (stmt) == GIMPLE_LABEL)
5114 error ("label ");
5115 print_generic_expr (stderr, gimple_label_label (stmt), 0);
5116 fprintf (stderr, " in the middle of basic block %d", bb->index);
5117 err = 1;
5121 gsi = gsi_last_bb (bb);
5122 if (gsi_end_p (gsi))
5123 continue;
5125 stmt = gsi_stmt (gsi);
5127 if (gimple_code (stmt) == GIMPLE_LABEL)
5128 continue;
5130 err |= verify_eh_edges (stmt);
5132 if (is_ctrl_stmt (stmt))
5134 FOR_EACH_EDGE (e, ei, bb->succs)
5135 if (e->flags & EDGE_FALLTHRU)
5137 error ("fallthru edge after a control statement in bb %d",
5138 bb->index);
5139 err = 1;
5143 if (gimple_code (stmt) != GIMPLE_COND)
5145 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5146 after anything else but if statement. */
5147 FOR_EACH_EDGE (e, ei, bb->succs)
5148 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5150 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5151 bb->index);
5152 err = 1;
5156 switch (gimple_code (stmt))
5158 case GIMPLE_COND:
5160 edge true_edge;
5161 edge false_edge;
5163 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5165 if (!true_edge
5166 || !false_edge
5167 || !(true_edge->flags & EDGE_TRUE_VALUE)
5168 || !(false_edge->flags & EDGE_FALSE_VALUE)
5169 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5170 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5171 || EDGE_COUNT (bb->succs) >= 3)
5173 error ("wrong outgoing edge flags at end of bb %d",
5174 bb->index);
5175 err = 1;
5178 break;
5180 case GIMPLE_GOTO:
5181 if (simple_goto_p (stmt))
5183 error ("explicit goto at end of bb %d", bb->index);
5184 err = 1;
5186 else
5188 /* FIXME. We should double check that the labels in the
5189 destination blocks have their address taken. */
5190 FOR_EACH_EDGE (e, ei, bb->succs)
5191 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5192 | EDGE_FALSE_VALUE))
5193 || !(e->flags & EDGE_ABNORMAL))
5195 error ("wrong outgoing edge flags at end of bb %d",
5196 bb->index);
5197 err = 1;
5200 break;
5202 case GIMPLE_CALL:
5203 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5204 break;
5205 /* ... fallthru ... */
5206 case GIMPLE_RETURN:
5207 if (!single_succ_p (bb)
5208 || (single_succ_edge (bb)->flags
5209 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5210 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5212 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5213 err = 1;
5215 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5217 error ("return edge does not point to exit in bb %d",
5218 bb->index);
5219 err = 1;
5221 break;
5223 case GIMPLE_SWITCH:
5225 tree prev;
5226 edge e;
5227 size_t i, n;
5229 n = gimple_switch_num_labels (stmt);
5231 /* Mark all the destination basic blocks. */
5232 for (i = 0; i < n; ++i)
5234 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
5235 basic_block label_bb = label_to_block (lab);
5236 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5237 label_bb->aux = (void *)1;
5240 /* Verify that the case labels are sorted. */
5241 prev = gimple_switch_label (stmt, 0);
5242 for (i = 1; i < n; ++i)
5244 tree c = gimple_switch_label (stmt, i);
5245 if (!CASE_LOW (c))
5247 error ("found default case not at the start of "
5248 "case vector");
5249 err = 1;
5250 continue;
5252 if (CASE_LOW (prev)
5253 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5255 error ("case labels not sorted: ");
5256 print_generic_expr (stderr, prev, 0);
5257 fprintf (stderr," is greater than ");
5258 print_generic_expr (stderr, c, 0);
5259 fprintf (stderr," but comes before it.\n");
5260 err = 1;
5262 prev = c;
5264 /* VRP will remove the default case if it can prove it will
5265 never be executed. So do not verify there always exists
5266 a default case here. */
5268 FOR_EACH_EDGE (e, ei, bb->succs)
5270 if (!e->dest->aux)
5272 error ("extra outgoing edge %d->%d",
5273 bb->index, e->dest->index);
5274 err = 1;
5277 e->dest->aux = (void *)2;
5278 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5279 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5281 error ("wrong outgoing edge flags at end of bb %d",
5282 bb->index);
5283 err = 1;
5287 /* Check that we have all of them. */
5288 for (i = 0; i < n; ++i)
5290 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
5291 basic_block label_bb = label_to_block (lab);
5293 if (label_bb->aux != (void *)2)
5295 error ("missing edge %i->%i", bb->index, label_bb->index);
5296 err = 1;
5300 FOR_EACH_EDGE (e, ei, bb->succs)
5301 e->dest->aux = (void *)0;
5303 break;
5305 case GIMPLE_EH_DISPATCH:
5306 err |= verify_eh_dispatch_edge (stmt);
5307 break;
5309 default:
5310 break;
5314 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5315 verify_dominators (CDI_DOMINATORS);
5317 return err;
5321 /* Updates phi nodes after creating a forwarder block joined
5322 by edge FALLTHRU. */
5324 static void
5325 gimple_make_forwarder_block (edge fallthru)
5327 edge e;
5328 edge_iterator ei;
5329 basic_block dummy, bb;
5330 tree var;
5331 gimple_stmt_iterator gsi;
5333 dummy = fallthru->src;
5334 bb = fallthru->dest;
5336 if (single_pred_p (bb))
5337 return;
5339 /* If we redirected a branch we must create new PHI nodes at the
5340 start of BB. */
5341 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5343 gimple phi, new_phi;
5345 phi = gsi_stmt (gsi);
5346 var = gimple_phi_result (phi);
5347 new_phi = create_phi_node (var, bb);
5348 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5349 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5350 UNKNOWN_LOCATION);
5353 /* Add the arguments we have stored on edges. */
5354 FOR_EACH_EDGE (e, ei, bb->preds)
5356 if (e == fallthru)
5357 continue;
5359 flush_pending_stmts (e);
5364 /* Return a non-special label in the head of basic block BLOCK.
5365 Create one if it doesn't exist. */
5367 tree
5368 gimple_block_label (basic_block bb)
5370 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5371 bool first = true;
5372 tree label;
5373 gimple stmt;
5375 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5377 stmt = gsi_stmt (i);
5378 if (gimple_code (stmt) != GIMPLE_LABEL)
5379 break;
5380 label = gimple_label_label (stmt);
5381 if (!DECL_NONLOCAL (label))
5383 if (!first)
5384 gsi_move_before (&i, &s);
5385 return label;
5389 label = create_artificial_label (UNKNOWN_LOCATION);
5390 stmt = gimple_build_label (label);
5391 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5392 return label;
5396 /* Attempt to perform edge redirection by replacing a possibly complex
5397 jump instruction by a goto or by removing the jump completely.
5398 This can apply only if all edges now point to the same block. The
5399 parameters and return values are equivalent to
5400 redirect_edge_and_branch. */
5402 static edge
5403 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5405 basic_block src = e->src;
5406 gimple_stmt_iterator i;
5407 gimple stmt;
5409 /* We can replace or remove a complex jump only when we have exactly
5410 two edges. */
5411 if (EDGE_COUNT (src->succs) != 2
5412 /* Verify that all targets will be TARGET. Specifically, the
5413 edge that is not E must also go to TARGET. */
5414 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5415 return NULL;
5417 i = gsi_last_bb (src);
5418 if (gsi_end_p (i))
5419 return NULL;
5421 stmt = gsi_stmt (i);
5423 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5425 gsi_remove (&i, true);
5426 e = ssa_redirect_edge (e, target);
5427 e->flags = EDGE_FALLTHRU;
5428 return e;
5431 return NULL;
5435 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5436 edge representing the redirected branch. */
5438 static edge
5439 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5441 basic_block bb = e->src;
5442 gimple_stmt_iterator gsi;
5443 edge ret;
5444 gimple stmt;
5446 if (e->flags & EDGE_ABNORMAL)
5447 return NULL;
5449 if (e->dest == dest)
5450 return NULL;
5452 if (e->flags & EDGE_EH)
5453 return redirect_eh_edge (e, dest);
5455 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5457 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5458 if (ret)
5459 return ret;
5462 gsi = gsi_last_bb (bb);
5463 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5465 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5467 case GIMPLE_COND:
5468 /* For COND_EXPR, we only need to redirect the edge. */
5469 break;
5471 case GIMPLE_GOTO:
5472 /* No non-abnormal edges should lead from a non-simple goto, and
5473 simple ones should be represented implicitly. */
5474 gcc_unreachable ();
5476 case GIMPLE_SWITCH:
5478 tree label = gimple_block_label (dest);
5479 tree cases = get_cases_for_edge (e, stmt);
5481 /* If we have a list of cases associated with E, then use it
5482 as it's a lot faster than walking the entire case vector. */
5483 if (cases)
5485 edge e2 = find_edge (e->src, dest);
5486 tree last, first;
5488 first = cases;
5489 while (cases)
5491 last = cases;
5492 CASE_LABEL (cases) = label;
5493 cases = CASE_CHAIN (cases);
5496 /* If there was already an edge in the CFG, then we need
5497 to move all the cases associated with E to E2. */
5498 if (e2)
5500 tree cases2 = get_cases_for_edge (e2, stmt);
5502 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5503 CASE_CHAIN (cases2) = first;
5505 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5507 else
5509 size_t i, n = gimple_switch_num_labels (stmt);
5511 for (i = 0; i < n; i++)
5513 tree elt = gimple_switch_label (stmt, i);
5514 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5515 CASE_LABEL (elt) = label;
5519 break;
5521 case GIMPLE_ASM:
5523 int i, n = gimple_asm_nlabels (stmt);
5524 tree label = NULL;
5526 for (i = 0; i < n; ++i)
5528 tree cons = gimple_asm_label_op (stmt, i);
5529 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5531 if (!label)
5532 label = gimple_block_label (dest);
5533 TREE_VALUE (cons) = label;
5537 /* If we didn't find any label matching the former edge in the
5538 asm labels, we must be redirecting the fallthrough
5539 edge. */
5540 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5542 break;
5544 case GIMPLE_RETURN:
5545 gsi_remove (&gsi, true);
5546 e->flags |= EDGE_FALLTHRU;
5547 break;
5549 case GIMPLE_OMP_RETURN:
5550 case GIMPLE_OMP_CONTINUE:
5551 case GIMPLE_OMP_SECTIONS_SWITCH:
5552 case GIMPLE_OMP_FOR:
5553 /* The edges from OMP constructs can be simply redirected. */
5554 break;
5556 case GIMPLE_EH_DISPATCH:
5557 if (!(e->flags & EDGE_FALLTHRU))
5558 redirect_eh_dispatch_edge (stmt, e, dest);
5559 break;
5561 case GIMPLE_TRANSACTION:
5562 /* The ABORT edge has a stored label associated with it, otherwise
5563 the edges are simply redirectable. */
5564 if (e->flags == 0)
5565 gimple_transaction_set_label (stmt, gimple_block_label (dest));
5566 break;
5568 default:
5569 /* Otherwise it must be a fallthru edge, and we don't need to
5570 do anything besides redirecting it. */
5571 gcc_assert (e->flags & EDGE_FALLTHRU);
5572 break;
5575 /* Update/insert PHI nodes as necessary. */
5577 /* Now update the edges in the CFG. */
5578 e = ssa_redirect_edge (e, dest);
5580 return e;
5583 /* Returns true if it is possible to remove edge E by redirecting
5584 it to the destination of the other edge from E->src. */
5586 static bool
5587 gimple_can_remove_branch_p (const_edge e)
5589 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5590 return false;
5592 return true;
5595 /* Simple wrapper, as we can always redirect fallthru edges. */
5597 static basic_block
5598 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5600 e = gimple_redirect_edge_and_branch (e, dest);
5601 gcc_assert (e);
5603 return NULL;
5607 /* Splits basic block BB after statement STMT (but at least after the
5608 labels). If STMT is NULL, BB is split just after the labels. */
5610 static basic_block
5611 gimple_split_block (basic_block bb, void *stmt)
5613 gimple_stmt_iterator gsi;
5614 gimple_stmt_iterator gsi_tgt;
5615 gimple act;
5616 gimple_seq list;
5617 basic_block new_bb;
5618 edge e;
5619 edge_iterator ei;
5621 new_bb = create_empty_bb (bb);
5623 /* Redirect the outgoing edges. */
5624 new_bb->succs = bb->succs;
5625 bb->succs = NULL;
5626 FOR_EACH_EDGE (e, ei, new_bb->succs)
5627 e->src = new_bb;
5629 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5630 stmt = NULL;
5632 /* Move everything from GSI to the new basic block. */
5633 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5635 act = gsi_stmt (gsi);
5636 if (gimple_code (act) == GIMPLE_LABEL)
5637 continue;
5639 if (!stmt)
5640 break;
5642 if (stmt == act)
5644 gsi_next (&gsi);
5645 break;
5649 if (gsi_end_p (gsi))
5650 return new_bb;
5652 /* Split the statement list - avoid re-creating new containers as this
5653 brings ugly quadratic memory consumption in the inliner.
5654 (We are still quadratic since we need to update stmt BB pointers,
5655 sadly.) */
5656 gsi_split_seq_before (&gsi, &list);
5657 set_bb_seq (new_bb, list);
5658 for (gsi_tgt = gsi_start (list);
5659 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5660 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5662 return new_bb;
5666 /* Moves basic block BB after block AFTER. */
5668 static bool
5669 gimple_move_block_after (basic_block bb, basic_block after)
5671 if (bb->prev_bb == after)
5672 return true;
5674 unlink_block (bb);
5675 link_block (bb, after);
5677 return true;
5681 /* Return TRUE if block BB has no executable statements, otherwise return
5682 FALSE. */
5684 static bool
5685 gimple_empty_block_p (basic_block bb)
5687 /* BB must have no executable statements. */
5688 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5689 if (phi_nodes (bb))
5690 return false;
5691 if (gsi_end_p (gsi))
5692 return true;
5693 if (is_gimple_debug (gsi_stmt (gsi)))
5694 gsi_next_nondebug (&gsi);
5695 return gsi_end_p (gsi);
5699 /* Split a basic block if it ends with a conditional branch and if the
5700 other part of the block is not empty. */
5702 static basic_block
5703 gimple_split_block_before_cond_jump (basic_block bb)
5705 gimple last, split_point;
5706 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5707 if (gsi_end_p (gsi))
5708 return NULL;
5709 last = gsi_stmt (gsi);
5710 if (gimple_code (last) != GIMPLE_COND
5711 && gimple_code (last) != GIMPLE_SWITCH)
5712 return NULL;
5713 gsi_prev_nondebug (&gsi);
5714 split_point = gsi_stmt (gsi);
5715 return split_block (bb, split_point)->dest;
5719 /* Return true if basic_block can be duplicated. */
5721 static bool
5722 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5724 return true;
5727 /* Create a duplicate of the basic block BB. NOTE: This does not
5728 preserve SSA form. */
5730 static basic_block
5731 gimple_duplicate_bb (basic_block bb)
5733 basic_block new_bb;
5734 gimple_stmt_iterator gsi, gsi_tgt;
5735 gimple_seq phis = phi_nodes (bb);
5736 gimple phi, stmt, copy;
5738 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5740 /* Copy the PHI nodes. We ignore PHI node arguments here because
5741 the incoming edges have not been setup yet. */
5742 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
5744 phi = gsi_stmt (gsi);
5745 copy = create_phi_node (NULL_TREE, new_bb);
5746 create_new_def_for (gimple_phi_result (phi), copy,
5747 gimple_phi_result_ptr (copy));
5748 gimple_set_uid (copy, gimple_uid (phi));
5751 gsi_tgt = gsi_start_bb (new_bb);
5752 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5754 def_operand_p def_p;
5755 ssa_op_iter op_iter;
5756 tree lhs;
5758 stmt = gsi_stmt (gsi);
5759 if (gimple_code (stmt) == GIMPLE_LABEL)
5760 continue;
5762 /* Don't duplicate label debug stmts. */
5763 if (gimple_debug_bind_p (stmt)
5764 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5765 == LABEL_DECL)
5766 continue;
5768 /* Create a new copy of STMT and duplicate STMT's virtual
5769 operands. */
5770 copy = gimple_copy (stmt);
5771 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5773 maybe_duplicate_eh_stmt (copy, stmt);
5774 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5776 /* When copying around a stmt writing into a local non-user
5777 aggregate, make sure it won't share stack slot with other
5778 vars. */
5779 lhs = gimple_get_lhs (stmt);
5780 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5782 tree base = get_base_address (lhs);
5783 if (base
5784 && (TREE_CODE (base) == VAR_DECL
5785 || TREE_CODE (base) == RESULT_DECL)
5786 && DECL_IGNORED_P (base)
5787 && !TREE_STATIC (base)
5788 && !DECL_EXTERNAL (base)
5789 && (TREE_CODE (base) != VAR_DECL
5790 || !DECL_HAS_VALUE_EXPR_P (base)))
5791 DECL_NONSHAREABLE (base) = 1;
5794 /* Create new names for all the definitions created by COPY and
5795 add replacement mappings for each new name. */
5796 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5797 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5800 return new_bb;
5803 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5805 static void
5806 add_phi_args_after_copy_edge (edge e_copy)
5808 basic_block bb, bb_copy = e_copy->src, dest;
5809 edge e;
5810 edge_iterator ei;
5811 gimple phi, phi_copy;
5812 tree def;
5813 gimple_stmt_iterator psi, psi_copy;
5815 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5816 return;
5818 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5820 if (e_copy->dest->flags & BB_DUPLICATED)
5821 dest = get_bb_original (e_copy->dest);
5822 else
5823 dest = e_copy->dest;
5825 e = find_edge (bb, dest);
5826 if (!e)
5828 /* During loop unrolling the target of the latch edge is copied.
5829 In this case we are not looking for edge to dest, but to
5830 duplicated block whose original was dest. */
5831 FOR_EACH_EDGE (e, ei, bb->succs)
5833 if ((e->dest->flags & BB_DUPLICATED)
5834 && get_bb_original (e->dest) == dest)
5835 break;
5838 gcc_assert (e != NULL);
5841 for (psi = gsi_start_phis (e->dest),
5842 psi_copy = gsi_start_phis (e_copy->dest);
5843 !gsi_end_p (psi);
5844 gsi_next (&psi), gsi_next (&psi_copy))
5846 phi = gsi_stmt (psi);
5847 phi_copy = gsi_stmt (psi_copy);
5848 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5849 add_phi_arg (phi_copy, def, e_copy,
5850 gimple_phi_arg_location_from_edge (phi, e));
5855 /* Basic block BB_COPY was created by code duplication. Add phi node
5856 arguments for edges going out of BB_COPY. The blocks that were
5857 duplicated have BB_DUPLICATED set. */
5859 void
5860 add_phi_args_after_copy_bb (basic_block bb_copy)
5862 edge e_copy;
5863 edge_iterator ei;
5865 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5867 add_phi_args_after_copy_edge (e_copy);
5871 /* Blocks in REGION_COPY array of length N_REGION were created by
5872 duplication of basic blocks. Add phi node arguments for edges
5873 going from these blocks. If E_COPY is not NULL, also add
5874 phi node arguments for its destination.*/
5876 void
5877 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5878 edge e_copy)
5880 unsigned i;
5882 for (i = 0; i < n_region; i++)
5883 region_copy[i]->flags |= BB_DUPLICATED;
5885 for (i = 0; i < n_region; i++)
5886 add_phi_args_after_copy_bb (region_copy[i]);
5887 if (e_copy)
5888 add_phi_args_after_copy_edge (e_copy);
5890 for (i = 0; i < n_region; i++)
5891 region_copy[i]->flags &= ~BB_DUPLICATED;
5894 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5895 important exit edge EXIT. By important we mean that no SSA name defined
5896 inside region is live over the other exit edges of the region. All entry
5897 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5898 to the duplicate of the region. Dominance and loop information is
5899 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
5900 UPDATE_DOMINANCE is false then we assume that the caller will update the
5901 dominance information after calling this function. The new basic
5902 blocks are stored to REGION_COPY in the same order as they had in REGION,
5903 provided that REGION_COPY is not NULL.
5904 The function returns false if it is unable to copy the region,
5905 true otherwise. */
5907 bool
5908 gimple_duplicate_sese_region (edge entry, edge exit,
5909 basic_block *region, unsigned n_region,
5910 basic_block *region_copy,
5911 bool update_dominance)
5913 unsigned i;
5914 bool free_region_copy = false, copying_header = false;
5915 struct loop *loop = entry->dest->loop_father;
5916 edge exit_copy;
5917 vec<basic_block> doms;
5918 edge redirected;
5919 int total_freq = 0, entry_freq = 0;
5920 gcov_type total_count = 0, entry_count = 0;
5922 if (!can_copy_bbs_p (region, n_region))
5923 return false;
5925 /* Some sanity checking. Note that we do not check for all possible
5926 missuses of the functions. I.e. if you ask to copy something weird,
5927 it will work, but the state of structures probably will not be
5928 correct. */
5929 for (i = 0; i < n_region; i++)
5931 /* We do not handle subloops, i.e. all the blocks must belong to the
5932 same loop. */
5933 if (region[i]->loop_father != loop)
5934 return false;
5936 if (region[i] != entry->dest
5937 && region[i] == loop->header)
5938 return false;
5941 /* In case the function is used for loop header copying (which is the primary
5942 use), ensure that EXIT and its copy will be new latch and entry edges. */
5943 if (loop->header == entry->dest)
5945 copying_header = true;
5947 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5948 return false;
5950 for (i = 0; i < n_region; i++)
5951 if (region[i] != exit->src
5952 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5953 return false;
5956 initialize_original_copy_tables ();
5958 if (copying_header)
5959 set_loop_copy (loop, loop_outer (loop));
5960 else
5961 set_loop_copy (loop, loop);
5963 if (!region_copy)
5965 region_copy = XNEWVEC (basic_block, n_region);
5966 free_region_copy = true;
5969 /* Record blocks outside the region that are dominated by something
5970 inside. */
5971 if (update_dominance)
5973 doms.create (0);
5974 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5977 if (entry->dest->count)
5979 total_count = entry->dest->count;
5980 entry_count = entry->count;
5981 /* Fix up corner cases, to avoid division by zero or creation of negative
5982 frequencies. */
5983 if (entry_count > total_count)
5984 entry_count = total_count;
5986 else
5988 total_freq = entry->dest->frequency;
5989 entry_freq = EDGE_FREQUENCY (entry);
5990 /* Fix up corner cases, to avoid division by zero or creation of negative
5991 frequencies. */
5992 if (total_freq == 0)
5993 total_freq = 1;
5994 else if (entry_freq > total_freq)
5995 entry_freq = total_freq;
5998 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5999 split_edge_bb_loc (entry), update_dominance);
6000 if (total_count)
6002 scale_bbs_frequencies_gcov_type (region, n_region,
6003 total_count - entry_count,
6004 total_count);
6005 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
6006 total_count);
6008 else
6010 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
6011 total_freq);
6012 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
6015 if (copying_header)
6017 loop->header = exit->dest;
6018 loop->latch = exit->src;
6021 /* Redirect the entry and add the phi node arguments. */
6022 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6023 gcc_assert (redirected != NULL);
6024 flush_pending_stmts (entry);
6026 /* Concerning updating of dominators: We must recount dominators
6027 for entry block and its copy. Anything that is outside of the
6028 region, but was dominated by something inside needs recounting as
6029 well. */
6030 if (update_dominance)
6032 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6033 doms.safe_push (get_bb_original (entry->dest));
6034 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6035 doms.release ();
6038 /* Add the other PHI node arguments. */
6039 add_phi_args_after_copy (region_copy, n_region, NULL);
6041 if (free_region_copy)
6042 free (region_copy);
6044 free_original_copy_tables ();
6045 return true;
6048 /* Checks if BB is part of the region defined by N_REGION BBS. */
6049 static bool
6050 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6052 unsigned int n;
6054 for (n = 0; n < n_region; n++)
6056 if (bb == bbs[n])
6057 return true;
6059 return false;
/* Duplicates REGION consisting of N_REGION blocks.  The new blocks
   are stored to REGION_COPY in the same order in that they appear
   in REGION, if REGION_COPY is not NULL.  ENTRY is the entry to
   the region, EXIT an exit from it.  The condition guarding EXIT
   is moved to ENTRY.  Returns true if duplication succeeds, false
   otherwise.

   For example,

   some_code;
   if (cond)
     {
       body;
     }

   is transformed to

   if (cond)
     {
       some_code;
       body;
     }
   else
     {
       some_code;
     }
*/

/* NOTE(review): the ATTRIBUTE_UNUSED markers below are stale -- every
   one of these parameters is in fact used in the body.  */
bool
gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
			  basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
			  basic_block *region_copy ATTRIBUTE_UNUSED)
  unsigned i;
  bool free_region_copy = false;
  struct loop *loop = exit->dest->loop_father;
  struct loop *orig_loop = entry->dest->loop_father;
  basic_block switch_bb, entry_bb, nentry_bb;
  vec<basic_block> doms;
  int total_freq = 0, exit_freq = 0;
  gcov_type total_count = 0, exit_count = 0;
  edge exits[2], nexits[2], e;
  gimple_stmt_iterator gsi;
  gimple cond_stmt;
  edge sorig, snew;
  basic_block exit_bb;
  gimple_stmt_iterator psi;
  gimple phi;
  tree def;
  struct loop *target, *aloop, *cloop;

  /* EXIT's source must end in a two-way condition; EXITS collects both
     outgoing edges, with the region exit first.  */
  gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
  exits[0] = exit;
  exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);

  if (!can_copy_bbs_p (region, n_region))
    return false;

  initialize_original_copy_tables ();
  set_loop_copy (orig_loop, loop);

  /* Duplicate any subloops of ORIG_LOOP whose header lies inside the
     region, attaching the copies under LOOP.  */
  target= loop;
  for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
    if (bb_part_of_region_p (aloop->header, region, n_region))
      cloop = duplicate_loop (aloop, target);
      duplicate_subloops (aloop, cloop);

  if (!region_copy)
    region_copy = XNEWVEC (basic_block, n_region);
    free_region_copy = true;

  gcc_assert (!need_ssa_update_p (cfun));

  /* Record blocks outside the region that are dominated by something
     inside.  */
  doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);

  /* Profile bookkeeping: with real profile counts use counts, otherwise
     fall back to estimated frequencies.  */
  if (exit->src->count)
    total_count = exit->src->count;
    exit_count = exit->count;
    /* Fix up corner cases, to avoid division by zero or creation of negative
       frequencies.  */
    if (exit_count > total_count)
      exit_count = total_count;
  else
    total_freq = exit->src->frequency;
    exit_freq = EDGE_FREQUENCY (exit);
    /* Fix up corner cases, to avoid division by zero or creation of negative
       frequencies.  */
    if (total_freq == 0)
      total_freq = 1;
    if (exit_freq > total_freq)
      exit_freq = total_freq;

  copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
	    split_edge_bb_loc (exit), true);
  /* Scale originals and copies so the combined profile stays consistent.  */
  if (total_count)
    scale_bbs_frequencies_gcov_type (region, n_region,
				     total_count - exit_count,
				     total_count);
    scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
				     total_count);
  else
    scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
			       total_freq);
    scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);

  /* Create the switch block, and put the exit condition to it.  */
  entry_bb = entry->dest;
  nentry_bb = get_bb_copy (entry_bb);
  /* Reuse ENTRY's source as the switch block when it does not already
     end in a control statement; otherwise split the entry edge.  */
  if (!last_stmt (entry->src)
      || !stmt_ends_bb_p (last_stmt (entry->src)))
    switch_bb = entry->src;
  else
    switch_bb = split_edge (entry);
  set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);

  /* Move a copy of the exit condition into the switch block.  */
  gsi = gsi_last_bb (switch_bb);
  cond_stmt = last_stmt (exit->src);
  gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
  cond_stmt = gimple_copy (cond_stmt);

  gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);

  sorig = single_succ_edge (switch_bb);
  sorig->flags = exits[1]->flags;
  snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);

  /* Register the new edge from SWITCH_BB in loop exit lists.  */
  rescan_loop_exit (snew, true, false);

  /* Add the PHI node arguments.  */
  add_phi_args_after_copy (region_copy, n_region, snew);

  /* Get rid of now superfluous conditions and associated edges (and phi node
     arguments).  */
  exit_bb = exit->dest;

  e = redirect_edge_and_branch (exits[0], exits[1]->dest);
  PENDING_STMT (e) = NULL;

  /* The latch of ORIG_LOOP was copied, and so was the backedge
     to the original header.  We redirect this backedge to EXIT_BB.  */
  for (i = 0; i < n_region; i++)
    if (get_bb_original (region_copy[i]) == orig_loop->latch)
      gcc_assert (single_succ_edge (region_copy[i]));
      e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
      PENDING_STMT (e) = NULL;
      /* Give EXIT_BB's PHIs an argument for the redirected backedge,
	 reusing the value flowing in from the copied exit.  */
      for (psi = gsi_start_phis (exit_bb);
	   !gsi_end_p (psi);
	   gsi_next (&psi))
	phi = gsi_stmt (psi);
	def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
	add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));

  e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
  PENDING_STMT (e) = NULL;

  /* Anything that is outside of the region, but was dominated by something
     inside needs to update dominance info.  */
  iterate_fix_dominators (CDI_DOMINATORS, doms, false);
  doms.release ();
  /* Update the SSA web.  */
  update_ssa (TODO_update_ssa);

  if (free_region_copy)
    free (region_copy);

  free_original_copy_tables ();
  return true;
6252 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6253 adding blocks when the dominator traversal reaches EXIT. This
6254 function silently assumes that ENTRY strictly dominates EXIT. */
6256 void
6257 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6258 vec<basic_block> *bbs_p)
6260 basic_block son;
6262 for (son = first_dom_son (CDI_DOMINATORS, entry);
6263 son;
6264 son = next_dom_son (CDI_DOMINATORS, son))
6266 bbs_p->safe_push (son);
6267 if (son != exit)
6268 gather_blocks_in_sese_region (son, exit, bbs_p);
/* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
   The duplicates are recorded in VARS_MAP, so repeated occurrences of
   the same decl always map to the same duplicate.  Only variables and
   CONST_DECLs are expected here.  */

static void
replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
			   tree to_context)
  tree t = *tp, new_t;
  struct function *f = DECL_STRUCT_FUNCTION (to_context);
  void **loc;

  /* Already owned by TO_CONTEXT: nothing to duplicate.  */
  if (DECL_CONTEXT (t) == to_context)
    return;

  loc = pointer_map_contains (vars_map, t);

  if (!loc)
    /* First occurrence: create the duplicate and cache it.  */
    loc = pointer_map_insert (vars_map, t);

    if (SSA_VAR_P (t))
      /* Variables become fresh locals of the destination function.  */
      new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
      add_local_decl (f, new_t);
    else
      /* Otherwise only CONST_DECLs are handled; a shallow node copy
	 suffices for those.  */
      gcc_assert (TREE_CODE (t) == CONST_DECL);
      new_t = copy_node (t);
    DECL_CONTEXT (new_t) = to_context;

    *loc = new_t;
  else
    /* Reuse the duplicate created on an earlier call.  */
    new_t = (tree) *loc;

  *tp = new_t;
/* Creates an ssa name in TO_CONTEXT equivalent to NAME.
   VARS_MAP maps old ssa names and var_decls to the new ones; the new
   name is cached there so every use of NAME maps to the same
   replacement.  Virtual operands must not reach here.  */

static tree
replace_ssa_name (tree name, struct pointer_map_t *vars_map,
		  tree to_context)
  void **loc;
  tree new_name;

  gcc_assert (!virtual_operand_p (name));

  loc = pointer_map_contains (vars_map, name);

  if (!loc)
    tree decl = SSA_NAME_VAR (name);
    if (decl)
      /* NAME is backed by a decl: duplicate the decl first, then build
	 the new SSA name on top of the duplicate.  */
      replace_by_duplicate_decl (&decl, vars_map, to_context);
      new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
				   decl, SSA_NAME_DEF_STMT (name));
      /* Default defs must stay registered as such in the destination.  */
      if (SSA_NAME_IS_DEFAULT_DEF (name))
	set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
			     decl, new_name);
    else
      /* Anonymous SSA name: copy it directly into TO_CONTEXT.  */
      new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
				   name, SSA_NAME_DEF_STMT (name));

    loc = pointer_map_insert (vars_map, name);
    *loc = new_name;
  else
    new_name = (tree) *loc;

  return new_name;
/* Bookkeeping passed (via walk_stmt_info.info) to move_stmt_r and
   move_stmt_op while relocating statements between functions.  */
struct move_stmt_d
  tree orig_block;	/* BLOCK whose statements are being moved;
			   NULL_TREE matches any non-NULL block.  */
  tree new_block;	/* BLOCK to install on moved expressions.  */
  tree from_context;	/* FUNCTION_DECL statements move from.  */
  tree to_context;	/* FUNCTION_DECL statements move to.  */
  struct pointer_map_t *vars_map;	/* Old decl/SSA name -> duplicate.  */
  htab_t new_label_map;	/* tree_map table: old label -> new label.  */
  struct pointer_map_t *eh_map;		/* Old eh_region -> new eh_region.  */
  bool remap_decls_p;	/* Whether local decls should be replaced by
			   duplicates (cleared inside OMP bodies).  */
/* Helper for move_block_to_fn.  Set TREE_BLOCK in every expression
   contained in *TP if it has been ORIG_BLOCK previously and change the
   DECL_CONTEXT of every local variable referenced in *TP.  Callback
   for walk_gimple_stmt/walk_gimple_seq; DATA is a walk_stmt_info whose
   info field is a struct move_stmt_d.  */

static tree
move_stmt_op (tree *tp, int *walk_subtrees, void *data)
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  tree t = *tp;

  if (EXPR_P (t))
    /* Re-home the expression's BLOCK if it matched ORIG_BLOCK (or any
       block, when ORIG_BLOCK is NULL).  */
    tree block = TREE_BLOCK (t);
    if (block == p->orig_block
	|| (p->orig_block == NULL_TREE
	    && block != NULL_TREE))
      TREE_SET_BLOCK (t, p->new_block);
#ifdef ENABLE_CHECKING
    else if (block != NULL_TREE)
      /* Checking only: any other block must be nested inside
	 ORIG_BLOCK.  */
      while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
	block = BLOCK_SUPERCONTEXT (block);
      gcc_assert (block == p->orig_block);
#endif
  else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
    if (TREE_CODE (t) == SSA_NAME)
      *tp = replace_ssa_name (t, p->vars_map, p->to_context);
    else if (TREE_CODE (t) == LABEL_DECL)
      /* Labels duplicated for EH were recorded in NEW_LABEL_MAP;
	 substitute the duplicate when one exists.  */
      if (p->new_label_map)
	struct tree_map in, *out;
	in.base.from = t;
	out = (struct tree_map *)
	  htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
	if (out)
	  *tp = t = out->to;

      DECL_CONTEXT (t) = p->to_context;
    else if (p->remap_decls_p)
      /* Replace T with its duplicate.  T should no longer appear in the
	 parent function, so this looks wasteful; however, it may appear
	 in referenced_vars, and more importantly, as virtual operands of
	 statements, and in alias lists of other variables.  It would be
	 quite difficult to expunge it from all those places.  ??? It might
	 suffice to do this for addressable variables.  */
      if ((TREE_CODE (t) == VAR_DECL
	   && !is_global_var (t))
	  || TREE_CODE (t) == CONST_DECL)
	replace_by_duplicate_decl (tp, p->vars_map, p->to_context);

    /* Decls and SSA names have no interesting subtrees to walk.  */
    *walk_subtrees = 0;
  else if (TYPE_P (t))
    *walk_subtrees = 0;

  return NULL_TREE;
6430 /* Helper for move_stmt_r. Given an EH region number for the source
6431 function, map that to the duplicate EH regio number in the dest. */
6433 static int
6434 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6436 eh_region old_r, new_r;
6437 void **slot;
6439 old_r = get_eh_region_from_number (old_nr);
6440 slot = pointer_map_contains (p->eh_map, old_r);
6441 new_r = (eh_region) *slot;
6443 return new_r->index;
6446 /* Similar, but operate on INTEGER_CSTs. */
6448 static tree
6449 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6451 int old_nr, new_nr;
6453 old_nr = tree_to_shwi (old_t_nr);
6454 new_nr = move_stmt_eh_region_nr (old_nr, p);
6456 return build_int_cst (integer_type_node, new_nr);
/* Like move_stmt_op, but for gimple statements.

   Helper for move_block_to_fn.  Set GIMPLE_BLOCK in every expression
   contained in the current statement in *GSI_P and change the
   DECL_CONTEXT of every local variable referenced in the current
   statement.  Also remaps EH region numbers embedded in calls, resx
   and eh_dispatch statements via P->eh_map.  */

static tree
move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	     struct walk_stmt_info *wi)
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  gimple stmt = gsi_stmt (*gsi_p);
  tree block = gimple_block (stmt);

  if (block == p->orig_block
      || (p->orig_block == NULL_TREE
	  && block != NULL_TREE))
    gimple_set_block (stmt, p->new_block);

  switch (gimple_code (stmt))
    case GIMPLE_CALL:
      /* Remap the region numbers for __builtin_eh_{pointer,filter}.  */
      tree r, fndecl = gimple_call_fndecl (stmt);
      if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (fndecl))
	  case BUILT_IN_EH_COPY_VALUES:
	    /* Second argument is also a region number.  */
	    r = gimple_call_arg (stmt, 1);
	    r = move_stmt_eh_region_tree_nr (r, p);
	    gimple_call_set_arg (stmt, 1, r);
	    /* FALLTHRU */

	  case BUILT_IN_EH_POINTER:
	  case BUILT_IN_EH_FILTER:
	    r = gimple_call_arg (stmt, 0);
	    r = move_stmt_eh_region_tree_nr (r, p);
	    gimple_call_set_arg (stmt, 0, r);
	    break;

	  default:
	    break;
      break;

    case GIMPLE_RESX:
      int r = gimple_resx_region (stmt);
      r = move_stmt_eh_region_nr (r, p);
      gimple_resx_set_region (stmt, r);
      break;

    case GIMPLE_EH_DISPATCH:
      int r = gimple_eh_dispatch_region (stmt);
      r = move_stmt_eh_region_nr (r, p);
      gimple_eh_dispatch_set_region (stmt, r);
      break;

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_CONTINUE:
      break;
    default:
      if (is_gimple_omp (stmt))
	/* Do not remap variables inside OMP directives.  Variables
	   referenced in clauses and directive header belong to the
	   parent function and should not be moved into the child
	   function.  */
	bool save_remap_decls_p = p->remap_decls_p;
	p->remap_decls_p = false;
	*handled_ops_p = true;

	/* Walk the OMP body ourselves with decl remapping disabled.  */
	walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
			     move_stmt_op, wi);

	p->remap_decls_p = save_remap_decls_p;
      break;

  return NULL_TREE;
/* Move basic block BB from function CFUN to function DEST_FN.  The
   block is moved out of the original linked list and placed after
   block AFTER in the new list.  Also, the block is removed from the
   original array of blocks and placed in DEST_FN's array of blocks.
   If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
   updated to reflect the moved edges.

   The local variables are remapped to new instances, VARS_MAP is used
   to record the mapping.  D carries the full remapping context
   (blocks, labels, EH regions); see struct move_stmt_d.  */

static void
move_block_to_fn (struct function *dest_cfun, basic_block bb,
		  basic_block after, bool update_edge_count_p,
		  struct move_stmt_d *d)
  struct control_flow_graph *cfg;
  edge_iterator ei;
  edge e;
  gimple_stmt_iterator si;
  unsigned old_len, new_len;

  /* Remove BB from dominance structures.  */
  delete_from_dominance_info (CDI_DOMINATORS, bb);

  /* Move BB from its current loop to the copy in the new function.  */
  if (current_loops)
    struct loop *new_loop = (struct loop *)bb->loop_father->aux;
    if (new_loop)
      bb->loop_father = new_loop;

  /* Link BB to the new linked list.  */
  move_block_after (bb, after);

  /* Update the edge count in the corresponding flowgraphs.  */
  if (update_edge_count_p)
    FOR_EACH_EDGE (e, ei, bb->succs)
      cfun->cfg->x_n_edges--;
      dest_cfun->cfg->x_n_edges++;

  /* Remove BB from the original basic block array.  */
  (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
  cfun->cfg->x_n_basic_blocks--;

  /* Grow DEST_CFUN's basic block array if needed.  BB keeps its old
     index in the destination array.  */
  cfg = dest_cfun->cfg;
  cfg->x_n_basic_blocks++;
  if (bb->index >= cfg->x_last_basic_block)
    cfg->x_last_basic_block = bb->index + 1;

  old_len = vec_safe_length (cfg->x_basic_block_info);
  if ((unsigned) cfg->x_last_basic_block >= old_len)
    /* Grow by about 25% beyond the needed size.  */
    new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
    vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);

  (*cfg->x_basic_block_info)[bb->index] = bb;

  /* Remap the variables in phi nodes.  */
  for (si = gsi_start_phis (bb); !gsi_end_p (si); )
    gimple phi = gsi_stmt (si);
    use_operand_p use;
    tree op = PHI_RESULT (phi);
    ssa_op_iter oi;
    unsigned i;

    if (virtual_operand_p (op))
      /* Remove the phi nodes for virtual operands (alias analysis will be
	 run for the new function, anyway).  */
      remove_phi_node (&si, true);
      continue;

    /* Replace the result and each use with SSA names owned by
       DEST_CFUN.  */
    SET_PHI_RESULT (phi,
		    replace_ssa_name (op, d->vars_map, dest_cfun->decl));
    FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
      op = USE_FROM_PTR (use);
      if (TREE_CODE (op) == SSA_NAME)
	SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));

    /* Re-home the BLOCK component of each PHI argument location.  */
    for (i = 0; i < EDGE_COUNT (bb->preds); i++)
      location_t locus = gimple_phi_arg_location (phi, i);
      tree block = LOCATION_BLOCK (locus);

      if (locus == UNKNOWN_LOCATION)
	continue;
      if (d->orig_block == NULL_TREE || block == d->orig_block)
	if (d->new_block == NULL_TREE)
	  locus = LOCATION_LOCUS (locus);
	else
	  locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
	gimple_phi_arg_set_location (phi, i, locus);

    gsi_next (&si);

  /* Remap every ordinary statement and migrate its auxiliary data
     (labels, EH info, histograms, operand caches).  */
  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    gimple stmt = gsi_stmt (si);
    struct walk_stmt_info wi;

    memset (&wi, 0, sizeof (wi));
    wi.info = d;
    walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);

    if (gimple_code (stmt) == GIMPLE_LABEL)
      /* Transfer the label from CFUN's to DEST_CFUN's
	 label_to_block map, growing the latter if needed.  */
      tree label = gimple_label_label (stmt);
      int uid = LABEL_DECL_UID (label);

      gcc_assert (uid > -1);

      old_len = vec_safe_length (cfg->x_label_to_block_map);
      if (old_len <= (unsigned) uid)
	new_len = 3 * uid / 2 + 1;
	vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);

      (*cfg->x_label_to_block_map)[uid] = bb;
      (*cfun->cfg->x_label_to_block_map)[uid] = NULL;

      gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);

      if (uid >= dest_cfun->cfg->last_label_uid)
	dest_cfun->cfg->last_label_uid = uid + 1;

    /* EH attachment moves to DEST_CFUN and is dropped from CFUN.  */
    maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
    remove_stmt_from_eh_lp_fn (cfun, stmt);

    gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
    gimple_remove_stmt_histograms (cfun, stmt);

    /* We cannot leave any operands allocated from the operand caches of
       the current function.  */
    free_stmt_operands (cfun, stmt);
    push_cfun (dest_cfun);
    update_stmt (stmt);
    pop_cfun ();

  /* Re-home the BLOCK component of goto locations on outgoing edges.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->goto_locus != UNKNOWN_LOCATION)
      tree block = LOCATION_BLOCK (e->goto_locus);
      if (d->orig_block == NULL_TREE
	  || block == d->orig_block)
	e->goto_locus = d->new_block ?
	    COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
	    LOCATION_LOCUS (e->goto_locus);
/* Examine the statements in BB (which is in SRC_CFUN); find and return
   the outermost EH region.  Use REGION as the incoming base EH region.
   Returns REGION unchanged if no statement in BB has an EH landing
   pad.  */

static eh_region
find_outermost_region_in_block (struct function *src_cfun,
				basic_block bb, eh_region region)
  gimple_stmt_iterator si;

  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    gimple stmt = gsi_stmt (si);
    eh_region stmt_region;
    int lp_nr;

    lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
    stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
    if (stmt_region)
      if (region == NULL)
	region = stmt_region;
      else if (stmt_region != region)
	/* Widen to the common enclosing region of both.  */
	region = eh_region_outermost (src_cfun, stmt_region, region);
	gcc_assert (region != NULL);

  return region;
6746 static tree
6747 new_label_mapper (tree decl, void *data)
6749 htab_t hash = (htab_t) data;
6750 struct tree_map *m;
6751 void **slot;
6753 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6755 m = XNEW (struct tree_map);
6756 m->hash = DECL_UID (decl);
6757 m->base.from = decl;
6758 m->to = create_artificial_label (UNKNOWN_LOCATION);
6759 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6760 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6761 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6763 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6764 gcc_assert (*slot == NULL);
6766 *slot = m;
6768 return m->to;
/* Change DECL_CONTEXT of all BLOCK_VARS in block, including
   subblocks, replacing each VAR_DECL/CONST_DECL with its duplicate in
   TO_CONTEXT (via VARS_MAP) and splicing the duplicate into the
   BLOCK_VARS chain in place of the original.  */

static void
replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
				  tree to_context)
  tree *tp, t;

  for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
    t = *tp;
    if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
      continue;
    replace_by_duplicate_decl (&t, vars_map, to_context);
    if (t != *tp)
      /* Carry over any DECL_VALUE_EXPR, then replace the chain entry
	 with the duplicate.  */
      if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
	SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
	DECL_HAS_VALUE_EXPR_P (t) = 1;
      DECL_CHAIN (t) = DECL_CHAIN (*tp);
      *tp = t;

  /* Recurse into nested lexical blocks.  */
  for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
    replace_block_vars_by_duplicates (block, vars_map, to_context);
6802 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6803 from FN1 to FN2. */
6805 static void
6806 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6807 struct loop *loop)
6809 /* Discard it from the old loop array. */
6810 (*get_loops (fn1))[loop->num] = NULL;
6812 /* Place it in the new loop array, assigning it a new number. */
6813 loop->num = number_of_loops (fn2);
6814 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6816 /* Recurse to children. */
6817 for (loop = loop->inner; loop; loop = loop->next)
6818 fixup_loop_arrays_after_move (fn1, fn2, loop);
/* Move a single-entry, single-exit region delimited by ENTRY_BB and
   EXIT_BB to function DEST_CFUN.  The whole region is replaced by a
   single basic block in the original CFG and the new basic block is
   returned.  DEST_CFUN must not have a CFG yet.

   Note that the region need not be a pure SESE region.  Blocks inside
   the region may contain calls to abort/exit.  The only restriction
   is that ENTRY_BB should be the only entry point and it must
   dominate EXIT_BB.

   Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
   functions outermost BLOCK, move all subblocks of ORIG_BLOCK
   to the new function.

   All local variables referenced in the region are assumed to be in
   the corresponding BLOCK_VARS and unexpanded variable lists
   associated with DEST_CFUN.  */

basic_block
move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
			basic_block exit_bb, tree orig_block)
  vec<basic_block> bbs, dom_bbs;
  basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
  basic_block after, bb, *entry_pred, *exit_succ, abb;
  struct function *saved_cfun = cfun;
  int *entry_flag, *exit_flag;
  unsigned *entry_prob, *exit_prob;
  unsigned i, num_entry_edges, num_exit_edges, num_nodes;
  edge e;
  edge_iterator ei;
  htab_t new_label_map;
  struct pointer_map_t *vars_map, *eh_map;
  struct loop *loop = entry_bb->loop_father;
  struct loop *loop0 = get_loop (saved_cfun, 0);
  struct move_stmt_d d;

  /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
     region.  */
  gcc_assert (entry_bb != exit_bb
	      && (!exit_bb
		  || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));

  /* Collect all the blocks in the region.  Manually add ENTRY_BB
     because it won't be added by dfs_enumerate_from.  */
  bbs.create (0);
  bbs.safe_push (entry_bb);
  gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);

  /* The blocks that used to be dominated by something in BBS will now be
     dominated by the new block.  */
  dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
				     bbs.address (),
				     bbs.length ());

  /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG.  We need to remember
     the predecessor edges to ENTRY_BB and the successor edges to
     EXIT_BB so that we can re-attach them to the new basic block that
     will replace the region.  */
  num_entry_edges = EDGE_COUNT (entry_bb->preds);
  entry_pred = XNEWVEC (basic_block, num_entry_edges);
  entry_flag = XNEWVEC (int, num_entry_edges);
  entry_prob = XNEWVEC (unsigned, num_entry_edges);
  i = 0;
  for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
    entry_prob[i] = e->probability;
    entry_flag[i] = e->flags;
    entry_pred[i++] = e->src;
    remove_edge (e);

  if (exit_bb)
    num_exit_edges = EDGE_COUNT (exit_bb->succs);
    exit_succ = XNEWVEC (basic_block, num_exit_edges);
    exit_flag = XNEWVEC (int, num_exit_edges);
    exit_prob = XNEWVEC (unsigned, num_exit_edges);
    i = 0;
    for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
      exit_prob[i] = e->probability;
      exit_flag[i] = e->flags;
      exit_succ[i++] = e->dest;
      remove_edge (e);
  else
    num_exit_edges = 0;
    exit_succ = NULL;
    exit_flag = NULL;
    exit_prob = NULL;

  /* Switch context to the child function to initialize DEST_FN's CFG.  */
  gcc_assert (dest_cfun->cfg == NULL);
  push_cfun (dest_cfun);

  init_empty_tree_cfg ();

  /* Initialize EH information for the new function.  */
  eh_map = NULL;
  new_label_map = NULL;
  if (saved_cfun->eh)
    eh_region region = NULL;

    /* The outermost EH region covering the moved statements must be
       duplicated into DEST_CFUN.  */
    FOR_EACH_VEC_ELT (bbs, i, bb)
      region = find_outermost_region_in_block (saved_cfun, bb, region);

    init_eh_for_function ();
    if (region != NULL)
      new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
      eh_map = duplicate_eh_regions (saved_cfun, region, 0,
				     new_label_mapper, new_label_map);

  /* Initialize an empty loop tree.  */
  struct loops *loops = ggc_cleared_alloc<struct loops> ();
  init_loops_structure (dest_cfun, loops, 1);
  loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
  set_loops_for_fn (dest_cfun, loops);

  /* Move the outlined loop tree part.  NUM_NODES counts the blocks of
     LOOP (not of subloops rooted inside the region) that leave it.  */
  num_nodes = bbs.length ();
  FOR_EACH_VEC_ELT (bbs, i, bb)
    if (bb->loop_father->header == bb)
      struct loop *this_loop = bb->loop_father;
      struct loop *outer = loop_outer (this_loop);
      if (outer == loop
	  /* If the SESE region contains some bbs ending with
	     a noreturn call, those are considered to belong
	     to the outermost loop in saved_cfun, rather than
	     the entry_bb's loop_father.  */
	  || outer == loop0)
	if (outer != loop)
	  num_nodes -= this_loop->num_nodes;
	flow_loop_tree_node_remove (bb->loop_father);
	flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
	fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
    else if (bb->loop_father == loop0 && loop0 != loop)
      num_nodes--;

    /* Remove loop exits from the outlined region.  */
    if (loops_for_fn (saved_cfun)->exits)
      FOR_EACH_EDGE (e, ei, bb->succs)
	void **slot = htab_find_slot_with_hash
	    (loops_for_fn (saved_cfun)->exits, e,
	     htab_hash_pointer (e), NO_INSERT);
	if (slot)
	  htab_clear_slot (loops_for_fn (saved_cfun)->exits, slot);

  /* Adjust the number of blocks in the tree root of the outlined part.  */
  get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;

  /* Setup a mapping to be used by move_block_to_fn.  The AUX field of
     a source loop points at its destination counterpart.  */
  loop->aux = current_loops->tree_root;
  loop0->aux = current_loops->tree_root;

  pop_cfun ();

  /* Move blocks from BBS into DEST_CFUN.  */
  gcc_assert (bbs.length () >= 2);
  after = dest_cfun->cfg->x_entry_block_ptr;
  vars_map = pointer_map_create ();

  memset (&d, 0, sizeof (d));
  d.orig_block = orig_block;
  d.new_block = DECL_INITIAL (dest_cfun->decl);
  d.from_context = cfun->decl;
  d.to_context = dest_cfun->decl;
  d.vars_map = vars_map;
  d.new_label_map = new_label_map;
  d.eh_map = eh_map;
  d.remap_decls_p = true;

  FOR_EACH_VEC_ELT (bbs, i, bb)
    /* No need to update edge counts on the last block.  It has
       already been updated earlier when we detached the region from
       the original CFG.  */
    move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
    after = bb;

  loop->aux = NULL;
  loop0->aux = NULL;
  /* Loop sizes are no longer correct, fix them up.  */
  loop->num_nodes -= num_nodes;
  for (struct loop *outer = loop_outer (loop);
       outer; outer = loop_outer (outer))
    outer->num_nodes -= num_nodes;
  loop0->num_nodes -= bbs.length () - num_nodes;

  /* Propagate simduid/force_vectorize bookkeeping to DEST_CFUN.  */
  if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
    struct loop *aloop;
    for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
      if (aloop != NULL)
	if (aloop->simduid)
	  replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
				     d.to_context);
	  dest_cfun->has_simduid_loops = true;
	if (aloop->force_vectorize)
	  dest_cfun->has_force_vectorize_loops = true;

  /* Rewire BLOCK_SUBBLOCKS of orig_block.  */
  if (orig_block)
    tree block;
    gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
		== NULL_TREE);
    BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
      = BLOCK_SUBBLOCKS (orig_block);
    for (block = BLOCK_SUBBLOCKS (orig_block);
	 block; block = BLOCK_CHAIN (block))
      BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
    BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;

  replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
				    vars_map, dest_cfun->decl);

  if (new_label_map)
    htab_delete (new_label_map);
  if (eh_map)
    pointer_map_destroy (eh_map);
  pointer_map_destroy (vars_map);

  /* Rewire the entry and exit blocks.  The successor to the entry
     block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
     the child function.  Similarly, the predecessor of DEST_FN's
     EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR.  We
     need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
     various CFG manipulation function get to the right CFG.

     FIXME, this is silly.  The CFG ought to become a parameter to
     these helpers.  */
  push_cfun (dest_cfun);
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
  if (exit_bb)
    make_edge (exit_bb,  EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
  pop_cfun ();

  /* Back in the original function, the SESE region has disappeared,
     create a new basic block in its place.  */
  bb = create_empty_bb (entry_pred[0]);
  if (current_loops)
    add_bb_to_loop (bb, loop);
  for (i = 0; i < num_entry_edges; i++)
    e = make_edge (entry_pred[i], bb, entry_flag[i]);
    e->probability = entry_prob[i];

  for (i = 0; i < num_exit_edges; i++)
    e = make_edge (bb, exit_succ[i], exit_flag[i]);
    e->probability = exit_prob[i];

  /* The replacement block takes over ENTRY_BB's dominator and
     dominates everything the region used to dominate.  */
  set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
  FOR_EACH_VEC_ELT (dom_bbs, i, abb)
    set_immediate_dominator (CDI_DOMINATORS, abb, bb);
  dom_bbs.release ();

  if (exit_bb)
    free (exit_prob);
    free (exit_flag);
    free (exit_succ);
  free (entry_prob);
  free (entry_flag);
  free (entry_pred);
  bbs.release ();

  return bb;
7119 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
/* NOTE(review): this listing was scraped from a blob viewer; brace-only lines
   were dropped and each line retains its original file line number.  Only
   comments are added here — the code text is left exactly as captured.  */
7122 void
7123 dump_function_to_file (tree fndecl, FILE *file, int flags)
7125 tree arg, var, old_current_fndecl = current_function_decl;
7126 struct function *dsf;
7127 bool ignore_topmost_bind = false, any_var = false;
7128 basic_block bb;
7129 tree chain;
7130 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7131 && decl_is_tm_clone (fndecl));
7132 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
/* current_function_decl is saved above and restored on every exit path so
   this dumper can be called from any context.  */
7134 current_function_decl = fndecl;
7135 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
/* Print the argument list, comma-separated, type before name.  */
7137 arg = DECL_ARGUMENTS (fndecl);
7138 while (arg)
7140 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7141 fprintf (file, " ");
7142 print_generic_expr (file, arg, dump_flags);
7143 if (flags & TDF_VERBOSE)
7144 print_node (file, "", arg, 4);
7145 if (DECL_CHAIN (arg))
7146 fprintf (file, ", ");
7147 arg = DECL_CHAIN (arg);
7149 fprintf (file, ")\n");
7151 if (flags & TDF_VERBOSE)
7152 print_node (file, "", fndecl, 2);
7154 dsf = DECL_STRUCT_FUNCTION (fndecl);
7155 if (dsf && (flags & TDF_EH))
7156 dump_eh_tree (file, dsf);
/* A raw dump of a function without a gimple body short-circuits here.  */
7158 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7160 dump_node (fndecl, TDF_SLIM | flags, file);
7161 current_function_decl = old_current_fndecl;
7162 return;
7165 /* When GIMPLE is lowered, the variables are no longer available in
7166 BIND_EXPRs, so display them separately. */
7167 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7169 unsigned ix;
7170 ignore_topmost_bind = true;
7172 fprintf (file, "{\n");
7173 if (!vec_safe_is_empty (fun->local_decls))
7174 FOR_EACH_LOCAL_DECL (fun, ix, var)
7176 print_generic_decl (file, var, flags);
7177 if (flags & TDF_VERBOSE)
7178 print_node (file, "", var, 4);
7179 fprintf (file, "\n");
7181 any_var = true;
/* SSA names without an underlying VAR_DECL (anonymous SSA names) are
   declared explicitly, since no local decl covers them.  */
7183 if (gimple_in_ssa_p (cfun))
7184 for (ix = 1; ix < num_ssa_names; ++ix)
7186 tree name = ssa_name (ix);
7187 if (name && !SSA_NAME_VAR (name))
7189 fprintf (file, " ");
7190 print_generic_expr (file, TREE_TYPE (name), flags);
7191 fprintf (file, " ");
7192 print_generic_expr (file, name, flags);
7193 fprintf (file, ";\n");
7195 any_var = true;
7200 if (fun && fun->decl == fndecl
7201 && fun->cfg
7202 && basic_block_info_for_fn (fun))
7204 /* If the CFG has been built, emit a CFG-based dump. */
7205 if (!ignore_topmost_bind)
7206 fprintf (file, "{\n");
7208 if (any_var && n_basic_blocks_for_fn (fun))
7209 fprintf (file, "\n");
7211 FOR_EACH_BB_FN (bb, fun)
7212 dump_bb (file, bb, 2, flags | TDF_COMMENT);
7214 fprintf (file, "}\n");
7216 else if (DECL_SAVED_TREE (fndecl) == NULL)
7218 /* The function is now in GIMPLE form but the CFG has not been
7219 built yet. Emit the single sequence of GIMPLE statements
7220 that make up its body. */
7221 gimple_seq body = gimple_body (fndecl);
7223 if (gimple_seq_first_stmt (body)
7224 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7225 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7226 print_gimple_seq (file, body, 0, flags);
7227 else
7229 if (!ignore_topmost_bind)
7230 fprintf (file, "{\n");
7232 if (any_var)
7233 fprintf (file, "\n");
7235 print_gimple_seq (file, body, 2, flags);
7236 fprintf (file, "}\n");
7239 else
7241 int indent;
7243 /* Make a tree based dump. */
7244 chain = DECL_SAVED_TREE (fndecl);
7245 if (chain && TREE_CODE (chain) == BIND_EXPR)
7247 if (ignore_topmost_bind)
7249 chain = BIND_EXPR_BODY (chain);
7250 indent = 2;
7252 else
7253 indent = 0;
7255 else
7257 if (!ignore_topmost_bind)
7258 fprintf (file, "{\n");
7259 indent = 2;
7262 if (any_var)
7263 fprintf (file, "\n");
7265 print_generic_stmt_indented (file, chain, flags, indent);
7266 if (ignore_topmost_bind)
7267 fprintf (file, "}\n");
7270 if (flags & TDF_ENUMERATE_LOCALS)
7271 dump_enumerated_decls (file, flags);
7272 fprintf (file, "\n\n");
7274 current_function_decl = old_current_fndecl;
7277 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
7279 DEBUG_FUNCTION void
7280 debug_function (tree fn, int flags)
7282 dump_function_to_file (fn, stderr, flags);
7286 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7288 static void
7289 print_pred_bbs (FILE *file, basic_block bb)
7291 edge e;
7292 edge_iterator ei;
7294 FOR_EACH_EDGE (e, ei, bb->preds)
7295 fprintf (file, "bb_%d ", e->src->index);
7299 /* Print on FILE the indexes for the successors of basic_block BB. */
7301 static void
7302 print_succ_bbs (FILE *file, basic_block bb)
7304 edge e;
7305 edge_iterator ei;
7307 FOR_EACH_EDGE (e, ei, bb->succs)
7308 fprintf (file, "bb_%d ", e->dest->index);
7311 /* Print to FILE the basic block BB following the VERBOSITY level. */
7313 void
7314 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7316 char *s_indent = (char *) alloca ((size_t) indent + 1);
7317 memset ((void *) s_indent, ' ', (size_t) indent);
7318 s_indent[indent] = '\0';
7320 /* Print basic_block's header. */
7321 if (verbosity >= 2)
7323 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7324 print_pred_bbs (file, bb);
7325 fprintf (file, "}, succs = {");
7326 print_succ_bbs (file, bb);
7327 fprintf (file, "})\n");
7330 /* Print basic_block's body. */
7331 if (verbosity >= 3)
7333 fprintf (file, "%s {\n", s_indent);
7334 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7335 fprintf (file, "%s }\n", s_indent);
7339 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7341 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
7342 VERBOSITY level this outputs the contents of the loop, or just its
7343 structure. */
7345 static void
7346 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7348 char *s_indent;
7349 basic_block bb;
7351 if (loop == NULL)
7352 return;
7354 s_indent = (char *) alloca ((size_t) indent + 1);
7355 memset ((void *) s_indent, ' ', (size_t) indent);
7356 s_indent[indent] = '\0';
7358 /* Print loop's header. */
7359 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7360 if (loop->header)
7361 fprintf (file, "header = %d", loop->header->index);
7362 else
7364 fprintf (file, "deleted)\n");
7365 return;
7367 if (loop->latch)
7368 fprintf (file, ", latch = %d", loop->latch->index);
7369 else
7370 fprintf (file, ", multiple latches");
7371 fprintf (file, ", niter = ");
7372 print_generic_expr (file, loop->nb_iterations, 0);
7374 if (loop->any_upper_bound)
7376 fprintf (file, ", upper_bound = ");
7377 print_decu (loop->nb_iterations_upper_bound, file);
7380 if (loop->any_estimate)
7382 fprintf (file, ", estimate = ");
7383 print_decu (loop->nb_iterations_estimate, file);
7385 fprintf (file, ")\n");
7387 /* Print loop's body. */
7388 if (verbosity >= 1)
7390 fprintf (file, "%s{\n", s_indent);
7391 FOR_EACH_BB_FN (bb, cfun)
7392 if (bb->loop_father == loop)
7393 print_loops_bb (file, bb, indent, verbosity);
7395 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7396 fprintf (file, "%s}\n", s_indent);
7400 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7401 spaces. Following VERBOSITY level this outputs the contents of the
7402 loop, or just its structure. */
7404 static void
7405 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7406 int verbosity)
7408 if (loop == NULL)
7409 return;
7411 print_loop (file, loop, indent, verbosity);
7412 print_loop_and_siblings (file, loop->next, indent, verbosity);
7415 /* Follow a CFG edge from the entry point of the program, and on entry
7416 of a loop, pretty print the loop structure on FILE. */
7418 void
7419 print_loops (FILE *file, int verbosity)
7421 basic_block bb;
7423 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7424 if (bb && bb->loop_father)
7425 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7428 /* Dump a loop. */
7430 DEBUG_FUNCTION void
7431 debug (struct loop &ref)
7433 print_loop (stderr, &ref, 0, /*verbosity*/0);
7436 DEBUG_FUNCTION void
7437 debug (struct loop *ptr)
7439 if (ptr)
7440 debug (*ptr);
7441 else
7442 fprintf (stderr, "<nil>\n");
7445 /* Dump a loop verbosely. */
7447 DEBUG_FUNCTION void
7448 debug_verbose (struct loop &ref)
7450 print_loop (stderr, &ref, 0, /*verbosity*/3);
7453 DEBUG_FUNCTION void
7454 debug_verbose (struct loop *ptr)
7456 if (ptr)
7457 debug (*ptr);
7458 else
7459 fprintf (stderr, "<nil>\n");
7463 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7465 DEBUG_FUNCTION void
7466 debug_loops (int verbosity)
7468 print_loops (stderr, verbosity);
7471 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7473 DEBUG_FUNCTION void
7474 debug_loop (struct loop *loop, int verbosity)
7476 print_loop (stderr, loop, 0, verbosity);
7479 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7480 level. */
7482 DEBUG_FUNCTION void
7483 debug_loop_num (unsigned num, int verbosity)
7485 debug_loop (get_loop (cfun, num), verbosity);
7488 /* Return true if BB ends with a call, possibly followed by some
7489 instructions that must stay with the call. Return false,
7490 otherwise. */
7492 static bool
7493 gimple_block_ends_with_call_p (basic_block bb)
7495 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7496 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7500 /* Return true if BB ends with a conditional branch. Return false,
7501 otherwise. */
7503 static bool
7504 gimple_block_ends_with_condjump_p (const_basic_block bb)
7506 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7507 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7511 /* Return true if we need to add fake edge to exit at statement T.
7512 Helper function for gimple_flow_call_edges_add. */
/* NOTE(review): scraped listing — brace-only lines dropped; the left-hand
   numbers are the original file's line numbers.  Comments only added.  */
7514 static bool
7515 need_fake_edge_p (gimple t)
7517 tree fndecl = NULL_TREE;
7518 int call_flags = 0;
7520 /* NORETURN and LONGJMP calls already have an edge to exit.
7521 CONST and PURE calls do not need one.
7522 We don't currently check for CONST and PURE here, although
7523 it would be a good idea, because those attributes are
7524 figured out from the RTL in mark_constant_function, and
7525 the counter incrementation code from -fprofile-arcs
7526 leads to different results from -fbranch-probabilities. */
7527 if (is_gimple_call (t))
7529 fndecl = gimple_call_fndecl (t);
7530 call_flags = gimple_call_flags (t);
/* Nothrow, non-returns-twice builtins never need a fake edge, except the
   __gcov-wrapped fork, which behaves like returns-twice (see below).  */
7533 if (is_gimple_call (t)
7534 && fndecl
7535 && DECL_BUILT_IN (fndecl)
7536 && (call_flags & ECF_NOTHROW)
7537 && !(call_flags & ECF_RETURNS_TWICE)
7538 /* fork() doesn't really return twice, but the effect of
7539 wrapping it in __gcov_fork() which calls __gcov_flush()
7540 and clears the counters before forking has the same
7541 effect as returning twice. Force a fake edge. */
7542 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7543 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7544 return false;
7546 if (is_gimple_call (t))
7548 edge_iterator ei;
7549 edge e;
7550 basic_block bb;
/* Any call that may return normally needs a fake edge; a noreturn call
   only needs one if its block still has a non-fake successor.  */
7552 if (!(call_flags & ECF_NORETURN))
7553 return true;
7555 bb = gimple_bb (t);
7556 FOR_EACH_EDGE (e, ei, bb->succs)
7557 if ((e->flags & EDGE_FAKE) == 0)
7558 return true;
/* Volatile or input-consuming asms can also transfer control away.  */
7561 if (gimple_code (t) == GIMPLE_ASM
7562 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
7563 return true;
7565 return false;
7569 /* Add fake edges to the function exit for any non constant and non
7570 noreturn calls (or noreturn calls with EH/abnormal edges),
7571 volatile inline assembly in the bitmap of blocks specified by BLOCKS
7572 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7573 that were split.
7575 The goal is to expose cases in which entering a basic block does
7576 not imply that all subsequent instructions must be executed. */
/* NOTE(review): scraped listing — brace-only lines dropped; numbers on the
   left are original file line numbers.  Comments only added.  */
7578 static int
7579 gimple_flow_call_edges_add (sbitmap blocks)
7581 int i;
7582 int blocks_split = 0;
7583 int last_bb = last_basic_block_for_fn (cfun);
7584 bool check_last_block = false;
7586 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
7587 return 0;
7589 if (! blocks)
7590 check_last_block = true;
7591 else
7592 check_last_block = bitmap_bit_p (blocks,
7593 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
7595 /* In the last basic block, before epilogue generation, there will be
7596 a fallthru edge to EXIT. Special care is required if the last insn
7597 of the last basic block is a call because make_edge folds duplicate
7598 edges, which would result in the fallthru edge also being marked
7599 fake, which would result in the fallthru edge being removed by
7600 remove_fake_edges, which would result in an invalid CFG.
7602 Moreover, we can't elide the outgoing fake edge, since the block
7603 profiler needs to take this into account in order to solve the minimal
7604 spanning tree in the case that the call doesn't return.
7606 Handle this by adding a dummy instruction in a new last basic block. */
7607 if (check_last_block)
7609 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
7610 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7611 gimple t = NULL;
7613 if (!gsi_end_p (gsi))
7614 t = gsi_stmt (gsi);
/* Inserting a nop on the exit edge forces a block split, separating
   the call from the fallthru edge to EXIT.  */
7616 if (t && need_fake_edge_p (t))
7618 edge e;
7620 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7621 if (e)
7623 gsi_insert_on_edge (e, gimple_build_nop ());
7624 gsi_commit_edge_inserts ();
7629 /* Now add fake edges to the function exit for any non constant
7630 calls since there is no way that we can determine if they will
7631 return or not... */
7632 for (i = 0; i < last_bb; i++)
7634 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7635 gimple_stmt_iterator gsi;
7636 gimple stmt, last_stmt;
7638 if (!bb)
7639 continue;
7641 if (blocks && !bitmap_bit_p (blocks, i))
7642 continue;
7644 gsi = gsi_last_nondebug_bb (bb);
7645 if (!gsi_end_p (gsi))
7647 last_stmt = gsi_stmt (gsi);
/* Walk statements from the end of the block backwards, splitting the
   block after each statement that needs a fake edge.  */
7650 stmt = gsi_stmt (gsi);
7651 if (need_fake_edge_p (stmt))
7653 edge e;
7655 /* The handling above of the final block before the
7656 epilogue should be enough to verify that there is
7657 no edge to the exit block in CFG already.
7658 Calling make_edge in such case would cause us to
7659 mark that edge as fake and remove it later. */
7660 #ifdef ENABLE_CHECKING
7661 if (stmt == last_stmt)
7663 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7664 gcc_assert (e == NULL);
7666 #endif
7668 /* Note that the following may create a new basic block
7669 and renumber the existing basic blocks. */
7670 if (stmt != last_stmt)
7672 e = split_block (bb, stmt);
7673 if (e)
7674 blocks_split++;
7676 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
7678 gsi_prev (&gsi);
7680 while (!gsi_end_p (gsi));
7684 if (blocks_split)
7685 verify_flow_info ();
7687 return blocks_split;
7690 /* Removes edge E and all the blocks dominated by it, and updates dominance
7691 information. The IL in E->src needs to be updated separately.
7692 If dominance info is not available, only the edge E is removed.*/
/* NOTE(review): scraped listing — brace-only lines dropped; numbers on the
   left are original file line numbers.  Comments only added.  */
7694 void
7695 remove_edge_and_dominated_blocks (edge e)
7697 vec<basic_block> bbs_to_remove = vNULL;
7698 vec<basic_block> bbs_to_fix_dom = vNULL;
7699 bitmap df, df_idom;
7700 edge f;
7701 edge_iterator ei;
7702 bool none_removed = false;
7703 unsigned i;
7704 basic_block bb, dbb;
7705 bitmap_iterator bi;
7707 if (!dom_info_available_p (CDI_DOMINATORS))
7709 remove_edge (e);
7710 return;
7713 /* No updating is needed for edges to exit. */
7714 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7716 if (cfgcleanup_altered_bbs)
7717 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7718 remove_edge (e);
7719 return;
7722 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7723 that is not dominated by E->dest, then this set is empty. Otherwise,
7724 all the basic blocks dominated by E->dest are removed.
7726 Also, to DF_IDOM we store the immediate dominators of the blocks in
7727 the dominance frontier of E (i.e., of the successors of the
7728 removed blocks, if there are any, and of E->dest otherwise). */
7729 FOR_EACH_EDGE (f, ei, e->dest->preds)
7731 if (f == e)
7732 continue;
7734 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7736 none_removed = true;
7737 break;
7741 df = BITMAP_ALLOC (NULL);
7742 df_idom = BITMAP_ALLOC (NULL);
7744 if (none_removed)
7745 bitmap_set_bit (df_idom,
7746 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7747 else
7749 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
/* DF collects successors of removed blocks that themselves survive —
   i.e. the dominance frontier of the removed region.  */
7750 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7752 FOR_EACH_EDGE (f, ei, bb->succs)
7754 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
7755 bitmap_set_bit (df, f->dest->index);
7758 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7759 bitmap_clear_bit (df, bb->index);
7761 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7763 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7764 bitmap_set_bit (df_idom,
7765 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7769 if (cfgcleanup_altered_bbs)
7771 /* Record the set of the altered basic blocks. */
7772 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7773 bitmap_ior_into (cfgcleanup_altered_bbs, df);
7776 /* Remove E and the cancelled blocks. */
7777 if (none_removed)
7778 remove_edge (e);
7779 else
7781 /* Walk backwards so as to get a chance to substitute all
7782 released DEFs into debug stmts. See
7783 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7784 details. */
7785 for (i = bbs_to_remove.length (); i-- > 0; )
7786 delete_basic_block (bbs_to_remove[i]);
7789 /* Update the dominance information. The immediate dominator may change only
7790 for blocks whose immediate dominator belongs to DF_IDOM:
7792 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7793 removal. Let Z the arbitrary block such that idom(Z) = Y and
7794 Z dominates X after the removal. Before removal, there exists a path P
7795 from Y to X that avoids Z. Let F be the last edge on P that is
7796 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7797 dominates W, and because of P, Z does not dominate W), and W belongs to
7798 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7799 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7801 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7802 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
7803 dbb;
7804 dbb = next_dom_son (CDI_DOMINATORS, dbb))
7805 bbs_to_fix_dom.safe_push (dbb);
7808 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
7810 BITMAP_FREE (df);
7811 BITMAP_FREE (df_idom);
7812 bbs_to_remove.release ();
7813 bbs_to_fix_dom.release ();
7816 /* Purge dead EH edges from basic block BB. */
7818 bool
7819 gimple_purge_dead_eh_edges (basic_block bb)
7821 bool changed = false;
7822 edge e;
7823 edge_iterator ei;
7824 gimple stmt = last_stmt (bb);
7826 if (stmt && stmt_can_throw_internal (stmt))
7827 return false;
7829 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7831 if (e->flags & EDGE_EH)
7833 remove_edge_and_dominated_blocks (e);
7834 changed = true;
7836 else
7837 ei_next (&ei);
7840 return changed;
7843 /* Purge dead EH edges from basic block listed in BLOCKS. */
7845 bool
7846 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7848 bool changed = false;
7849 unsigned i;
7850 bitmap_iterator bi;
7852 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7854 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7856 /* Earlier gimple_purge_dead_eh_edges could have removed
7857 this basic block already. */
7858 gcc_assert (bb || changed);
7859 if (bb != NULL)
7860 changed |= gimple_purge_dead_eh_edges (bb);
7863 return changed;
7866 /* Purge dead abnormal call edges from basic block BB. */
7868 bool
7869 gimple_purge_dead_abnormal_call_edges (basic_block bb)
7871 bool changed = false;
7872 edge e;
7873 edge_iterator ei;
7874 gimple stmt = last_stmt (bb);
7876 if (!cfun->has_nonlocal_label
7877 && !cfun->calls_setjmp)
7878 return false;
7880 if (stmt && stmt_can_make_abnormal_goto (stmt))
7881 return false;
7883 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7885 if (e->flags & EDGE_ABNORMAL)
7887 if (e->flags & EDGE_FALLTHRU)
7888 e->flags &= ~EDGE_ABNORMAL;
7889 else
7890 remove_edge_and_dominated_blocks (e);
7891 changed = true;
7893 else
7894 ei_next (&ei);
7897 return changed;
7900 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
7902 bool
7903 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
7905 bool changed = false;
7906 unsigned i;
7907 bitmap_iterator bi;
7909 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7911 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7913 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7914 this basic block already. */
7915 gcc_assert (bb || changed);
7916 if (bb != NULL)
7917 changed |= gimple_purge_dead_abnormal_call_edges (bb);
7920 return changed;
7923 /* This function is called whenever a new edge is created or
7924 redirected. */
7926 static void
7927 gimple_execute_on_growing_pred (edge e)
7929 basic_block bb = e->dest;
7931 if (!gimple_seq_empty_p (phi_nodes (bb)))
7932 reserve_phi_args_for_new_edge (bb);
7935 /* This function is called immediately before edge E is removed from
7936 the edge vector E->dest->preds. */
7938 static void
7939 gimple_execute_on_shrinking_pred (edge e)
7941 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
7942 remove_phi_args (e);
7945 /*---------------------------------------------------------------------------
7946 Helper functions for Loop versioning
7947 ---------------------------------------------------------------------------*/
7949 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
7950 of 'first'. Both of them are dominated by 'new_head' basic block. When
7951 'new_head' was created by 'second's incoming edge it received phi arguments
7952 on the edge by split_edge(). Later, additional edge 'e' was created to
7953 connect 'new_head' and 'first'. Now this routine adds phi args on this
7954 additional edge 'e' that new_head to second edge received as part of edge
7955 splitting. */
/* NOTE(review): scraped listing — brace-only lines dropped; numbers on the
   left are original file line numbers.  Comments only added.  */
7957 static void
7958 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
7959 basic_block new_head, edge e)
7961 gimple phi1, phi2;
7962 gimple_stmt_iterator psi1, psi2;
7963 tree def;
7964 edge e2 = find_edge (new_head, second);
7966 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7967 edge, we should always have an edge from NEW_HEAD to SECOND. */
7968 gcc_assert (e2 != NULL);
7970 /* Browse all 'second' basic block phi nodes and add phi args to
7971 edge 'e' for 'first' head. PHI args are always in correct order. */
/* The two PHI chains are walked in lock-step; each PHI of FIRST gets the
   argument its counterpart in SECOND receives from NEW_HEAD.  */
7973 for (psi2 = gsi_start_phis (second),
7974 psi1 = gsi_start_phis (first);
7975 !gsi_end_p (psi2) && !gsi_end_p (psi1);
7976 gsi_next (&psi2), gsi_next (&psi1))
7978 phi1 = gsi_stmt (psi1);
7979 phi2 = gsi_stmt (psi2);
7980 def = PHI_ARG_DEF (phi2, e2->dest_idx);
7981 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
7986 /* Adds a if else statement to COND_BB with condition COND_EXPR.
7987 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
7988 the destination of the ELSE part. */
7990 static void
7991 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
7992 basic_block second_head ATTRIBUTE_UNUSED,
7993 basic_block cond_bb, void *cond_e)
7995 gimple_stmt_iterator gsi;
7996 gimple new_cond_expr;
7997 tree cond_expr = (tree) cond_e;
7998 edge e0;
8000 /* Build new conditional expr */
8001 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8002 NULL_TREE, NULL_TREE);
8004 /* Add new cond in cond_bb. */
8005 gsi = gsi_last_bb (cond_bb);
8006 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8008 /* Adjust edges appropriately to connect new head with first head
8009 as well as second head. */
8010 e0 = single_succ_edge (cond_bb);
8011 e0->flags &= ~EDGE_FALLTHRU;
8012 e0->flags |= EDGE_FALSE_VALUE;
8016 /* Do book-keeping of basic block BB for the profile consistency checker.
8017 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
8018 then do post-pass accounting. Store the counting in RECORD. */
8019 static void
8020 gimple_account_profile_record (basic_block bb, int after_pass,
8021 struct profile_record *record)
8023 gimple_stmt_iterator i;
8024 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8026 record->size[after_pass]
8027 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8028 if (profile_status_for_fn (cfun) == PROFILE_READ)
8029 record->time[after_pass]
8030 += estimate_num_insns (gsi_stmt (i),
8031 &eni_time_weights) * bb->count;
8032 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8033 record->time[after_pass]
8034 += estimate_num_insns (gsi_stmt (i),
8035 &eni_time_weights) * bb->frequency;
/* Hook table wiring the generic CFG machinery to the GIMPLE IL
   implementations above — presumably the slot layout is declared in
   cfghooks.h; confirm against that header.  */
8039 struct cfg_hooks gimple_cfg_hooks = {
8040 "gimple",
8041 gimple_verify_flow_info,
8042 gimple_dump_bb, /* dump_bb */
8043 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8044 create_bb, /* create_basic_block */
8045 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8046 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8047 gimple_can_remove_branch_p, /* can_remove_branch_p */
8048 remove_bb, /* delete_basic_block */
8049 gimple_split_block, /* split_block */
8050 gimple_move_block_after, /* move_block_after */
8051 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8052 gimple_merge_blocks, /* merge_blocks */
8053 gimple_predict_edge, /* predict_edge */
8054 gimple_predicted_by_p, /* predicted_by_p */
8055 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8056 gimple_duplicate_bb, /* duplicate_block */
8057 gimple_split_edge, /* split_edge */
8058 gimple_make_forwarder_block, /* make_forward_block */
8059 NULL, /* tidy_fallthru_edge */
8060 NULL, /* force_nonfallthru */
8061 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8062 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8063 gimple_flow_call_edges_add, /* flow_call_edges_add */
8064 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8065 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8066 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8067 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8068 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
8069 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8070 flush_pending_stmts, /* flush_pending_stmts */
8071 gimple_empty_block_p, /* block_empty_p */
8072 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8073 gimple_account_profile_record,
8077 /* Split all critical edges. */
/* NOTE(review): scraped listing — brace-only lines dropped; numbers on the
   left are original file line numbers.  Comments only added.  Returns 0
   (TODO flags for the pass manager).  */
8079 unsigned int
8080 split_critical_edges (void)
8082 basic_block bb;
8083 edge e;
8084 edge_iterator ei;
8086 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8087 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8088 mappings around the calls to split_edge. */
8089 start_recording_case_labels ();
8090 FOR_ALL_BB_FN (bb, cfun)
8092 FOR_EACH_EDGE (e, ei, bb->succs)
/* Abnormal edges cannot be split, hence the exclusion in both arms.  */
8094 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8095 split_edge (e);
8096 /* PRE inserts statements to edges and expects that
8097 since split_critical_edges was done beforehand, committing edge
8098 insertions will not split more edges. In addition to critical
8099 edges we must split edges that have multiple successors and
8100 end by control flow statements, such as RESX.
8101 Go ahead and split them too. This matches the logic in
8102 gimple_find_edge_insert_loc. */
8103 else if ((!single_pred_p (e->dest)
8104 || !gimple_seq_empty_p (phi_nodes (e->dest))
8105 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8106 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8107 && !(e->flags & EDGE_ABNORMAL))
8109 gimple_stmt_iterator gsi;
8111 gsi = gsi_last_bb (e->src);
8112 if (!gsi_end_p (gsi)
8113 && stmt_ends_bb_p (gsi_stmt (gsi))
8114 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8115 && !gimple_call_builtin_p (gsi_stmt (gsi),
8116 BUILT_IN_RETURN)))
8117 split_edge (e);
8121 end_recording_case_labels ();
8122 return 0;
/* Pass boilerplate: metadata, opt_pass wrapper and factory for the
   "crited" (split critical edges) pass.  */
8125 namespace {
8127 const pass_data pass_data_split_crit_edges =
8129 GIMPLE_PASS, /* type */
8130 "crited", /* name */
8131 OPTGROUP_NONE, /* optinfo_flags */
8132 true, /* has_execute */
8133 TV_TREE_SPLIT_EDGES, /* tv_id */
8134 PROP_cfg, /* properties_required */
8135 PROP_no_crit_edges, /* properties_provided */
8136 0, /* properties_destroyed */
8137 0, /* todo_flags_start */
8138 0, /* todo_flags_finish */
8141 class pass_split_crit_edges : public gimple_opt_pass
8143 public:
8144 pass_split_crit_edges (gcc::context *ctxt)
8145 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8148 /* opt_pass methods: */
8149 virtual unsigned int execute (function *) { return split_critical_edges (); }
8151 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8152 }; // class pass_split_crit_edges
8154 } // anon namespace
8156 gimple_opt_pass *
8157 make_pass_split_crit_edges (gcc::context *ctxt)
8159 return new pass_split_crit_edges (ctxt);
8163 /* Build a ternary operation and gimplify it. Emit code before GSI.
8164 Return the gimple_val holding the result. */
8166 tree
8167 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8168 tree type, tree a, tree b, tree c)
8170 tree ret;
8171 location_t loc = gimple_location (gsi_stmt (*gsi));
8173 ret = fold_build3_loc (loc, code, type, a, b, c);
8174 STRIP_NOPS (ret);
8176 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8177 GSI_SAME_STMT);
8180 /* Build a binary operation and gimplify it. Emit code before GSI.
8181 Return the gimple_val holding the result. */
8183 tree
8184 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8185 tree type, tree a, tree b)
8187 tree ret;
8189 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8190 STRIP_NOPS (ret);
8192 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8193 GSI_SAME_STMT);
8196 /* Build a unary operation and gimplify it. Emit code before GSI.
8197 Return the gimple_val holding the result. */
8199 tree
8200 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8201 tree a)
8203 tree ret;
8205 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8206 STRIP_NOPS (ret);
8208 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8209 GSI_SAME_STMT);
8214 /* Given a basic block B which ends with a conditional and has
8215 precisely two successors, determine which of the edges is taken if
8216 the conditional is true and which is taken if the conditional is
8217 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8219 void
8220 extract_true_false_edges_from_block (basic_block b,
8221 edge *true_edge,
8222 edge *false_edge)
8224 edge e = EDGE_SUCC (b, 0);
8226 if (e->flags & EDGE_TRUE_VALUE)
8228 *true_edge = e;
8229 *false_edge = EDGE_SUCC (b, 1);
8231 else
8233 *false_edge = e;
8234 *true_edge = EDGE_SUCC (b, 1);
8238 /* Emit return warnings. */
/* NOTE(review): scraped listing — brace-only lines dropped; numbers on the
   left are original file line numbers.  Comments only added.  */
8240 namespace {
8242 const pass_data pass_data_warn_function_return =
8244 GIMPLE_PASS, /* type */
8245 "*warn_function_return", /* name */
8246 OPTGROUP_NONE, /* optinfo_flags */
8247 true, /* has_execute */
8248 TV_NONE, /* tv_id */
8249 PROP_cfg, /* properties_required */
8250 0, /* properties_provided */
8251 0, /* properties_destroyed */
8252 0, /* todo_flags_start */
8253 0, /* todo_flags_finish */
8256 class pass_warn_function_return : public gimple_opt_pass
8258 public:
8259 pass_warn_function_return (gcc::context *ctxt)
8260 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8263 /* opt_pass methods: */
8264 virtual unsigned int execute (function *);
8266 }; // class pass_warn_function_return
8268 unsigned int
8269 pass_warn_function_return::execute (function *fun)
8271 source_location location;
8272 gimple last;
8273 edge e;
8274 edge_iterator ei;
8276 if (!targetm.warn_func_return (fun->decl))
8277 return 0;
/* Case 1: a noreturn function (TREE_THIS_VOLATILE) whose exit block is
   reachable does return — warn, preferring a return statement's
   location over the function end locus.  */
8279 /* If we have a path to EXIT, then we do return. */
8280 if (TREE_THIS_VOLATILE (fun->decl)
8281 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
8283 location = UNKNOWN_LOCATION;
8284 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8286 last = last_stmt (e->src);
8287 if ((gimple_code (last) == GIMPLE_RETURN
8288 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
8289 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
8290 break;
8292 if (location == UNKNOWN_LOCATION)
8293 location = cfun->function_end_locus;
8294 warning_at (location, 0, "%<noreturn%> function does return");
/* Case 2: a value-returning function containing a bare "return;" —
   warn once and latch TREE_NO_WARNING to avoid duplicates.  */
8297 /* If we see "return;" in some basic block, then we do reach the end
8298 without returning a value. */
8299 else if (warn_return_type
8300 && !TREE_NO_WARNING (fun->decl)
8301 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
8302 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
8304 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8306 gimple last = last_stmt (e->src);
8307 if (gimple_code (last) == GIMPLE_RETURN
8308 && gimple_return_retval (last) == NULL
8309 && !gimple_no_warning_p (last))
8311 location = gimple_location (last);
8312 if (location == UNKNOWN_LOCATION)
8313 location = fun->function_end_locus;
8314 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
8315 TREE_NO_WARNING (fun->decl) = 1;
8316 break;
8320 return 0;
8323 } // anon namespace
8325 gimple_opt_pass *
8326 make_pass_warn_function_return (gcc::context *ctxt)
8328 return new pass_warn_function_return (ctxt);
8331 /* Walk a gimplified function and warn for functions whose return value is
8332 ignored and attribute((warn_unused_result)) is set. This is done before
8333 inlining, so we don't have to worry about that. */
8335 static void
8336 do_warn_unused_result (gimple_seq seq)
8338 tree fdecl, ftype;
8339 gimple_stmt_iterator i;
8341 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8343 gimple g = gsi_stmt (i);
8345 switch (gimple_code (g))
8347 case GIMPLE_BIND:
8348 do_warn_unused_result (gimple_bind_body (g));
8349 break;
8350 case GIMPLE_TRY:
8351 do_warn_unused_result (gimple_try_eval (g));
8352 do_warn_unused_result (gimple_try_cleanup (g));
8353 break;
8354 case GIMPLE_CATCH:
8355 do_warn_unused_result (gimple_catch_handler (g));
8356 break;
8357 case GIMPLE_EH_FILTER:
8358 do_warn_unused_result (gimple_eh_filter_failure (g));
8359 break;
8361 case GIMPLE_CALL:
8362 if (gimple_call_lhs (g))
8363 break;
8364 if (gimple_call_internal_p (g))
8365 break;
8367 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8368 LHS. All calls whose value is ignored should be
8369 represented like this. Look for the attribute. */
8370 fdecl = gimple_call_fndecl (g);
8371 ftype = gimple_call_fntype (g);
8373 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8375 location_t loc = gimple_location (g);
8377 if (fdecl)
8378 warning_at (loc, OPT_Wunused_result,
8379 "ignoring return value of %qD, "
8380 "declared with attribute warn_unused_result",
8381 fdecl);
8382 else
8383 warning_at (loc, OPT_Wunused_result,
8384 "ignoring return value of function "
8385 "declared with attribute warn_unused_result");
8387 break;
8389 default:
8390 /* Not a container, not a call, or a call whose value is used. */
8391 break;
8396 namespace {
8398 const pass_data pass_data_warn_unused_result =
8400 GIMPLE_PASS, /* type */
8401 "*warn_unused_result", /* name */
8402 OPTGROUP_NONE, /* optinfo_flags */
8403 true, /* has_execute */
8404 TV_NONE, /* tv_id */
8405 PROP_gimple_any, /* properties_required */
8406 0, /* properties_provided */
8407 0, /* properties_destroyed */
8408 0, /* todo_flags_start */
8409 0, /* todo_flags_finish */
8412 class pass_warn_unused_result : public gimple_opt_pass
8414 public:
8415 pass_warn_unused_result (gcc::context *ctxt)
8416 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
8419 /* opt_pass methods: */
8420 virtual bool gate (function *) { return flag_warn_unused_result; }
8421 virtual unsigned int execute (function *)
8423 do_warn_unused_result (gimple_body (current_function_decl));
8424 return 0;
8427 }; // class pass_warn_unused_result
8429 } // anon namespace
8431 gimple_opt_pass *
8432 make_pass_warn_unused_result (gcc::context *ctxt)
8434 return new pass_warn_unused_result (ctxt);
8437 /* IPA passes, compilation of earlier functions or inlining
8438 might have changed some properties, such as marked functions nothrow,
8439 pure, const or noreturn.
8440 Remove redundant edges and basic blocks, and create new ones if necessary.
8442 This pass can't be executed as stand alone pass from pass manager, because
8443 in between inlining and this fixup the verify_flow_info would fail. */
8445 unsigned int
8446 execute_fixup_cfg (void)
8448 basic_block bb;
8449 gimple_stmt_iterator gsi;
8450 int todo = 0;
8451 gcov_type count_scale;
8452 edge e;
8453 edge_iterator ei;
8455 count_scale
8456 = GCOV_COMPUTE_SCALE (cgraph_get_node (current_function_decl)->count,
8457 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
8459 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
8460 cgraph_get_node (current_function_decl)->count;
8461 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
8462 apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
8463 count_scale);
8465 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
8466 e->count = apply_scale (e->count, count_scale);
8468 FOR_EACH_BB_FN (bb, cfun)
8470 bb->count = apply_scale (bb->count, count_scale);
8471 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
8473 gimple stmt = gsi_stmt (gsi);
8474 tree decl = is_gimple_call (stmt)
8475 ? gimple_call_fndecl (stmt)
8476 : NULL;
8477 if (decl)
8479 int flags = gimple_call_flags (stmt);
8480 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
8482 if (gimple_purge_dead_abnormal_call_edges (bb))
8483 todo |= TODO_cleanup_cfg;
8485 if (gimple_in_ssa_p (cfun))
8487 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8488 update_stmt (stmt);
8492 if (flags & ECF_NORETURN
8493 && fixup_noreturn_call (stmt))
8494 todo |= TODO_cleanup_cfg;
8497 /* Remove stores to variables we marked write-only.
8498 Keep access when store has side effect, i.e. in case when source
8499 is volatile. */
8500 if (gimple_store_p (stmt)
8501 && !gimple_has_side_effects (stmt))
8503 tree lhs = get_base_address (gimple_get_lhs (stmt));
8505 if (TREE_CODE (lhs) == VAR_DECL
8506 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8507 && varpool_get_node (lhs)->writeonly)
8509 unlink_stmt_vdef (stmt);
8510 gsi_remove (&gsi, true);
8511 release_defs (stmt);
8512 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8513 continue;
8516 /* For calls we can simply remove LHS when it is known
8517 to be write-only. */
8518 if (is_gimple_call (stmt)
8519 && gimple_get_lhs (stmt))
8521 tree lhs = get_base_address (gimple_get_lhs (stmt));
8523 if (TREE_CODE (lhs) == VAR_DECL
8524 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8525 && varpool_get_node (lhs)->writeonly)
8527 gimple_call_set_lhs (stmt, NULL);
8528 update_stmt (stmt);
8529 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8533 if (maybe_clean_eh_stmt (stmt)
8534 && gimple_purge_dead_eh_edges (bb))
8535 todo |= TODO_cleanup_cfg;
8536 gsi_next (&gsi);
8539 FOR_EACH_EDGE (e, ei, bb->succs)
8540 e->count = apply_scale (e->count, count_scale);
8542 /* If we have a basic block with no successors that does not
8543 end with a control statement or a noreturn call end it with
8544 a call to __builtin_unreachable. This situation can occur
8545 when inlining a noreturn call that does in fact return. */
8546 if (EDGE_COUNT (bb->succs) == 0)
8548 gimple stmt = last_stmt (bb);
8549 if (!stmt
8550 || (!is_ctrl_stmt (stmt)
8551 && (!is_gimple_call (stmt)
8552 || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
8554 stmt = gimple_build_call
8555 (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
8556 gimple_stmt_iterator gsi = gsi_last_bb (bb);
8557 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
8561 if (count_scale != REG_BR_PROB_BASE)
8562 compute_function_frequency ();
8564 /* We just processed all calls. */
8565 if (cfun->gimple_df)
8566 vec_free (MODIFIED_NORETURN_CALLS (cfun));
8568 /* Dump a textual representation of the flowgraph. */
8569 if (dump_file)
8570 gimple_dump_cfg (dump_file, dump_flags);
8572 if (current_loops
8573 && (todo & TODO_cleanup_cfg))
8574 loops_state_set (LOOPS_NEED_FIXUP);
8576 return todo;
8579 namespace {
8581 const pass_data pass_data_fixup_cfg =
8583 GIMPLE_PASS, /* type */
8584 "*free_cfg_annotations", /* name */
8585 OPTGROUP_NONE, /* optinfo_flags */
8586 true, /* has_execute */
8587 TV_NONE, /* tv_id */
8588 PROP_cfg, /* properties_required */
8589 0, /* properties_provided */
8590 0, /* properties_destroyed */
8591 0, /* todo_flags_start */
8592 0, /* todo_flags_finish */
8595 class pass_fixup_cfg : public gimple_opt_pass
8597 public:
8598 pass_fixup_cfg (gcc::context *ctxt)
8599 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
8602 /* opt_pass methods: */
8603 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
8604 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
8606 }; // class pass_fixup_cfg
8608 } // anon namespace
8610 gimple_opt_pass *
8611 make_pass_fixup_cfg (gcc::context *ctxt)
8613 return new pass_fixup_cfg (ctxt);
8616 /* Garbage collection support for edge_def. */
8618 extern void gt_ggc_mx (tree&);
8619 extern void gt_ggc_mx (gimple&);
8620 extern void gt_ggc_mx (rtx&);
8621 extern void gt_ggc_mx (basic_block&);
8623 void
8624 gt_ggc_mx (edge_def *e)
8626 tree block = LOCATION_BLOCK (e->goto_locus);
8627 gt_ggc_mx (e->src);
8628 gt_ggc_mx (e->dest);
8629 if (current_ir_type () == IR_GIMPLE)
8630 gt_ggc_mx (e->insns.g);
8631 else
8632 gt_ggc_mx (e->insns.r);
8633 gt_ggc_mx (block);
8636 /* PCH support for edge_def. */
8638 extern void gt_pch_nx (tree&);
8639 extern void gt_pch_nx (gimple&);
8640 extern void gt_pch_nx (rtx&);
8641 extern void gt_pch_nx (basic_block&);
8643 void
8644 gt_pch_nx (edge_def *e)
8646 tree block = LOCATION_BLOCK (e->goto_locus);
8647 gt_pch_nx (e->src);
8648 gt_pch_nx (e->dest);
8649 if (current_ir_type () == IR_GIMPLE)
8650 gt_pch_nx (e->insns.g);
8651 else
8652 gt_pch_nx (e->insns.r);
8653 gt_pch_nx (block);
8656 void
8657 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
8659 tree block = LOCATION_BLOCK (e->goto_locus);
8660 op (&(e->src), cookie);
8661 op (&(e->dest), cookie);
8662 if (current_ir_type () == IR_GIMPLE)
8663 op (&(e->insns.g), cookie);
8664 else
8665 op (&(e->insns.r), cookie);
8666 op (&(block), cookie);