Fix typo in documentation wrt. C_PTRDIFF_T constant
[official-gcc.git] / gcc / tree-cfg.c
blobe99e1022b6c9cf44ff0520910eb52648b37dd645
1 /* Control flow functions for trees.
2 Copyright (C) 2001-2016 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "gimple-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "fold-const.h"
36 #include "trans-mem.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
39 #include "cfganal.h"
40 #include "gimple-fold.h"
41 #include "tree-eh.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-cfg.h"
46 #include "tree-ssa-loop-manip.h"
47 #include "tree-ssa-loop-niter.h"
48 #include "tree-into-ssa.h"
49 #include "tree-dfa.h"
50 #include "tree-ssa.h"
51 #include "except.h"
52 #include "cfgloop.h"
53 #include "tree-ssa-propagate.h"
54 #include "value-prof.h"
55 #include "tree-inline.h"
56 #include "tree-ssa-live.h"
57 #include "omp-low.h"
58 #include "tree-cfgcleanup.h"
59 #include "gimplify.h"
60 #include "attribs.h"
61 #include "selftest.h"
63 /* This file contains functions for building the Control Flow Graph (CFG)
64 for a function tree. */
66 /* Local declarations.  */
68 /* Initial capacity for the basic block array.  */
69 static const int initial_cfg_capacity = 20;
71 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
72 which use a particular edge.  The CASE_LABEL_EXPRs are chained together
73 via their CASE_CHAIN field, which we clear after we're done with the
74 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
76 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
77 update the case vector in response to edge redirections.
79 Right now this table is set up and torn down at key points in the
80 compilation process.  It would be nice if we could make the table
81 more persistent.  The key is getting notification of changes to
82 the CFG (particularly edge removal, creation and redirection).  */
84 static hash_map<edge, tree> *edge_to_cases;
86 /* If we record edge_to_cases, this bitmap will hold indexes
87 of basic blocks that end in a GIMPLE_SWITCH which we touched
88 due to edge manipulations.  */
90 static bitmap touched_switch_bbs;
92 /* CFG statistics.  */
93 struct cfg_stats_d
/* Number of labels merged into their block's first label by
   cleanup_dead_labels and friends (statistics only).  */
95 long num_merged_labels;
98 static struct cfg_stats_d cfg_stats;
100 /* Data to pass to replace_block_vars_by_duplicates_1.  */
101 struct replace_decls_d
/* Map from original decls to their duplicates.  */
103 hash_map<tree, tree> *vars_map;
/* Context (function decl) the duplicates belong to.  */
104 tree to_context;
107 /* Hash table to store last discriminator assigned for each locus.  */
108 struct locus_discrim_map
110 location_t locus;
111 int discriminator;
114 /* Hashtable helpers.  */
/* Hasher for the locus -> discriminator map; entries are freed with
   free () via free_ptr_hash.  Hash/equality are on the source LINE of
   the locus only (see the member definitions below).  */
116 struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
118 static inline hashval_t hash (const locus_discrim_map *);
119 static inline bool equal (const locus_discrim_map *,
120 const locus_discrim_map *);
123 /* Trivial hash function for a location_t. ITEM is a pointer to
124 a hash table entry that maps a location_t to a discriminator. */
126 inline hashval_t
127 locus_discrim_hasher::hash (const locus_discrim_map *item)
129 return LOCATION_LINE (item->locus);
132 /* Equality function for the locus-to-discriminator map. A and B
133 point to the two hash table entries to compare. */
135 inline bool
136 locus_discrim_hasher::equal (const locus_discrim_map *a,
137 const locus_discrim_map *b)
139 return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
/* Per-source-line discriminator state; live only while the CFG is being
   built (allocated and freed in build_gimple_cfg).  */
142 static hash_table<locus_discrim_hasher> *discriminator_per_locus;
144 /* Basic blocks and flowgraphs.  */
145 static void make_blocks (gimple_seq);
147 /* Edges.  */
148 static void make_edges (void);
149 static void assign_discriminators (void);
150 static void make_cond_expr_edges (basic_block);
151 static void make_gimple_switch_edges (gswitch *, basic_block);
152 static bool make_goto_expr_edges (basic_block);
153 static void make_gimple_asm_edges (basic_block);
154 static edge gimple_redirect_edge_and_branch (edge, basic_block);
155 static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
157 /* Various helpers.  */
158 static inline bool stmt_starts_bb_p (gimple *, gimple *);
159 static int gimple_verify_flow_info (void);
160 static void gimple_make_forwarder_block (edge);
161 static gimple *first_non_label_stmt (basic_block);
162 static bool verify_gimple_transaction (gtransaction *);
163 static bool call_can_make_abnormal_goto (gimple *);
165 /* Flowgraph optimization and cleanup.  */
166 static void gimple_merge_blocks (basic_block, basic_block);
167 static bool gimple_can_merge_blocks_p (basic_block, basic_block);
168 static void remove_bb (basic_block);
169 static edge find_taken_edge_computed_goto (basic_block, tree);
170 static edge find_taken_edge_cond_expr (basic_block, tree);
171 static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
172 static tree find_case_label_for_value (gswitch *, tree);
173 static void lower_phi_internal_fn ();
/* Set up an empty CFG skeleton for function FN: just the fixed ENTRY
   and EXIT blocks, linked to each other, with freshly allocated
   basic-block and label-to-block maps of INITIAL_CFG_CAPACITY.  */
175 void
176 init_empty_tree_cfg_for_function (struct function *fn)
178 /* Initialize the basic block array.  */
179 init_flow (fn);
180 profile_status_for_fn (fn) = PROFILE_ABSENT;
181 n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
182 last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
183 vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
184 vec_safe_grow_cleared (basic_block_info_for_fn (fn),
185 initial_cfg_capacity);
187 /* Build a mapping of labels to their associated blocks.  */
188 vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
189 vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
190 initial_cfg_capacity);
192 SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
193 SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));
/* Initially ENTRY and EXIT are adjacent; make_blocks will insert the
   real blocks between them.  */
195 ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
196 = EXIT_BLOCK_PTR_FOR_FN (fn);
197 EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
198 = ENTRY_BLOCK_PTR_FOR_FN (fn);
201 void
202 init_empty_tree_cfg (void)
204 init_empty_tree_cfg_for_function (cfun);
207 /*---------------------------------------------------------------------------
208 Create basic blocks
209 ---------------------------------------------------------------------------*/
211 /* Entry point to the CFG builder for trees.  SEQ is the sequence of
212 statements to be added to the flowgraph.  */
214 static void
215 build_gimple_cfg (gimple_seq seq)
217 /* Register specific gimple functions.  */
218 gimple_register_cfg_hooks ();
/* Reset the label-merge statistics for this function.  */
220 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
222 init_empty_tree_cfg ();
224 make_blocks (seq);
226 /* Make sure there is always at least one block, even if it's empty.  */
227 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
228 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
230 /* Adjust the size of the array.  */
231 if (basic_block_info_for_fn (cfun)->length ()
232 < (size_t) n_basic_blocks_for_fn (cfun))
233 vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
234 n_basic_blocks_for_fn (cfun))

;
236 /* To speed up statement iterator walks, we first purge dead labels.  */
237 cleanup_dead_labels ();
239 /* Group case nodes to reduce the number of edges.
240 We do this after cleaning up dead labels because otherwise we miss
241 a lot of obvious case merging opportunities.  */
242 group_case_labels ();
244 /* Create the edges of the flowgraph.  */
245 discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
246 make_edges ();
247 assign_discriminators ();
248 lower_phi_internal_fn ();
249 cleanup_dead_labels ();
/* The discriminator table is only needed while building; free it.  */
250 delete discriminator_per_locus;
251 discriminator_per_locus = NULL;
254 /* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
255 them and propagate the information to LOOP.  We assume that the annotations
256 come immediately before the condition in BB, if any.  */
258 static void
259 replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
261 gimple_stmt_iterator gsi = gsi_last_bb (bb);
262 gimple *stmt = gsi_stmt (gsi);
/* Only blocks ending in a GIMPLE_COND carry loop annotations here.  */
264 if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
265 return;
/* Walk backwards from the condition over consecutive IFN_ANNOTATE
   internal calls; stop at the first statement that is not one.  */
267 for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
269 stmt = gsi_stmt (gsi);
270 if (gimple_code (stmt) != GIMPLE_CALL)
271 break;
272 if (!gimple_call_internal_p (stmt)
273 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
274 break;
/* Second argument of IFN_ANNOTATE encodes the annotation kind.  */
276 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
278 case annot_expr_ivdep_kind:
279 loop->safelen = INT_MAX;
280 break;
281 case annot_expr_no_vector_kind:
282 loop->dont_vectorize = true;
283 break;
284 case annot_expr_vector_kind:
285 loop->force_vectorize = true;
286 cfun->has_force_vectorize_loops = true;
287 break;
288 default:
289 gcc_unreachable ();
/* Replace the annotation call by a plain copy of its first argument
   so the LHS keeps its value and the call disappears.  */
292 stmt = gimple_build_assign (gimple_call_lhs (stmt),
293 gimple_call_arg (stmt, 0));
294 gsi_replace (&gsi, stmt, true);
298 /* Look for ANNOTATE calls with loop annotation kind; if found, remove
299 them and propagate the information to the loop.  We assume that the
300 annotations come immediately before the condition of the loop.  */
302 static void
303 replace_loop_annotate (void)
305 struct loop *loop;
306 basic_block bb;
307 gimple_stmt_iterator gsi;
308 gimple *stmt;
310 FOR_EACH_LOOP (loop, 0)
312 /* First look into the header.  */
313 replace_loop_annotate_in_block (loop->header, loop);
315 /* Then look into the latch, if any.  */
316 if (loop->latch)
317 replace_loop_annotate_in_block (loop->latch, loop);
320 /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
321 FOR_EACH_BB_FN (bb, cfun)
323 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
325 stmt = gsi_stmt (gsi);
326 if (gimple_code (stmt) != GIMPLE_CALL)
327 continue;
328 if (!gimple_call_internal_p (stmt)
329 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
330 continue;
/* Any annotation still present at this point was not consumed by a
   loop above; warn and drop it.  */
332 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
334 case annot_expr_ivdep_kind:
335 case annot_expr_no_vector_kind:
336 case annot_expr_vector_kind:
337 break;
338 default:
339 gcc_unreachable ();
342 warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
343 stmt = gimple_build_assign (gimple_call_lhs (stmt),
344 gimple_call_arg (stmt, 0));
345 gsi_replace (&gsi, stmt, true);
350 /* Lower internal PHI function from GIMPLE FE.  */
352 static void
353 lower_phi_internal_fn ()
355 basic_block bb, pred = NULL;
356 gimple_stmt_iterator gsi;
357 tree lhs;
358 gphi *phi_node;
359 gimple *stmt;
361 /* After edge creation, handle __PHI function from GIMPLE FE.  */
362 FOR_EACH_BB_FN (bb, cfun)
364 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
366 stmt = gsi_stmt (gsi);
367 if (! gimple_call_internal_p (stmt, IFN_PHI))
369 gsi_next (&gsi);
370 continue;
/* Turn the IFN_PHI call into a real PHI node on its LHS.  */
373 lhs = gimple_call_lhs (stmt);
374 phi_node = create_phi_node (lhs, bb);
376 /* Add arguments to the PHI node.  */
/* Arguments alternate: a LABEL_DECL naming the predecessor block,
   then the value flowing in on the edge from that block.  PRED
   carries the label over to the following value argument.  */
377 for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
379 tree arg = gimple_call_arg (stmt, i);
380 if (TREE_CODE (arg) == LABEL_DECL)
381 pred = label_to_block (arg);
382 else
384 edge e = find_edge (pred, bb);
385 add_phi_arg (phi_node, arg, e, UNKNOWN_LOCATION);
/* Remove the call; gsi now points at the next statement.  */
389 gsi_remove (&gsi, true);
/* Pass driver for the "cfg" pass: build the CFG for the current
   function, drop the flat gimple body, clean up the fresh CFG and
   propagate loop annotations.  Always returns 0 (no TODO flags).  */
394 static unsigned int
395 execute_build_cfg (void)
397 gimple_seq body = gimple_body (current_function_decl);
399 build_gimple_cfg (body);
/* Statements now live in basic blocks; the flat body is stale.  */
400 gimple_set_body (current_function_decl, NULL);
401 if (dump_file && (dump_flags & TDF_DETAILS))
403 fprintf (dump_file, "Scope blocks:\n");
404 dump_scope_blocks (dump_file, dump_flags);
406 cleanup_tree_cfg ();
407 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
408 replace_loop_annotate ();
409 return 0;
412 namespace {
/* Pass metadata: a GIMPLE pass named "cfg" requiring lowered EH
   (PROP_gimple_leh) and providing the CFG and loop structures.  */
414 const pass_data pass_data_build_cfg =
416 GIMPLE_PASS, /* type */
417 "cfg", /* name */
418 OPTGROUP_NONE, /* optinfo_flags */
419 TV_TREE_CFG, /* tv_id */
420 PROP_gimple_leh, /* properties_required */
421 ( PROP_cfg | PROP_loops ), /* properties_provided */
422 0, /* properties_destroyed */
423 0, /* todo_flags_start */
424 0, /* todo_flags_finish */
427 class pass_build_cfg : public gimple_opt_pass
429 public:
430 pass_build_cfg (gcc::context *ctxt)
431 : gimple_opt_pass (pass_data_build_cfg, ctxt)
434 /* opt_pass methods: */
435 virtual unsigned int execute (function *) { return execute_build_cfg (); }
437 }; // class pass_build_cfg
439 } // anon namespace
/* Factory used by the pass manager to instantiate the pass.  */
441 gimple_opt_pass *
442 make_pass_build_cfg (gcc::context *ctxt)
444 return new pass_build_cfg (ctxt);
448 /* Return true if T is a computed goto. */
450 bool
451 computed_goto_p (gimple *t)
453 return (gimple_code (t) == GIMPLE_GOTO
454 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
457 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
458 the other edge points to a bb with just __builtin_unreachable ().
459 I.e. return true for C->M edge in:
460 <bb C>:
462 if (something)
463 goto <bb N>;
464 else
465 goto <bb M>;
466 <bb N>:
467 __builtin_unreachable ();
468 <bb M>: */
470 bool
471 assert_unreachable_fallthru_edge_p (edge e)
473 basic_block pred_bb = e->src;
474 gimple *last = last_stmt (pred_bb);
475 if (last && gimple_code (last) == GIMPLE_COND)
/* Pick the successor of the condition that is NOT E->dest.  */
477 basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
478 if (other_bb == e->dest)
479 other_bb = EDGE_SUCC (pred_bb, 1)->dest;
480 if (EDGE_COUNT (other_bb->succs) == 0)
482 gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
483 gimple *stmt;
485 if (gsi_end_p (gsi))
486 return false;
487 stmt = gsi_stmt (gsi);
/* Skip over debug statements and clobbers before the trap.  */
488 while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
490 gsi_next (&gsi);
491 if (gsi_end_p (gsi))
492 return false;
493 stmt = gsi_stmt (gsi);
495 return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
498 return false;
502 /* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
503 could alter control flow except via eh.  We initialize the flag at
504 CFG build time and only ever clear it later.  */
506 static void
507 gimple_call_initialize_ctrl_altering (gimple *stmt)
509 int flags = gimple_call_flags (stmt);
511 /* A call alters control flow if it can make an abnormal goto.  */
512 if (call_can_make_abnormal_goto (stmt)
513 /* A call also alters control flow if it does not return.  */
514 || flags & ECF_NORETURN
515 /* TM ending statements have backedges out of the transaction.
516 Return true so we split the basic block containing them.
517 Note that the TM_BUILTIN test is merely an optimization.  */
518 || ((flags & ECF_TM_BUILTIN)
519 && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
520 /* BUILT_IN_RETURN call is same as return statement.  */
521 || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
522 /* IFN_UNIQUE should be the last insn, to make checking for it
523 as cheap as possible.  */
524 || (gimple_call_internal_p (stmt)
525 && gimple_call_internal_unique_p (stmt)))
526 gimple_call_set_ctrl_altering (stmt, true);
527 else
528 gimple_call_set_ctrl_altering (stmt, false);
532 /* Insert SEQ after BB and build a flowgraph.  */
/* Returns the last basic block created.  Splits SEQ at every
   statement that starts or ends a basic block.  */
534 static basic_block
535 make_blocks_1 (gimple_seq seq, basic_block bb)
537 gimple_stmt_iterator i = gsi_start (seq);
538 gimple *stmt = NULL;
539 bool start_new_block = true;
540 bool first_stmt_of_seq = true;
542 while (!gsi_end_p (i))
544 gimple *prev_stmt;
546 prev_stmt = stmt;
547 stmt = gsi_stmt (i);
549 if (stmt && is_gimple_call (stmt))
550 gimple_call_initialize_ctrl_altering (stmt);
552 /* If the statement starts a new basic block or if we have determined
553 in a previous pass that we need to create a new block for STMT, do
554 so now.  */
555 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
557 if (!first_stmt_of_seq)
558 gsi_split_seq_before (&i, &seq);
559 bb = create_basic_block (seq, bb);
560 start_new_block = false;
563 /* Now add STMT to BB and create the subgraphs for special statement
564 codes.  */
565 gimple_set_bb (stmt, bb);
567 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
568 next iteration.  */
569 if (stmt_ends_bb_p (stmt))
571 /* If the stmt can make abnormal goto use a new temporary
572 for the assignment to the LHS.  This makes sure the old value
573 of the LHS is available on the abnormal edge.  Otherwise
574 we will end up with overlapping life-ranges for abnormal
575 SSA names.  */
576 if (gimple_has_lhs (stmt)
577 && stmt_can_make_abnormal_goto (stmt)
578 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
580 tree lhs = gimple_get_lhs (stmt);
581 tree tmp = create_tmp_var (TREE_TYPE (lhs));
582 gimple *s = gimple_build_assign (lhs, tmp);
583 gimple_set_location (s, gimple_location (stmt));
584 gimple_set_block (s, gimple_block (stmt));
585 gimple_set_lhs (stmt, tmp);
586 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
587 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
588 DECL_GIMPLE_REG_P (tmp) = 1;
589 gsi_insert_after (&i, s, GSI_SAME_STMT);
591 start_new_block = true;
594 gsi_next (&i);
595 first_stmt_of_seq = false;
597 return bb;
600 /* Build a flowgraph for the sequence of stmts SEQ. */
602 static void
603 make_blocks (gimple_seq seq)
605 make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
608 /* Create and return a new empty basic block after bb AFTER.  */
/* H is the gimple_seq for the block (may be NULL); E must be NULL —
   this hook never splits an edge.  */
610 static basic_block
611 create_bb (void *h, void *e, basic_block after)
613 basic_block bb;
615 gcc_assert (!e);
617 /* Create and initialize a new basic block.  Since alloc_block uses
618 GC allocation that clears memory to allocate a basic block, we do
619 not have to clear the newly allocated basic block here.  */
620 bb = alloc_block ();
622 bb->index = last_basic_block_for_fn (cfun);
623 bb->flags = BB_NEW;
624 set_bb_seq (bb, h ? (gimple_seq) h : NULL);
626 /* Add the new block to the linked list of blocks.  */
627 link_block (bb, after);
629 /* Grow the basic block array if needed.  */
630 if ((size_t) last_basic_block_for_fn (cfun)
631 == basic_block_info_for_fn (cfun)->length ())
/* Grow by roughly 25% of the current size.  */
633 size_t new_size =
634 (last_basic_block_for_fn (cfun)
635 + (last_basic_block_for_fn (cfun) + 3) / 4);
636 vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
639 /* Add the newly created block to the array.  */
640 SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);
642 n_basic_blocks_for_fn (cfun)++;
643 last_basic_block_for_fn (cfun)++;
645 return bb;
649 /*---------------------------------------------------------------------------
650 Edge creation
651 ---------------------------------------------------------------------------*/
653 /* If basic block BB has an abnormal edge to a basic block
654 containing IFN_ABNORMAL_DISPATCHER internal call, return
655 that the dispatcher's basic block, otherwise return NULL.  */
657 basic_block
658 get_abnormal_succ_dispatcher (basic_block bb)
660 edge e;
661 edge_iterator ei;
/* Look for a successor edge that is abnormal but not EH.  */
663 FOR_EACH_EDGE (e, ei, bb->succs)
664 if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
666 gimple_stmt_iterator gsi
667 = gsi_start_nondebug_after_labels_bb (e->dest);
668 gimple *g = gsi_stmt (gsi);
669 if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
670 return e->dest;
672 return NULL;
675 /* Helper function for make_edges.  Create a basic block with
676 with ABNORMAL_DISPATCHER internal call in it if needed, and
677 create abnormal edges from BBS to it and from it to FOR_BB
678 if COMPUTED_GOTO is false, otherwise factor the computed gotos.  */
/* DISPATCHER_BBS holds (per OMP region when BB_TO_OMP_IDX is non-NULL)
   a pair of cached dispatcher blocks: [0] for abnormal calls, [1] for
   computed gotos.  The dispatcher is created lazily the first time a
   target FOR_BB in its region is seen.  */
680 static void
681 handle_abnormal_edges (basic_block *dispatcher_bbs,
682 basic_block for_bb, int *bb_to_omp_idx,
683 auto_vec<basic_block> *bbs, bool computed_goto)
685 basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
686 unsigned int idx = 0;
687 basic_block bb;
688 bool inner = false;
690 if (bb_to_omp_idx)
/* Select the dispatcher pair belonging to FOR_BB's OMP region.  */
692 dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
693 if (bb_to_omp_idx[for_bb->index] != 0)
694 inner = true;
697 /* If the dispatcher has been created already, then there are basic
698 blocks with abnormal edges to it, so just make a new edge to
699 for_bb.  */
700 if (*dispatcher == NULL)
702 /* Check if there are any basic blocks that need to have
703 abnormal edges to this dispatcher.  If there are none, return
704 early.  */
705 if (bb_to_omp_idx == NULL)
707 if (bbs->is_empty ())
708 return;
710 else
712 FOR_EACH_VEC_ELT (*bbs, idx, bb)
713 if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
714 break;
715 if (bb == NULL)
716 return;
719 /* Create the dispatcher bb.  */
720 *dispatcher = create_basic_block (NULL, for_bb)

;
721 if (computed_goto)
723 /* Factor computed gotos into a common computed goto site.  Also
724 record the location of that site so that we can un-factor the
725 gotos after we have converted back to normal form.  */
726 gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);
728 /* Create the destination of the factored goto.  Each original
729 computed goto will put its desired destination into this
730 variable and jump to the label we create immediately below.  */
731 tree var = create_tmp_var (ptr_type_node, "gotovar");
733 /* Build a label for the new block which will contain the
734 factored computed goto.  */
735 tree factored_label_decl
736 = create_artificial_label (UNKNOWN_LOCATION);
737 gimple *factored_computed_goto_label
738 = gimple_build_label (factored_label_decl);
739 gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);
741 /* Build our new computed goto.  */
742 gimple *factored_computed_goto = gimple_build_goto (var);
743 gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);
745 FOR_EACH_VEC_ELT (*bbs, idx, bb)
747 if (bb_to_omp_idx
748 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
749 continue;
751 gsi = gsi_last_bb (bb);
752 gimple *last = gsi_stmt (gsi);
754 gcc_assert (computed_goto_p (last));
756 /* Copy the original computed goto's destination into VAR.  */
757 gimple *assignment
758 = gimple_build_assign (var, gimple_goto_dest (last));
759 gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
761 edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
762 e->goto_locus = gimple_location (last);
763 gsi_remove (&gsi, true);
766 else
/* Abnormal-call dispatcher: mark inner (nested OMP region)
   dispatchers via the boolean argument.  */
768 tree arg = inner ? boolean_true_node : boolean_false_node;
769 gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
770 1, arg);
771 gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
772 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
774 /* Create predecessor edges of the dispatcher.  */
775 FOR_EACH_VEC_ELT (*bbs, idx, bb)
777 if (bb_to_omp_idx
778 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
779 continue;
780 make_edge (bb, *dispatcher, EDGE_ABNORMAL);
/* Finally connect the dispatcher to FOR_BB.  */
785 make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
788 /* Creates outgoing edges for BB.  Returns 1 when it ends with an
789 computed goto, returns 2 when it ends with a statement that
790 might return to this function via an nonlocal goto, otherwise
791 return 0.  Updates *PCUR_REGION with the OMP region this BB is in.  */
793 static int
794 make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
796 gimple *last = last_stmt (bb);
797 bool fallthru = false;
798 int ret = 0;
/* An empty block simply falls through via the caller's default.  */
800 if (!last)
801 return ret;
/* Dispatch on the code of the block's last statement; it alone
   determines the outgoing edges.  */
803 switch (gimple_code (last))
805 case GIMPLE_GOTO:
806 if (make_goto_expr_edges (bb))
807 ret = 1;
808 fallthru = false;
809 break;
810 case GIMPLE_RETURN:
812 edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
813 e->goto_locus = gimple_location (last);
814 fallthru = false;
816 break;
817 case GIMPLE_COND:
818 make_cond_expr_edges (bb);
819 fallthru = false;
820 break;
821 case GIMPLE_SWITCH:
822 make_gimple_switch_edges (as_a <gswitch *> (last), bb);
823 fallthru = false;
824 break;
825 case GIMPLE_RESX:
826 make_eh_edges (last);
827 fallthru = false;
828 break;
829 case GIMPLE_EH_DISPATCH:
830 fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
831 break;
833 case GIMPLE_CALL:
834 /* If this function receives a nonlocal goto, then we need to
835 make edges from this call site to all the nonlocal goto
836 handlers.  */
837 if (stmt_can_make_abnormal_goto (last))
838 ret = 2;
840 /* If this statement has reachable exception handlers, then
841 create abnormal edges to them.  */
842 make_eh_edges (last);
844 /* BUILTIN_RETURN is really a return statement.  */
845 if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
847 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
848 fallthru = false;
850 /* Some calls are known not to return.  */
851 else
852 fallthru = !gimple_call_noreturn_p (last);
853 break;
855 case GIMPLE_ASSIGN:
856 /* A GIMPLE_ASSIGN may throw internally and thus be considered
857 control-altering.  */
858 if (is_ctrl_altering_stmt (last))
859 make_eh_edges (last);
860 fallthru = true;
861 break;
863 case GIMPLE_ASM:
864 make_gimple_asm_edges (bb);
865 fallthru = true;
866 break;
868 CASE_GIMPLE_OMP:
869 fallthru = make_gimple_omp_edges (bb, pcur_region, pomp_index);
870 break;
872 case GIMPLE_TRANSACTION:
874 gtransaction *txn = as_a <gtransaction *> (last);
875 tree label1 = gimple_transaction_label_norm (txn);
876 tree label2 = gimple_transaction_label_uninst (txn);
878 if (label1)
879 make_edge (bb, label_to_block (label1), EDGE_FALLTHRU);
880 if (label2)
881 make_edge (bb, label_to_block (label2),
882 EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));
884 tree label3 = gimple_transaction_label_over (txn);
885 if (gimple_transaction_subcode (txn)
886 & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
887 make_edge (bb, label_to_block (label3), EDGE_TM_ABORT);
889 fallthru = false;
891 break;
893 default:
894 gcc_assert (!stmt_ends_bb_p (last));
895 fallthru = true;
896 break;
899 if (fallthru)
900 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
902 return ret;
905 /* Join all the blocks in the flowgraph.  */
907 static void
908 make_edges (void)
910 basic_block bb;
911 struct omp_region *cur_region = NULL;
/* Blocks ending in a computed goto / in an abnormal-goto call.  */
912 auto_vec<basic_block> ab_edge_goto;
913 auto_vec<basic_block> ab_edge_call;
914 int *bb_to_omp_idx = NULL;
915 int cur_omp_region_idx = 0;
917 /* Create an edge from entry to the first block with executable
918 statements in it.  */
919 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
920 BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
921 EDGE_FALLTHRU);
923 /* Traverse the basic block array placing edges.  */
924 FOR_EACH_BB_FN (bb, cfun)
926 int mer;
928 if (bb_to_omp_idx)
929 bb_to_omp_idx[bb->index] = cur_omp_region_idx;
931 mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
932 if (mer == 1)
933 ab_edge_goto.safe_push (bb);
934 else if (mer == 2)
935 ab_edge_call.safe_push (bb);
/* Allocate the OMP-region index map lazily, only once we see the
   first OMP region.  */
937 if (cur_region && bb_to_omp_idx == NULL)
938 bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
941 /* Computed gotos are hell to deal with, especially if there are
942 lots of them with a large number of destinations.  So we factor
943 them to a common computed goto location before we build the
944 edge list.  After we convert back to normal form, we will un-factor
945 the computed gotos since factoring introduces an unwanted jump.
946 For non-local gotos and abnormal edges from calls to calls that return
947 twice or forced labels, factor the abnormal edges too, by having all
948 abnormal edges from the calls go to a common artificial basic block
949 with ABNORMAL_DISPATCHER internal call and abnormal edges from that
950 basic block to all forced labels and calls returning twice.
951 We do this per-OpenMP structured block, because those regions
952 are guaranteed to be single entry single exit by the standard,
953 so it is not allowed to enter or exit such regions abnormally this way,
954 thus all computed gotos, non-local gotos and setjmp/longjmp calls
955 must not transfer control across SESE region boundaries.  */
956 if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
958 gimple_stmt_iterator gsi;
959 basic_block dispatcher_bb_array[2] = { NULL, NULL };
960 basic_block *dispatcher_bbs = dispatcher_bb_array;
961 int count = n_basic_blocks_for_fn (cfun);
/* With OMP regions, one dispatcher pair per region.  */
963 if (bb_to_omp_idx)
964 dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);
966 FOR_EACH_BB_FN (bb, cfun)
968 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
970 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
971 tree target;
973 if (!label_stmt)
974 break;
976 target = gimple_label_label (label_stmt);
978 /* Make an edge to every label block that has been marked as a
979 potential target for a computed goto or a non-local goto.  */
980 if (FORCED_LABEL (target))
981 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
982 &ab_edge_goto, true);
983 if (DECL_NONLOCAL (target))
985 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
986 &ab_edge_call, false);
987 break;
991 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
992 gsi_next_nondebug (&gsi);
993 if (!gsi_end_p (gsi))
995 /* Make an edge to every setjmp-like call.  */
996 gimple *call_stmt = gsi_stmt (gsi);
997 if (is_gimple_call (call_stmt)
998 && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
999 || gimple_call_builtin_p (call_stmt,
1000 BUILT_IN_SETJMP_RECEIVER)))
1001 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
1002 &ab_edge_call, false);
1006 if (bb_to_omp_idx)
1007 XDELETE (dispatcher_bbs);
1010 XDELETE (bb_to_omp_idx);
1012 free_omp_regions ();
1015 /* Add SEQ after GSI.  Start new bb after GSI, and created further bbs as
1016 needed.  Returns true if new bbs were created.
1017 Note: This is transitional code, and should not be used for new code.  We
1018 should be able to get rid of this by rewriting all target va-arg
1019 gimplification hooks to use an interface gimple_build_cond_value as described
1020 in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */
1022 bool
1023 gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
1025 gimple *stmt = gsi_stmt (*gsi);
1026 basic_block bb = gimple_bb (stmt);
1027 basic_block lastbb, afterbb;
1028 int old_num_bbs = n_basic_blocks_for_fn (cfun);
1029 edge e;
1030 lastbb = make_blocks_1 (seq, bb);
/* No block boundary inside SEQ: nothing further to do.  */
1031 if (old_num_bbs == n_basic_blocks_for_fn (cfun))
1032 return false;
1033 e = split_block (bb, stmt);
1034 /* Move e->dest to come after the new basic blocks.  */
1035 afterbb = e->dest;
1036 unlink_block (afterbb);
1037 link_block (afterbb, lastbb);
1038 redirect_edge_succ (e, bb->next_bb);
1039 bb = bb->next_bb;
/* Wire up edges and loop membership for each newly created block.  */
1040 while (bb != afterbb)
1042 struct omp_region *cur_region = NULL;
1043 int cur_omp_region_idx = 0;
1044 int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
/* The inserted blocks must not contain abnormal gotos or OMP
   constructs.  */
1045 gcc_assert (!mer && !cur_region);
1046 add_bb_to_loop (bb, afterbb->loop_father);
1047 bb = bb->next_bb;
1049 return true;
1052 /* Find the next available discriminator value for LOCUS.  The
1053 discriminator distinguishes among several basic blocks that
1054 share a common locus, allowing for more accurate sample-based
1055 profiling.  */
1057 static int
1058 next_discriminator_for_locus (location_t locus)
1060 struct locus_discrim_map item;
1061 struct locus_discrim_map **slot;
1063 item.locus = locus;
1064 item.discriminator = 0;
/* Look up (or create) the table slot for LOCUS's source line.  */
1065 slot = discriminator_per_locus->find_slot_with_hash (
1066 &item, LOCATION_LINE (locus), INSERT);
1067 gcc_assert (slot);
1068 if (*slot == HTAB_EMPTY_ENTRY)
/* First discriminator requested for this line: allocate the entry.
   It is freed by the hasher (free_ptr_hash) on table deletion.  */
1070 *slot = XNEW (struct locus_discrim_map);
1071 gcc_assert (*slot);
1072 (*slot)->locus = locus;
1073 (*slot)->discriminator = 0;
/* Discriminators for a line start at 1 and increase monotonically.  */
1075 (*slot)->discriminator++;
1076 return (*slot)->discriminator;
1079 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
1081 static bool
1082 same_line_p (location_t locus1, location_t locus2)
1084 expanded_location from, to;
1086 if (locus1 == locus2)
1087 return true;
1089 from = expand_location (locus1);
1090 to = expand_location (locus2);
1092 if (from.line != to.line)
1093 return false;
1094 if (from.file == to.file)
1095 return true;
1096 return (from.file != NULL
1097 && to.file != NULL
1098 && filename_cmp (from.file, to.file) == 0);
1101 /* Assign discriminators to each basic block. */
1103 static void
1104 assign_discriminators (void)
1106 basic_block bb;
1108 FOR_EACH_BB_FN (bb, cfun)
1110 edge e;
1111 edge_iterator ei;
1112 gimple *last = last_stmt (bb);
1113 location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;
1115 if (locus == UNKNOWN_LOCATION)
1116 continue;
1118 FOR_EACH_EDGE (e, ei, bb->succs)
1120 gimple *first = first_non_label_stmt (e->dest);
1121 gimple *last = last_stmt (e->dest);
1122 if ((first && same_line_p (locus, gimple_location (first)))
1123 || (last && same_line_p (locus, gimple_location (last))))
1125 if (e->dest->discriminator != 0 && bb->discriminator == 0)
1126 bb->discriminator = next_discriminator_for_locus (locus);
1127 else
1128 e->dest->discriminator = next_discriminator_for_locus (locus);
/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  /* NOTE(review): only the false edge is guarded against make_edge
     returning NULL (both arms may target the same block, in which case
     the second make_edge finds an existing edge); the true edge is the
     first created and so is assumed non-NULL.  */
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
1168 /* Called for each element in the hash table (P) as we delete the
1169 edge to cases hash table.
1171 Clear all the CASE_CHAINs to prevent problems with copying of
1172 SWITCH_EXPRs and structure sharing rules, then free the hash table
1173 element. */
1175 bool
1176 edge_to_cases_cleanup (edge const &, tree const &value, void *)
1178 tree t, next;
1180 for (t = value; t; t = next)
1182 next = CASE_CHAIN (t);
1183 CASE_CHAIN (t) = NULL;
1186 return true;
1189 /* Start recording information mapping edges to case labels. */
1191 void
1192 start_recording_case_labels (void)
1194 gcc_assert (edge_to_cases == NULL);
1195 edge_to_cases = new hash_map<edge, tree>;
1196 touched_switch_bbs = BITMAP_ALLOC (NULL);
1199 /* Return nonzero if we are recording information for case labels. */
1201 static bool
1202 recording_case_labels_p (void)
1204 return (edge_to_cases != NULL);
/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  /* Break all CASE_CHAIN links before deleting the map, so SWITCH_EXPR
     copying does not trip over stale structure sharing.  */
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  /* Re-group the case labels of every switch whose block was touched
     while recording; blocks may have been deleted meanwhile, hence the
     NULL check.  */
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple *stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}
/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  S is a reference into the map, so assigning to it
	 updates the stored chain head in place.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  /* The loop above populated the map for every edge of T, including E.  */
  return *edge_to_cases->get (e);
}
1272 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1274 static void
1275 make_gimple_switch_edges (gswitch *entry, basic_block bb)
1277 size_t i, n;
1279 n = gimple_switch_num_labels (entry);
1281 for (i = 0; i < n; ++i)
1283 tree lab = CASE_LABEL (gimple_switch_label (entry, i));
1284 basic_block label_bb = label_to_block (lab);
1285 make_edge (bb, label_bb, 0);
/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      /* Inserting the label assigned it a UID; re-read it.  */
      uid = LABEL_DECL_UID (dest);
    }
  /* UID out of range of the map means the label has no block (yet).  */
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}
/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple *goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      /* The edge now encodes the control transfer, so the GOTO
	 statement itself is no longer needed and is removed here.  */
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}
1339 /* Create edges for an asm statement with labels at block BB. */
1341 static void
1342 make_gimple_asm_edges (basic_block bb)
1344 gasm *stmt = as_a <gasm *> (last_stmt (bb));
1345 int i, n = gimple_asm_nlabels (stmt);
1347 for (i = 0; i < n; ++i)
1349 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1350 basic_block label_bb = label_to_block (label);
1351 make_edge (bb, label_bb, 0);
1355 /*---------------------------------------------------------------------------
1356 Flowgraph analysis
1357 ---------------------------------------------------------------------------*/
1359 /* Cleanup useless labels in basic blocks. This is something we wish
1360 to do early because it allows us to group case labels before creating
1361 the edges for the CFG, and it speeds up block statement iterators in
1362 all passes later on.
1363 We rerun this pass after CFG is created, to get rid of the labels that
1364 are no longer referenced. After then we do not run it any more, since
1365 (almost) no new labels should be created. */
/* A map from basic block index to the leading label of that block.
   Allocated and indexed by cleanup_dead_labels; one entry per basic
   block, keyed by bb->index.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;
1377 /* Given LABEL return the first label in the same basic block. */
1379 static tree
1380 main_block_label (tree label)
1382 basic_block bb = label_to_block (label);
1383 tree main_label = label_for_bb[bb->index].label;
1385 /* label_to_block possibly inserted undefined label into the chain. */
1386 if (!main_label)
1388 label_for_bb[bb->index].label = label;
1389 main_label = label;
1392 label_for_bb[bb->index].used = true;
1393 return main_label;
/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  /* Redirect each landing pad's post-landing-pad label to the leading
     label of its block, and transfer the landing-pad number along.  */
  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  /* Do the same for the labels stored directly in EH regions.  */
  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	/* These region kinds carry no label of their own.  */
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}
/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  /* Labels can only appear at the start of a block; stop at the
	     first non-label statement.  */
	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *txn = as_a <gtransaction *> (stmt);

	    label = gimple_transaction_label_norm (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_norm (txn, new_label);
	      }

	    label = gimple_transaction_label_uninst (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_uninst (txn, new_label);
	      }

	    label = gimple_transaction_label_over (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_over (txn, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.
     Index 0 is the default label, so start at 1.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
	 default case.  Dropped slots are NULLed here and compressed
	 out below.  */
      if (base_bb == default_bb)
	{
	  gimple_switch_set_label (stmt, i, NULL_TREE);
	  i++;
	  new_size--;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (i < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, i);
	  basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
	  /* BHP1 = base_high + 1, the only value CASE_LOW may have for
	     the ranges to be consecutive.  */
	  wide_int bhp1 = wi::add (base_high, 1);

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && wi::eq_p (CASE_LOW (merge_case), bhp1))
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      gimple_switch_set_label (stmt, i, NULL_TREE);
	      new_size--;
	      i++;
	    }
	  else
	    break;
	}
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      /* Skip slots NULLed out above.  */
      while (! gimple_switch_label (stmt, j))
	j++;
      gimple_switch_set_label (stmt, i,
			       gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}
1728 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1729 and scan the sorted vector of cases. Combine the ones jumping to the
1730 same label. */
1732 void
1733 group_case_labels (void)
1735 basic_block bb;
1737 FOR_EACH_BB_FN (bb, cfun)
1739 gimple *stmt = last_stmt (bb);
1740 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1741 group_case_labels_stmt (as_a <gswitch *> (stmt));
/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple *stmt;

  /* A must fall through unconditionally into B, and B must have no
     other predecessors.  */
  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt)
    if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
	return false;

  /* Examine the labels at the beginning of B.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree lab;
      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!label_stmt)
	break;
      lab = gimple_label_label (label_stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect simple loop latches.  We only want to avoid merging
     the latch with the loop header or with a block in another
     loop in this case.  */
  if (current_loops
      && b->loop_father->latch == b
      && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
      && (b->loop_father->header == a
	  || b->loop_father != a->loop_father))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
	return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      /* The locus survives only if the last real stmt of A or the first
	 real stmt of B carries it already.  */
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}
/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple *stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Mark the block if we change the last stmt in it.  */
      if (cfgcleanup_altered_bbs
	  && stmt_ends_bb_p (stmt))
	bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
	{
	  replace_exp (use, val);

	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
				       PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL
		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}

      if (gimple_code (stmt) != GIMPLE_PHI)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gimple *orig_stmt = stmt;
	  size_t i;

	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change sth from non-invariant to invariant, and only
	     when propagating constants.  */
	  if (is_gimple_min_invariant (val))
	    for (i = 0; i < gimple_num_ops (stmt); i++)
	      {
		tree op = gimple_op (stmt, i);
		/* Operands may be empty here.  For example, the labels
		   of a GIMPLE_COND are nulled out following the creation
		   of the corresponding CFG edges.  */
		if (op && TREE_CODE (op) == ADDR_EXPR)
		  recompute_tree_invariant_for_addr_expr (op);
	      }

	  /* Folding may replace STMT; track the current statement.  */
	  if (fold_stmt (&gsi))
	    stmt = gsi_stmt (gsi);

	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    gimple_purge_dead_eh_edges (gimple_bb (stmt));

	  update_stmt (stmt);
	}
    }

  /* Every use should have been rewritten to VAL by now.  */
  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;

      FOR_EACH_LOOP (loop, 0)
	{
	  substitute_in_loop_info (loop, name, val);
	}
    }
}
/* Merge block B into block A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi;
  gphi_iterator psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple *phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple *copy;
      bool may_replace_uses = (virtual_operand_p (def)
			       || may_propagate_copy (def, use));

      /* In case we maintain loop closed ssa form, do not propagate arguments
	 of loop exit phi nodes.  */
      if (current_loops
	  && loops_state_satisfies_p (LOOP_CLOSED_SSA)
	  && !virtual_operand_p (def)
	  && TREE_CODE (use) == SSA_NAME
	  && a->loop_father != b->loop_father)
	may_replace_uses = false;

      if (!may_replace_uses)
	{
	  gcc_assert (!virtual_operand_p (def));

	  /* Note that just emitting the copies is fine -- there is no problem
	     with ordering of phi nodes.  This is because A is the single
	     predecessor of B, therefore results of the phi nodes cannot
	     appear as arguments of the phi nodes.  */
	  copy = gimple_build_assign (def, use);
	  gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
	  remove_phi_node (&psi, false);
	}
      else
	{
	  /* If we deal with a PHI for virtual operands, we can simply
	     propagate these without fussing with folding or updating
	     the stmt.  */
	  if (virtual_operand_p (def))
	    {
	      imm_use_iterator iter;
	      use_operand_p use_p;
	      gimple *stmt;

	      FOR_EACH_IMM_USE_STMT (stmt, iter, def)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, use);

	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
	    }
	  else
	    replace_uses_by (def, use);

	  remove_phi_node (&psi, true);
	}
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple *stmt = gsi_stmt (gsi);
      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	{
	  tree label = gimple_label_label (label_stmt);
	  int lp_nr;

	  gsi_remove (&gsi, false);

	  /* Now that we can thread computed gotos, we might have
	     a situation where we have a forced label in block B
	     However, the label at the start of block B might still be
	     used in other ways (think about the runtime checking for
	     Fortran assigned gotos).  So we can not just delete the
	     label.  Instead we move the label to the start of block A.  */
	  if (FORCED_LABEL (label))
	    {
	      gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
	      gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
	    }
	  /* Other user labels keep around in a form of a debug stmt.  */
	  else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
	    {
	      gimple *dbg = gimple_build_debug_bind (label,
						     integer_zero_node,
						     stmt);
	      gimple_debug_bind_reset_value (dbg);
	      gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
	    }

	  /* A removed landing-pad label must be unhooked from its pad.  */
	  lp_nr = EH_LANDING_PAD_NR (label);
	  if (lp_nr)
	    {
	      eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
	      lp->post_landing_pad = NULL;
	    }
	}
      else
	{
	  gimple_set_bb (stmt, a);
	  gsi_next (&gsi);
	}
    }

  /* When merging two BBs, if their counts are different, the larger count
     is selected as the new bb count.  This is to handle inconsistent
     profiles.  */
  if (a->loop_father == b->loop_father)
    {
      a->count = MAX (a->count, b->count);
      a->frequency = MAX (a->frequency, b->frequency);
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}
2058 /* Return the one of two successors of BB that is not reachable by a
2059 complex edge, if there is one. Else, return BB. We use
2060 this in optimizations that use post-dominators for their heuristics,
2061 to catch the cases in C++ where function calls are involved. */
2063 basic_block
2064 single_noncomplex_succ (basic_block bb)
2066 edge e0, e1;
2067 if (EDGE_COUNT (bb->succs) != 2)
2068 return bb;
2070 e0 = EDGE_SUCC (bb, 0);
2071 e1 = EDGE_SUCC (bb, 1);
2072 if (e0->flags & EDGE_COMPLEX)
2073 return e1->dest;
2074 if (e1->flags & EDGE_COMPLEX)
2075 return e0->dest;
2077 return bb;
2080 /* T is CALL_EXPR. Set current_function_calls_* flags. */
2082 void
2083 notice_special_calls (gcall *call)
2085 int flags = gimple_call_flags (call);
2087 if (flags & ECF_MAY_BE_ALLOCA)
2088 cfun->calls_alloca = true;
2089 if (flags & ECF_RETURNS_TWICE)
2090 cfun->calls_setjmp = true;
2094 /* Clear flags set by notice_special_calls. Used by dead code removal
2095 to update the flags. */
2097 void
2098 clear_special_calls (void)
2100 cfun->calls_alloca = false;
2101 cfun->calls_setjmp = false;
2104 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2106 static void
2107 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2109 /* Since this block is no longer reachable, we can just delete all
2110 of its PHI nodes. */
2111 remove_phi_nodes (bb);
2113 /* Remove edges to BB's successors. */
2114 while (EDGE_COUNT (bb->succs) > 0)
2115 remove_edge (EDGE_SUCC (bb, 0));
/* Remove statements of basic block BB.  */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
	{
	  dump_bb (dump_file, bb, 0, TDF_BLOCKS);
	  fprintf (dump_file, "\n");
	}
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
	 with it.  */
      if (loop->latch == bb
	  || loop->header == bb)
	free_numbers_of_iterations_estimates_loop (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
	{
	  gimple *stmt = gsi_stmt (i);
	  glabel *label_stmt = dyn_cast <glabel *> (stmt);
	  if (label_stmt
	      && (FORCED_LABEL (gimple_label_label (label_stmt))
		  || DECL_NONLOCAL (gimple_label_label (label_stmt))))
	    {
	      basic_block new_bb;
	      gimple_stmt_iterator new_gsi;

	      /* A non-reachable non-local label may still be referenced.
		 But it no longer needs to carry the extra semantics of
		 non-locality.  */
	      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
		{
		  DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
		  FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
		}

	      /* Keep the still-referenced label alive by moving it into
		 the previous block.  */
	      new_bb = bb->prev_bb;
	      new_gsi = gsi_start_bb (new_bb);
	      gsi_remove (&i, false);
	      gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
	    }
	  else
	    {
	      /* Release SSA definitions.  */
	      release_defs (stmt);
	      gsi_remove (&i, true);
	    }

	  /* Removal invalidates the iterator position; re-anchor at the
	     (new) last statement, or step backwards.  */
	  if (gsi_end_p (i))
	    i = gsi_last_bb (bb);
	  else
	    gsi_prev (&i);
	}
    }

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
}
/* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
   predicate VAL, return the edge that will be taken out of the block.
   If VAL does not match a unique edge, NULL is returned.  */

edge
find_taken_edge (basic_block bb, tree val)
{
  gimple *stmt;

  stmt = last_stmt (bb);

  gcc_assert (stmt);
  gcc_assert (is_ctrl_stmt (stmt));

  if (val == NULL)
    return NULL;

  /* Only a constant predicate selects a unique edge.  */
  if (!is_gimple_min_invariant (val))
    return NULL;

  if (gimple_code (stmt) == GIMPLE_COND)
    return find_taken_edge_cond_expr (bb, val);

  if (gimple_code (stmt) == GIMPLE_SWITCH)
    return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);

  if (computed_goto_p (stmt))
    {
      /* Only optimize if the argument is a label, if the argument is
	 not a label then we can not construct a proper CFG.

	 It may be the case that we only need to allow the LABEL_REF to
	 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
	 appear inside a LABEL_EXPR just to be safe.  */
      if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
	  && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
	return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
      return NULL;
    }

  /* All control statements ending a block are handled above.  */
  gcc_unreachable ();
}
2242 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2243 statement, determine which of the outgoing edges will be taken out of the
2244 block. Return NULL if either edge may be taken. */
2246 static edge
2247 find_taken_edge_computed_goto (basic_block bb, tree val)
2249 basic_block dest;
2250 edge e = NULL;
2252 dest = label_to_block (val);
2253 if (dest)
2255 e = find_edge (bb, dest);
2256 gcc_assert (e != NULL);
2259 return e;
2262 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2263 statement, determine which of the two edges will be taken out of the
2264 block. Return NULL if either edge may be taken. */
2266 static edge
2267 find_taken_edge_cond_expr (basic_block bb, tree val)
2269 edge true_edge, false_edge;
2271 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2273 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2274 return (integer_zerop (val) ? false_edge : true_edge);
2277 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2278 statement, determine which edge will be taken out of the block. Return
2279 NULL if any edge may be taken. */
2281 static edge
2282 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2283 tree val)
2285 basic_block dest_bb;
2286 edge e;
2287 tree taken_case;
2289 taken_case = find_case_label_for_value (switch_stmt, val);
2290 dest_bb = label_to_block (CASE_LABEL (taken_case));
2292 e = find_edge (bb, dest_bb);
2293 gcc_assert (e);
2294 return e;
/* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
   We can make optimal use here of the fact that the case labels are
   sorted: We can do a binary search for a case matching VAL.  */

static tree
find_case_label_for_value (gswitch *switch_stmt, tree val)
{
  /* Label index 0 is the default label, so the binary search below
     effectively covers the sorted non-default labels in [1, n).  */
  size_t low, high, n = gimple_switch_num_labels (switch_stmt);
  tree default_case = gimple_switch_default_label (switch_stmt);

  for (low = 0, high = n; high - low > 1; )
    {
      size_t i = (high + low) / 2;
      tree t = gimple_switch_label (switch_stmt, i);
      int cmp;

      /* Cache the result of comparing CASE_LOW and val.  */
      cmp = tree_int_cst_compare (CASE_LOW (t), val);

      if (cmp > 0)
	high = i;	/* VAL is below label I's low bound.  */
      else
	low = i;	/* Label I's low bound is <= VAL; I may match.  */

      if (CASE_HIGH (t) == NULL)
	{
	  /* A single-valued case label.  */
	  if (cmp == 0)
	    return t;
	}
      else
	{
	  /* A case range.  We can only handle integer ranges.  */
	  if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
	    return t;
	}
    }

  /* No explicit label matched; the default case is taken.  */
  return default_case;
}
/* Dump a basic block on stderr.  */

void
gimple_debug_bb (basic_block bb)
{
  /* Include virtual operands, memory symbols and block boundaries so
     the debugger view carries the full contents of the block.  */
  dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
}
2349 /* Dump basic block with index N on stderr. */
2351 basic_block
2352 gimple_debug_bb_n (int n)
2354 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2355 return BASIC_BLOCK_FOR_FN (cfun, n);
/* Dump the CFG on stderr.

   FLAGS are the same used by the tree dumping functions
   (see TDF_* in dumpfile.h).  */

void
gimple_debug_cfg (int flags)
{
  /* Thin debugger-friendly wrapper around gimple_dump_cfg.  */
  gimple_dump_cfg (stderr, flags);
}
/* Dump the program showing basic block boundaries on the given FILE.

   FLAGS are the same used by the tree dumping functions (see TDF_* in
   tree.h).  */

void
gimple_dump_cfg (FILE *file, int flags)
{
  if (flags & TDF_DETAILS)
    {
      /* With details requested, emit a function header plus a brief
	 block/edge summary before the per-block dump.  */
      dump_function_header (file, current_function_decl, flags);
      fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
	       n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
	       last_basic_block_for_fn (cfun));

      brief_dump_cfg (file, flags | TDF_COMMENT);
      fprintf (file, "\n");
    }

  if (flags & TDF_STATS)
    dump_cfg_stats (file);

  /* Always dump the function body with block boundaries marked.  */
  dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
}
/* Dump CFG statistics on FILE.  */

void
dump_cfg_stats (FILE *file)
{
  /* High-water mark of merged labels, kept across calls.  */
  static long max_num_merged_labels = 0;
  unsigned long size, total = 0;
  long num_edges;
  basic_block bb;
  const char * const fmt_str = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
  const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
  const char * const fmt_str_3 = "%-43s%11lu%c\n";
  const char *funcname = current_function_name ();

  fprintf (file, "\nCFG Statistics for %s\n\n", funcname);

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str, "", " Number of ", "Memory");
  fprintf (file, fmt_str, "", " instances ", "used ");
  fprintf (file, "---------------------------------------------------------\n");

  /* Estimate memory consumed by the basic block array.  */
  size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
  total += size;
  fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
	   SCALE (size), LABEL (size));

  /* Count edges via successor lists so each edge is counted once.  */
  num_edges = 0;
  FOR_EACH_BB_FN (bb, cfun)
    num_edges += EDGE_COUNT (bb->succs);
  size = num_edges * sizeof (struct edge_def);
  total += size;
  fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
	   LABEL (total));
  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, "\n");

  if (cfg_stats.num_merged_labels > max_num_merged_labels)
    max_num_merged_labels = cfg_stats.num_merged_labels;

  fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
	   cfg_stats.num_merged_labels, max_num_merged_labels);

  fprintf (file, "\n");
}
/* Dump CFG statistics on stderr.  Keep extern so that it's always
   linked in the final executable.  */

DEBUG_FUNCTION void
debug_cfg_stats (void)
{
  dump_cfg_stats (stderr);
}
2456 /*---------------------------------------------------------------------------
2457 Miscellaneous helpers
2458 ---------------------------------------------------------------------------*/
2460 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2461 flow. Transfers of control flow associated with EH are excluded. */
2463 static bool
2464 call_can_make_abnormal_goto (gimple *t)
2466 /* If the function has no non-local labels, then a call cannot make an
2467 abnormal transfer of control. */
2468 if (!cfun->has_nonlocal_label
2469 && !cfun->calls_setjmp)
2470 return false;
2472 /* Likewise if the call has no side effects. */
2473 if (!gimple_has_side_effects (t))
2474 return false;
2476 /* Likewise if the called function is leaf. */
2477 if (gimple_call_flags (t) & ECF_LEAF)
2478 return false;
2480 return true;
2484 /* Return true if T can make an abnormal transfer of control flow.
2485 Transfers of control flow associated with EH are excluded. */
2487 bool
2488 stmt_can_make_abnormal_goto (gimple *t)
2490 if (computed_goto_p (t))
2491 return true;
2492 if (is_gimple_call (t))
2493 return call_can_make_abnormal_goto (t);
2494 return false;
2498 /* Return true if T represents a stmt that always transfers control. */
2500 bool
2501 is_ctrl_stmt (gimple *t)
2503 switch (gimple_code (t))
2505 case GIMPLE_COND:
2506 case GIMPLE_SWITCH:
2507 case GIMPLE_GOTO:
2508 case GIMPLE_RETURN:
2509 case GIMPLE_RESX:
2510 return true;
2511 default:
2512 return false;
/* Return true if T is a statement that may alter the flow of control
   (e.g., a call to a non-returning function).  */

bool
is_ctrl_altering_stmt (gimple *t)
{
  gcc_assert (t);

  switch (gimple_code (t))
    {
    case GIMPLE_CALL:
      /* Per stmt call flag indicates whether the call could alter
	 controlflow.  */
      if (gimple_call_ctrl_altering_p (t))
	return true;
      break;

    case GIMPLE_EH_DISPATCH:
      /* EH_DISPATCH branches to the individual catch handlers at
	 this level of a try or allowed-exceptions region.  It can
	 fallthru to the next statement as well.  */
      return true;

    case GIMPLE_ASM:
      /* An asm goto (one with target labels) can branch.  */
      if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
	return true;
      break;

    CASE_GIMPLE_OMP:
      /* OpenMP directives alter control flow.  */
      return true;

    case GIMPLE_TRANSACTION:
      /* A transaction start alters control flow.  */
      return true;

    default:
      break;
    }

  /* If a statement can throw, it alters control flow.  */
  return stmt_can_throw_internal (t);
}
2562 /* Return true if T is a simple local goto. */
2564 bool
2565 simple_goto_p (gimple *t)
2567 return (gimple_code (t) == GIMPLE_GOTO
2568 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
/* Return true if STMT should start a new basic block.  PREV_STMT is
   the statement preceding STMT.  It is used when STMT is a label or a
   case label.  Labels should only start a new basic block if their
   previous statement wasn't a label.  Otherwise, sequence of labels
   would generate unnecessary basic blocks that only contain a single
   label.  */

static inline bool
stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
{
  if (stmt == NULL)
    return false;

  /* Labels start a new basic block only if the preceding statement
     wasn't a label of the same type.  This prevents the creation of
     consecutive blocks that have nothing but a single label.  */
  if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
    {
      /* Nonlocal and computed GOTO targets always start a new block.  */
      if (DECL_NONLOCAL (gimple_label_label (label_stmt))
	  || FORCED_LABEL (gimple_label_label (label_stmt)))
	return true;

      if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
	{
	  /* A label may not be merged into a preceding nonlocal one.  */
	  if (DECL_NONLOCAL (gimple_label_label (
			       as_a <glabel *> (prev_stmt))))
	    return true;

	  /* Merge this label into the previous label's block and note
	     the merge in the CFG statistics.  */
	  cfg_stats.num_merged_labels++;
	  return false;
	}
      else
	return true;
    }
  else if (gimple_code (stmt) == GIMPLE_CALL
	   && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
    /* setjmp acts similar to a nonlocal GOTO target and thus should
       start a new block.  */
    return true;

  return false;
}
/* Return true if T should end a basic block.  */

bool
stmt_ends_bb_p (gimple *t)
{
  /* A block ends at a statement that always transfers control or one
     that merely may alter control flow (calls, asm gotos, throws).  */
  return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
}
/* Remove block annotations and other data structures.  */

void
delete_tree_cfg_annotations (struct function *fn)
{
  /* Release FN's label-index-to-block mapping.  */
  vec_free (label_to_block_map_for_fn (fn));
}
2633 /* Return the virtual phi in BB. */
2635 gphi *
2636 get_virtual_phi (basic_block bb)
2638 for (gphi_iterator gsi = gsi_start_phis (bb);
2639 !gsi_end_p (gsi);
2640 gsi_next (&gsi))
2642 gphi *phi = gsi.phi ();
2644 if (virtual_operand_p (PHI_RESULT (phi)))
2645 return phi;
2648 return NULL;
2651 /* Return the first statement in basic block BB. */
2653 gimple *
2654 first_stmt (basic_block bb)
2656 gimple_stmt_iterator i = gsi_start_bb (bb);
2657 gimple *stmt = NULL;
2659 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2661 gsi_next (&i);
2662 stmt = NULL;
2664 return stmt;
2667 /* Return the first non-label statement in basic block BB. */
2669 static gimple *
2670 first_non_label_stmt (basic_block bb)
2672 gimple_stmt_iterator i = gsi_start_bb (bb);
2673 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2674 gsi_next (&i);
2675 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2678 /* Return the last statement in basic block BB. */
2680 gimple *
2681 last_stmt (basic_block bb)
2683 gimple_stmt_iterator i = gsi_last_bb (bb);
2684 gimple *stmt = NULL;
2686 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2688 gsi_prev (&i);
2689 stmt = NULL;
2691 return stmt;
2694 /* Return the last statement of an otherwise empty block. Return NULL
2695 if the block is totally empty, or if it contains more than one
2696 statement. */
2698 gimple *
2699 last_and_only_stmt (basic_block bb)
2701 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2702 gimple *last, *prev;
2704 if (gsi_end_p (i))
2705 return NULL;
2707 last = gsi_stmt (i);
2708 gsi_prev_nondebug (&i);
2709 if (gsi_end_p (i))
2710 return last;
2712 /* Empty statements should no longer appear in the instruction stream.
2713 Everything that might have appeared before should be deleted by
2714 remove_useless_stmts, and the optimizers should just gsi_remove
2715 instead of smashing with build_empty_stmt.
2717 Thus the only thing that should appear here in a block containing
2718 one executable statement is a label. */
2719 prev = gsi_stmt (i);
2720 if (gimple_code (prev) == GIMPLE_LABEL)
2721 return last;
2722 else
2723 return NULL;
/* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE.  */

static void
reinstall_phi_args (edge new_edge, edge old_edge)
{
  edge_var_map *vm;
  int i;
  gphi_iterator phis;

  /* Arguments removed when OLD_EDGE was redirected are queued in its
     var map; nothing to do when no map was recorded.  */
  vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
  if (!v)
    return;

  /* Walk the queued (result, def) pairs and the destination's PHI
     chain in lockstep; each map entry corresponds to one PHI.  */
  for (i = 0, phis = gsi_start_phis (new_edge->dest);
       v->iterate (i, &vm) && !gsi_end_p (phis);
       i++, gsi_next (&phis))
    {
      gphi *phi = phis.phi ();
      tree result = redirect_edge_var_map_result (vm);
      tree arg = redirect_edge_var_map_def (vm);

      /* The map entry must describe this very PHI.  */
      gcc_assert (result == gimple_phi_result (phi));

      add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
    }

  /* The queued data has been consumed; drop it.  */
  redirect_edge_var_map_clear (old_edge);
}
2755 /* Returns the basic block after which the new basic block created
2756 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2757 near its "logical" location. This is of most help to humans looking
2758 at debugging dumps. */
2760 basic_block
2761 split_edge_bb_loc (edge edge_in)
2763 basic_block dest = edge_in->dest;
2764 basic_block dest_prev = dest->prev_bb;
2766 if (dest_prev)
2768 edge e = find_edge (dest_prev, dest);
2769 if (e && !(e->flags & EDGE_COMPLEX))
2770 return edge_in->src;
2772 return dest_prev;
/* Split a (typically critical) edge EDGE_IN.  Return the new block.
   Abort on abnormal edges.  */

static basic_block
gimple_split_edge (edge edge_in)
{
  basic_block new_bb, after_bb, dest;
  edge new_edge, e;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  dest = edge_in->dest;

  after_bb = split_edge_bb_loc (edge_in);

  /* Create the new block and give it the frequency/count of the edge
     it intercepts; its single fallthru edge to DEST is certain.  */
  new_bb = create_empty_bb (after_bb);
  new_bb->frequency = EDGE_FREQUENCY (edge_in);
  new_bb->count = edge_in->count;
  new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = edge_in->count;

  /* Point the original edge at the new block, then re-attach the PHI
     arguments (queued by the redirection) onto the new fallthru edge.  */
  e = redirect_edge_and_branch (edge_in, new_bb);
  gcc_assert (e == edge_in);
  reinstall_phi_args (new_edge, e);

  return new_bb;
}
/* Verify properties of the address expression T with base object BASE.
   Returns the offending tree on error, NULL_TREE when T is fine.  */

static tree
verify_address (tree t, tree base)
{
  bool old_constant;
  bool old_side_effects;
  bool new_constant;
  bool new_side_effects;

  /* Snapshot the cached invariant/side-effect bits, recompute them, and
     verify the cached values were up to date.  */
  old_constant = TREE_CONSTANT (t);
  old_side_effects = TREE_SIDE_EFFECTS (t);

  recompute_tree_invariant_for_addr_expr (t);
  new_side_effects = TREE_SIDE_EFFECTS (t);
  new_constant = TREE_CONSTANT (t);

  if (old_constant != new_constant)
    {
      error ("constant not recomputed when ADDR_EXPR changed");
      return t;
    }
  if (old_side_effects != new_side_effects)
    {
      error ("side effects not recomputed when ADDR_EXPR changed");
      return t;
    }

  /* Only variables, parameters and results carry the flag checked
     below; other bases need no further verification.  */
  if (!(VAR_P (base)
	|| TREE_CODE (base) == PARM_DECL
	|| TREE_CODE (base) == RESULT_DECL))
    return NULL_TREE;

  /* Taking the address of a decl is incompatible with it being a
     GIMPLE register.  */
  if (DECL_GIMPLE_REG_P (base))
    {
      error ("DECL_GIMPLE_REG_P set on a variable with address taken");
      return base;
    }

  return NULL_TREE;
}
/* Callback for walk_tree, check that all elements with address taken are
   properly noticed as such.  The DATA is an int* that is 1 if TP was seen
   inside a PHI node.  Returns the offending tree on error, NULL
   otherwise.  */

static tree
verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp, x;

  /* Do not descend into types.  */
  if (TYPE_P (t))
    *walk_subtrees = 0;

  /* Check operand N for being valid GIMPLE and give error MSG if not.  */
#define CHECK_OP(N, MSG) \
  do { if (!is_gimple_val (TREE_OPERAND (t, N)))		\
       { error (MSG); return TREE_OPERAND (t, N); }} while (0)

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      if (SSA_NAME_IN_FREE_LIST (t))
	{
	  error ("SSA name in freelist but still referenced");
	  return *tp;
	}
      break;

    case PARM_DECL:
    case VAR_DECL:
    case RESULT_DECL:
      {
	/* A non-static, non-external local must belong to the current
	   function (or to file scope).  */
	tree context = decl_function_context (t);
	if (context != cfun->decl
	    && !SCOPE_FILE_SCOPE_P (context)
	    && !TREE_STATIC (t)
	    && !DECL_EXTERNAL (t))
	  {
	    error ("Local declaration from a different function");
	    return t;
	  }
      }
      break;

    case INDIRECT_REF:
      /* GIMPLE uses MEM_REF instead.  */
      error ("INDIRECT_REF in gimple IL");
      return t;

    case MEM_REF:
      x = TREE_OPERAND (t, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (x))
	  || !is_gimple_mem_ref_addr (x))
	{
	  error ("invalid first operand of MEM_REF");
	  return x;
	}
      /* The offset operand is an INTEGER_CST of pointer type.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
	{
	  error ("invalid offset operand of MEM_REF");
	  return TREE_OPERAND (t, 1);
	}
      if (TREE_CODE (x) == ADDR_EXPR)
	{
	  tree va = verify_address (x, TREE_OPERAND (x, 0));
	  if (va)
	    return va;
	  x = TREE_OPERAND (x, 0);
	}
      /* Recurse manually on the address so we can stop the generic
	 walk for this subtree.  */
      walk_tree (&x, verify_expr, data, NULL);
      *walk_subtrees = 0;
      break;

    case ASSERT_EXPR:
      x = fold (ASSERT_EXPR_COND (t));
      if (x == boolean_false_node)
	{
	  error ("ASSERT_EXPR with an always-false condition");
	  return *tp;
	}
      break;

    case MODIFY_EXPR:
      error ("MODIFY_EXPR not expected while having tuples");
      return *tp;

    case ADDR_EXPR:
      {
	tree tem;

	gcc_assert (is_gimple_address (t));

	/* Skip any references (they will be checked when we recurse down the
	   tree) and ensure that any variable used as a prefix is marked
	   addressable.  */
	for (x = TREE_OPERAND (t, 0);
	     handled_component_p (x);
	     x = TREE_OPERAND (x, 0))
	  ;

	if ((tem = verify_address (t, x)))
	  return tem;

	if (!(VAR_P (x)
	      || TREE_CODE (x) == PARM_DECL
	      || TREE_CODE (x) == RESULT_DECL))
	  return NULL;

	if (!TREE_ADDRESSABLE (x))
	  {
	    error ("address taken, but ADDRESSABLE bit not set");
	    return x;
	  }

	break;
      }

    case COND_EXPR:
      x = COND_EXPR_COND (t);
      if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
	{
	  error ("non-integral used in condition");
	  return x;
	}
      if (!is_gimple_condexpr (x))
	{
	  error ("invalid conditional operand");
	  return x;
	}
      break;

    case NON_LVALUE_EXPR:
    case TRUTH_NOT_EXPR:
      /* These codes never survive gimplification.  */
      gcc_unreachable ();

    CASE_CONVERT:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
      CHECK_OP (0, "invalid operand to unary operator");
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
      if (!is_gimple_reg_type (TREE_TYPE (t)))
	{
	  error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
	  return t;
	}

      if (TREE_CODE (t) == BIT_FIELD_REF)
	{
	  tree t0 = TREE_OPERAND (t, 0);
	  tree t1 = TREE_OPERAND (t, 1);
	  tree t2 = TREE_OPERAND (t, 2);
	  /* Position (t2) and size (t1) must be host-representable.  */
	  if (!tree_fits_uhwi_p (t1)
	      || !tree_fits_uhwi_p (t2))
	    {
	      error ("invalid position or size operand to BIT_FIELD_REF");
	      return t;
	    }
	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	      && (TYPE_PRECISION (TREE_TYPE (t))
		  != tree_to_uhwi (t1)))
	    {
	      error ("integral result type precision does not match "
		     "field size of BIT_FIELD_REF");
	      return t;
	    }
	  else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
		   && TYPE_MODE (TREE_TYPE (t)) != BLKmode
		   && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (t)))
		       != tree_to_uhwi (t1)))
	    {
	      error ("mode size of non-integral result does not "
		     "match field size of BIT_FIELD_REF");
	      return t;
	    }
	  if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
	      && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
		  > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
	    {
	      error ("position plus size exceeds size of referenced object in "
		     "BIT_FIELD_REF");
	      return t;
	    }
	}
      t = TREE_OPERAND (t, 0);

      /* Fall-through.  */
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case VIEW_CONVERT_EXPR:
      /* We have a nest of references.  Verify that each of the operands
	 that determine where to reference is either a constant or a variable,
	 verify that the base is valid, and then show we've already checked
	 the subtrees.  */
      while (handled_component_p (t))
	{
	  if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
	    CHECK_OP (2, "invalid COMPONENT_REF offset operator");
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      CHECK_OP (1, "invalid array index");
	      if (TREE_OPERAND (t, 2))
		CHECK_OP (2, "invalid array lower bound");
	      if (TREE_OPERAND (t, 3))
		CHECK_OP (3, "invalid array stride");
	    }
	  else if (TREE_CODE (t) == BIT_FIELD_REF
		   || TREE_CODE (t) == REALPART_EXPR
		   || TREE_CODE (t) == IMAGPART_EXPR)
	    {
	      error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
		     "REALPART_EXPR");
	      return t;
	    }

	  t = TREE_OPERAND (t, 0);
	}

      if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
	{
	  error ("invalid reference prefix");
	  return t;
	}
      /* Subtrees were verified above; stop the generic walk.  */
      walk_tree (&t, verify_expr, data, NULL);
      *walk_subtrees = 0;
      break;
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
	 POINTER_PLUS_EXPR.  */
      if (POINTER_TYPE_P (TREE_TYPE (t)))
	{
	  error ("invalid operand to plus/minus, type is a pointer");
	  return t;
	}
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case POINTER_PLUS_EXPR:
      /* Check to make sure the first operand is a pointer or reference type. */
      if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
	{
	  error ("invalid operand to pointer plus, first operand is not a pointer");
	  return t;
	}
      /* Check to make sure the second operand is a ptrofftype.  */
      if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
	{
	  error ("invalid operand to pointer plus, second operand is not an "
		 "integer type of appropriate width");
	  return t;
	}
      /* FALLTHROUGH */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case CONSTRUCTOR:
      /* Constant vector constructors need no element verification.  */
      if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	*walk_subtrees = 0;
      break;

    case CASE_LABEL_EXPR:
      if (CASE_CHAIN (t))
	{
	  error ("invalid CASE_CHAIN");
	  return t;
	}
      break;

    default:
      break;
    }
  return NULL;

#undef CHECK_OP
}
/* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
   Returns true if there is an error, otherwise false.  */

static bool
verify_types_in_gimple_min_lval (tree expr)
{
  tree op;

  if (is_gimple_id (expr))
    return false;

  /* Besides plain IDs, only the two memory reference forms are
     acceptable minimal lvalues.  */
  if (TREE_CODE (expr) != TARGET_MEM_REF
      && TREE_CODE (expr) != MEM_REF)
    {
      error ("invalid expression for min lvalue");
      return true;
    }

  /* TARGET_MEM_REFs are strange beasts.  */
  if (TREE_CODE (expr) == TARGET_MEM_REF)
    return false;

  op = TREE_OPERAND (expr, 0);
  if (!is_gimple_val (op))
    {
      error ("invalid operand in indirect reference");
      debug_generic_stmt (op);
      return true;
    }
  /* Memory references now generally can involve a value conversion.  */

  return false;
}
/* Verify if EXPR is a valid GIMPLE reference expression.  If
   REQUIRE_LVALUE is true verifies it is an lvalue.  Returns true
   if there is an error, otherwise false.  */

static bool
verify_types_in_gimple_reference (tree expr, bool require_lvalue)
{
  /* Peel the handled-component chain outermost-first, checking each
     level against its operand's type.  */
  while (handled_component_p (expr))
    {
      tree op = TREE_OPERAND (expr, 0);

      if (TREE_CODE (expr) == ARRAY_REF
	  || TREE_CODE (expr) == ARRAY_RANGE_REF)
	{
	  /* Index, lower bound and stride (when present) must all be
	     GIMPLE values.  */
	  if (!is_gimple_val (TREE_OPERAND (expr, 1))
	      || (TREE_OPERAND (expr, 2)
		  && !is_gimple_val (TREE_OPERAND (expr, 2)))
	      || (TREE_OPERAND (expr, 3)
		  && !is_gimple_val (TREE_OPERAND (expr, 3))))
	    {
	      error ("invalid operands to array reference");
	      debug_generic_stmt (expr);
	      return true;
	    }
	}

      /* Verify if the reference array element types are compatible.  */
      if (TREE_CODE (expr) == ARRAY_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in array reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}
      if (TREE_CODE (expr) == ARRAY_RANGE_REF
	  && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in array range reference");
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      if ((TREE_CODE (expr) == REALPART_EXPR
	   || TREE_CODE (expr) == IMAGPART_EXPR)
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in real/imagpart reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      if (TREE_CODE (expr) == COMPONENT_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_OPERAND (expr, 1))))
	{
	  error ("type mismatch in component reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
	  return true;
	}

      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
	{
	  /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
	     that their operand is not an SSA name or an invariant when
	     requiring an lvalue (this usually means there is a SRA or IPA-SRA
	     bug).  Otherwise there is nothing to verify, gross mismatches at
	     most invoke undefined behavior.  */
	  if (require_lvalue
	      && (TREE_CODE (op) == SSA_NAME
		  || is_gimple_min_invariant (op)))
	    {
	      error ("conversion of an SSA_NAME on the left hand side");
	      debug_generic_stmt (expr);
	      return true;
	    }
	  else if (TREE_CODE (op) == SSA_NAME
		   && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
	    {
	      error ("conversion of register to a different size");
	      debug_generic_stmt (expr);
	      return true;
	    }
	  else if (!handled_component_p (op))
	    return false;
	}

      expr = op;
    }

  /* Check the innermost (base) reference.  */
  if (TREE_CODE (expr) == MEM_REF)
    {
      if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
	{
	  error ("invalid address operand in MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
      if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
	{
	  error ("invalid offset operand in MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
    }
  else if (TREE_CODE (expr) == TARGET_MEM_REF)
    {
      if (!TMR_BASE (expr)
	  || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
	{
	  error ("invalid address operand in TARGET_MEM_REF");
	  return true;
	}
      if (!TMR_OFFSET (expr)
	  || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
	{
	  error ("invalid offset operand in TARGET_MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
    }

  /* Finally check the base as a minimal lvalue; invariants are
     acceptable bases only when no lvalue is required.  */
  return ((require_lvalue || !is_gimple_min_invariant (expr))
	  && verify_types_in_gimple_min_lval (expr));
}
3337 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3338 list of pointer-to types that is trivially convertible to DEST. */
3340 static bool
3341 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3343 tree src;
3345 if (!TYPE_POINTER_TO (src_obj))
3346 return true;
3348 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3349 if (useless_type_conversion_p (dest, src))
3350 return true;
3352 return false;
3355 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3356 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3358 static bool
3359 valid_fixed_convert_types_p (tree type1, tree type2)
3361 return (FIXED_POINT_TYPE_P (type1)
3362 && (INTEGRAL_TYPE_P (type2)
3363 || SCALAR_FLOAT_TYPE_P (type2)
3364 || FIXED_POINT_TYPE_P (type2)));
/* Verify the contents of a GIMPLE_CALL STMT.  Returns true when there
   is a problem, otherwise false.  */

static bool
verify_gimple_call (gcall *stmt)
{
  tree fn = gimple_call_fn (stmt);
  tree fntype, fndecl;
  unsigned i;

  /* Internal calls have no callee expression; regular calls must
     have one.  */
  if (gimple_call_internal_p (stmt))
    {
      if (fn)
	{
	  error ("gimple call has two targets");
	  debug_generic_stmt (fn);
	  return true;
	}
      /* FIXME : for passing label as arg in internal fn PHI from GIMPLE FE*/
      else if (gimple_call_internal_fn (stmt) == IFN_PHI)
	{
	  return false;
	}
    }
  else
    {
      if (!fn)
	{
	  error ("gimple call has no target");
	  return true;
	}
    }

  if (fn && !is_gimple_call_addr (fn))
    {
      error ("invalid function in gimple call");
      debug_generic_stmt (fn);
      return true;
    }

  /* The callee expression must be a pointer to a function or method.  */
  if (fn
      && (!POINTER_TYPE_P (TREE_TYPE (fn))
	  || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
    {
      error ("non-function in gimple call");
      return true;
    }

  /* Looping-const-or-pure only makes sense on const or pure decls.  */
  fndecl = gimple_call_fndecl (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
      && !DECL_PURE_P (fndecl)
      && !TREE_READONLY (fndecl))
    {
      error ("invalid pure const state for function");
      return true;
    }

  tree lhs = gimple_call_lhs (stmt);
  if (lhs
      && (!is_gimple_lvalue (lhs)
	  || verify_types_in_gimple_reference (lhs, true)))
    {
      error ("invalid LHS in gimple call");
      return true;
    }

  if (gimple_call_ctrl_altering_p (stmt)
      && gimple_call_noreturn_p (stmt)
      && should_remove_lhs_p (lhs))
    {
      error ("LHS in noreturn call");
      return true;
    }

  fntype = gimple_call_fntype (stmt);
  if (fntype
      && lhs
      && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
      /* ??? At least C++ misses conversions at assignments from
	 void * call results.
	 ??? Java is completely off.  Especially with functions
	 returning java.lang.Object.
	 For now simply allow arbitrary pointer type conversions.  */
      && !(POINTER_TYPE_P (TREE_TYPE (lhs))
	   && POINTER_TYPE_P (TREE_TYPE (fntype))))
    {
      error ("invalid conversion in gimple call");
      debug_generic_stmt (TREE_TYPE (lhs));
      debug_generic_stmt (TREE_TYPE (fntype));
      return true;
    }

  if (gimple_call_chain (stmt)
      && !is_gimple_val (gimple_call_chain (stmt)))
    {
      error ("invalid static chain in gimple call");
      debug_generic_stmt (gimple_call_chain (stmt));
      return true;
    }

  /* If there is a static chain argument, the call should either be
     indirect, or the decl should have DECL_STATIC_CHAIN set.  */
  if (gimple_call_chain (stmt)
      && fndecl
      && !DECL_STATIC_CHAIN (fndecl))
    {
      error ("static chain with function that doesn%'t use one");
      return true;
    }

  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_UNREACHABLE:
	case BUILT_IN_TRAP:
	  if (gimple_call_num_args (stmt) > 0)
	    {
	      /* Built-in unreachable with parameters might not be caught by
		 undefined behavior sanitizer.  Front-ends do check users do not
		 call them that way but we also produce calls to
		 __builtin_unreachable internally, for example when IPA figures
		 out a call cannot happen in a legal program.  In such cases,
		 we must make sure arguments are stripped off.  */
	      error ("__builtin_unreachable or __builtin_trap call with "
		     "arguments");
	      return true;
	    }
	  break;
	default:
	  break;
	}
    }

  /* ??? The C frontend passes unpromoted arguments in case it
     didn't see a function declaration before the call.  So for now
     leave the call arguments mostly unverified.  Once we gimplify
     unit-at-a-time we have a chance to fix this.  */

  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      /* Register-type arguments must be GIMPLE values; aggregate
	 arguments must at least be lvalues.  */
      tree arg = gimple_call_arg (stmt, i);
      if ((is_gimple_reg_type (TREE_TYPE (arg))
	   && !is_gimple_val (arg))
	  || (!is_gimple_reg_type (TREE_TYPE (arg))
	      && !is_gimple_lvalue (arg)))
	{
	  error ("invalid argument to gimple call");
	  debug_generic_expr (arg);
	  return true;
	}
    }

  return false;
}
3526 /* Verifies the gimple comparison with the result type TYPE and
3527 the operands OP0 and OP1, comparison code is CODE. */
3529 static bool
3530 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3532 tree op0_type = TREE_TYPE (op0);
3533 tree op1_type = TREE_TYPE (op1);
3535 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3537 error ("invalid operands in gimple comparison");
3538 return true;
3541 /* For comparisons we do not have the operations type as the
3542 effective type the comparison is carried out in. Instead
3543 we require that either the first operand is trivially
3544 convertible into the second, or the other way around.
3545 Because we special-case pointers to void we allow
3546 comparisons of pointers with the same mode as well. */
3547 if (!useless_type_conversion_p (op0_type, op1_type)
3548 && !useless_type_conversion_p (op1_type, op0_type)
3549 && (!POINTER_TYPE_P (op0_type)
3550 || !POINTER_TYPE_P (op1_type)
3551 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3553 error ("mismatching comparison operand types");
3554 debug_generic_expr (op0_type);
3555 debug_generic_expr (op1_type);
3556 return true;
3559 /* The resulting type of a comparison may be an effective boolean type. */
3560 if (INTEGRAL_TYPE_P (type)
3561 && (TREE_CODE (type) == BOOLEAN_TYPE
3562 || TYPE_PRECISION (type) == 1))
3564 if ((TREE_CODE (op0_type) == VECTOR_TYPE
3565 || TREE_CODE (op1_type) == VECTOR_TYPE)
3566 && code != EQ_EXPR && code != NE_EXPR
3567 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3568 && !VECTOR_INTEGER_TYPE_P (op0_type))
3570 error ("unsupported operation or type for vector comparison"
3571 " returning a boolean");
3572 debug_generic_expr (op0_type);
3573 debug_generic_expr (op1_type);
3574 return true;
3577 /* Or a boolean vector type with the same element count
3578 as the comparison operand types. */
3579 else if (TREE_CODE (type) == VECTOR_TYPE
3580 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3582 if (TREE_CODE (op0_type) != VECTOR_TYPE
3583 || TREE_CODE (op1_type) != VECTOR_TYPE)
3585 error ("non-vector operands in vector comparison");
3586 debug_generic_expr (op0_type);
3587 debug_generic_expr (op1_type);
3588 return true;
3591 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type))
3593 error ("invalid vector comparison resulting type");
3594 debug_generic_expr (type);
3595 return true;
3598 else
3600 error ("bogus comparison result type");
3601 debug_generic_expr (type);
3602 return true;
3605 return false;
3608 /* Verify a gimple assignment statement STMT with an unary rhs.
3609 Returns true if anything is wrong. */
3611 static bool
3612 verify_gimple_assign_unary (gassign *stmt)
3614 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3615 tree lhs = gimple_assign_lhs (stmt);
3616 tree lhs_type = TREE_TYPE (lhs);
3617 tree rhs1 = gimple_assign_rhs1 (stmt);
3618 tree rhs1_type = TREE_TYPE (rhs1);
3620 if (!is_gimple_reg (lhs))
3622 error ("non-register as LHS of unary operation");
3623 return true;
3626 if (!is_gimple_val (rhs1))
3628 error ("invalid operand in unary operation");
3629 return true;
3632 /* First handle conversions. */
3633 switch (rhs_code)
3635 CASE_CONVERT:
3637 /* Allow conversions from pointer type to integral type only if
3638 there is no sign or zero extension involved.
3639 For targets were the precision of ptrofftype doesn't match that
3640 of pointers we need to allow arbitrary conversions to ptrofftype. */
3641 if ((POINTER_TYPE_P (lhs_type)
3642 && INTEGRAL_TYPE_P (rhs1_type))
3643 || (POINTER_TYPE_P (rhs1_type)
3644 && INTEGRAL_TYPE_P (lhs_type)
3645 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3646 || ptrofftype_p (sizetype))))
3647 return false;
3649 /* Allow conversion from integral to offset type and vice versa. */
3650 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3651 && INTEGRAL_TYPE_P (rhs1_type))
3652 || (INTEGRAL_TYPE_P (lhs_type)
3653 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3654 return false;
3656 /* Otherwise assert we are converting between types of the
3657 same kind. */
3658 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3660 error ("invalid types in nop conversion");
3661 debug_generic_expr (lhs_type);
3662 debug_generic_expr (rhs1_type);
3663 return true;
3666 return false;
3669 case ADDR_SPACE_CONVERT_EXPR:
3671 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3672 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3673 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3675 error ("invalid types in address space conversion");
3676 debug_generic_expr (lhs_type);
3677 debug_generic_expr (rhs1_type);
3678 return true;
3681 return false;
3684 case FIXED_CONVERT_EXPR:
3686 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3687 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3689 error ("invalid types in fixed-point conversion");
3690 debug_generic_expr (lhs_type);
3691 debug_generic_expr (rhs1_type);
3692 return true;
3695 return false;
3698 case FLOAT_EXPR:
3700 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3701 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3702 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3704 error ("invalid types in conversion to floating point");
3705 debug_generic_expr (lhs_type);
3706 debug_generic_expr (rhs1_type);
3707 return true;
3710 return false;
3713 case FIX_TRUNC_EXPR:
3715 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3716 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3717 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3719 error ("invalid types in conversion to integer");
3720 debug_generic_expr (lhs_type);
3721 debug_generic_expr (rhs1_type);
3722 return true;
3725 return false;
3727 case REDUC_MAX_EXPR:
3728 case REDUC_MIN_EXPR:
3729 case REDUC_PLUS_EXPR:
3730 if (!VECTOR_TYPE_P (rhs1_type)
3731 || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3733 error ("reduction should convert from vector to element type");
3734 debug_generic_expr (lhs_type);
3735 debug_generic_expr (rhs1_type);
3736 return true;
3738 return false;
3740 case VEC_UNPACK_HI_EXPR:
3741 case VEC_UNPACK_LO_EXPR:
3742 case VEC_UNPACK_FLOAT_HI_EXPR:
3743 case VEC_UNPACK_FLOAT_LO_EXPR:
3744 /* FIXME. */
3745 return false;
3747 case NEGATE_EXPR:
3748 case ABS_EXPR:
3749 case BIT_NOT_EXPR:
3750 case PAREN_EXPR:
3751 case CONJ_EXPR:
3752 break;
3754 default:
3755 gcc_unreachable ();
3758 /* For the remaining codes assert there is no conversion involved. */
3759 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3761 error ("non-trivial conversion in unary operation");
3762 debug_generic_expr (lhs_type);
3763 debug_generic_expr (rhs1_type);
3764 return true;
3767 return false;
3770 /* Verify a gimple assignment statement STMT with a binary rhs.
3771 Returns true if anything is wrong. */
3773 static bool
3774 verify_gimple_assign_binary (gassign *stmt)
3776 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3777 tree lhs = gimple_assign_lhs (stmt);
3778 tree lhs_type = TREE_TYPE (lhs);
3779 tree rhs1 = gimple_assign_rhs1 (stmt);
3780 tree rhs1_type = TREE_TYPE (rhs1);
3781 tree rhs2 = gimple_assign_rhs2 (stmt);
3782 tree rhs2_type = TREE_TYPE (rhs2);
3784 if (!is_gimple_reg (lhs))
3786 error ("non-register as LHS of binary operation");
3787 return true;
3790 if (!is_gimple_val (rhs1)
3791 || !is_gimple_val (rhs2))
3793 error ("invalid operands in binary operation");
3794 return true;
3797 /* First handle operations that involve different types. */
3798 switch (rhs_code)
3800 case COMPLEX_EXPR:
3802 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3803 || !(INTEGRAL_TYPE_P (rhs1_type)
3804 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3805 || !(INTEGRAL_TYPE_P (rhs2_type)
3806 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3808 error ("type mismatch in complex expression");
3809 debug_generic_expr (lhs_type);
3810 debug_generic_expr (rhs1_type);
3811 debug_generic_expr (rhs2_type);
3812 return true;
3815 return false;
3818 case LSHIFT_EXPR:
3819 case RSHIFT_EXPR:
3820 case LROTATE_EXPR:
3821 case RROTATE_EXPR:
3823 /* Shifts and rotates are ok on integral types, fixed point
3824 types and integer vector types. */
3825 if ((!INTEGRAL_TYPE_P (rhs1_type)
3826 && !FIXED_POINT_TYPE_P (rhs1_type)
3827 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3828 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3829 || (!INTEGRAL_TYPE_P (rhs2_type)
3830 /* Vector shifts of vectors are also ok. */
3831 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3832 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3833 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3834 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3835 || !useless_type_conversion_p (lhs_type, rhs1_type))
3837 error ("type mismatch in shift expression");
3838 debug_generic_expr (lhs_type);
3839 debug_generic_expr (rhs1_type);
3840 debug_generic_expr (rhs2_type);
3841 return true;
3844 return false;
3847 case WIDEN_LSHIFT_EXPR:
3849 if (!INTEGRAL_TYPE_P (lhs_type)
3850 || !INTEGRAL_TYPE_P (rhs1_type)
3851 || TREE_CODE (rhs2) != INTEGER_CST
3852 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3854 error ("type mismatch in widening vector shift expression");
3855 debug_generic_expr (lhs_type);
3856 debug_generic_expr (rhs1_type);
3857 debug_generic_expr (rhs2_type);
3858 return true;
3861 return false;
3864 case VEC_WIDEN_LSHIFT_HI_EXPR:
3865 case VEC_WIDEN_LSHIFT_LO_EXPR:
3867 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3868 || TREE_CODE (lhs_type) != VECTOR_TYPE
3869 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3870 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3871 || TREE_CODE (rhs2) != INTEGER_CST
3872 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3873 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3875 error ("type mismatch in widening vector shift expression");
3876 debug_generic_expr (lhs_type);
3877 debug_generic_expr (rhs1_type);
3878 debug_generic_expr (rhs2_type);
3879 return true;
3882 return false;
3885 case PLUS_EXPR:
3886 case MINUS_EXPR:
3888 tree lhs_etype = lhs_type;
3889 tree rhs1_etype = rhs1_type;
3890 tree rhs2_etype = rhs2_type;
3891 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3893 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3894 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3896 error ("invalid non-vector operands to vector valued plus");
3897 return true;
3899 lhs_etype = TREE_TYPE (lhs_type);
3900 rhs1_etype = TREE_TYPE (rhs1_type);
3901 rhs2_etype = TREE_TYPE (rhs2_type);
3903 if (POINTER_TYPE_P (lhs_etype)
3904 || POINTER_TYPE_P (rhs1_etype)
3905 || POINTER_TYPE_P (rhs2_etype))
3907 error ("invalid (pointer) operands to plus/minus");
3908 return true;
3911 /* Continue with generic binary expression handling. */
3912 break;
3915 case POINTER_PLUS_EXPR:
3917 if (!POINTER_TYPE_P (rhs1_type)
3918 || !useless_type_conversion_p (lhs_type, rhs1_type)
3919 || !ptrofftype_p (rhs2_type))
3921 error ("type mismatch in pointer plus expression");
3922 debug_generic_stmt (lhs_type);
3923 debug_generic_stmt (rhs1_type);
3924 debug_generic_stmt (rhs2_type);
3925 return true;
3928 return false;
3931 case TRUTH_ANDIF_EXPR:
3932 case TRUTH_ORIF_EXPR:
3933 case TRUTH_AND_EXPR:
3934 case TRUTH_OR_EXPR:
3935 case TRUTH_XOR_EXPR:
3937 gcc_unreachable ();
3939 case LT_EXPR:
3940 case LE_EXPR:
3941 case GT_EXPR:
3942 case GE_EXPR:
3943 case EQ_EXPR:
3944 case NE_EXPR:
3945 case UNORDERED_EXPR:
3946 case ORDERED_EXPR:
3947 case UNLT_EXPR:
3948 case UNLE_EXPR:
3949 case UNGT_EXPR:
3950 case UNGE_EXPR:
3951 case UNEQ_EXPR:
3952 case LTGT_EXPR:
3953 /* Comparisons are also binary, but the result type is not
3954 connected to the operand types. */
3955 return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
3957 case WIDEN_MULT_EXPR:
3958 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3959 return true;
3960 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3961 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3963 case WIDEN_SUM_EXPR:
3964 case VEC_WIDEN_MULT_HI_EXPR:
3965 case VEC_WIDEN_MULT_LO_EXPR:
3966 case VEC_WIDEN_MULT_EVEN_EXPR:
3967 case VEC_WIDEN_MULT_ODD_EXPR:
3968 case VEC_PACK_TRUNC_EXPR:
3969 case VEC_PACK_SAT_EXPR:
3970 case VEC_PACK_FIX_TRUNC_EXPR:
3971 /* FIXME. */
3972 return false;
3974 case MULT_EXPR:
3975 case MULT_HIGHPART_EXPR:
3976 case TRUNC_DIV_EXPR:
3977 case CEIL_DIV_EXPR:
3978 case FLOOR_DIV_EXPR:
3979 case ROUND_DIV_EXPR:
3980 case TRUNC_MOD_EXPR:
3981 case CEIL_MOD_EXPR:
3982 case FLOOR_MOD_EXPR:
3983 case ROUND_MOD_EXPR:
3984 case RDIV_EXPR:
3985 case EXACT_DIV_EXPR:
3986 case MIN_EXPR:
3987 case MAX_EXPR:
3988 case BIT_IOR_EXPR:
3989 case BIT_XOR_EXPR:
3990 case BIT_AND_EXPR:
3991 /* Continue with generic binary expression handling. */
3992 break;
3994 default:
3995 gcc_unreachable ();
3998 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3999 || !useless_type_conversion_p (lhs_type, rhs2_type))
4001 error ("type mismatch in binary expression");
4002 debug_generic_stmt (lhs_type);
4003 debug_generic_stmt (rhs1_type);
4004 debug_generic_stmt (rhs2_type);
4005 return true;
4008 return false;
4011 /* Verify a gimple assignment statement STMT with a ternary rhs.
4012 Returns true if anything is wrong. */
4014 static bool
4015 verify_gimple_assign_ternary (gassign *stmt)
4017 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4018 tree lhs = gimple_assign_lhs (stmt);
4019 tree lhs_type = TREE_TYPE (lhs);
4020 tree rhs1 = gimple_assign_rhs1 (stmt);
4021 tree rhs1_type = TREE_TYPE (rhs1);
4022 tree rhs2 = gimple_assign_rhs2 (stmt);
4023 tree rhs2_type = TREE_TYPE (rhs2);
4024 tree rhs3 = gimple_assign_rhs3 (stmt);
4025 tree rhs3_type = TREE_TYPE (rhs3);
4027 if (!is_gimple_reg (lhs))
4029 error ("non-register as LHS of ternary operation");
4030 return true;
4033 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
4034 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
4035 || !is_gimple_val (rhs2)
4036 || !is_gimple_val (rhs3))
4038 error ("invalid operands in ternary operation");
4039 return true;
4042 /* First handle operations that involve different types. */
4043 switch (rhs_code)
4045 case WIDEN_MULT_PLUS_EXPR:
4046 case WIDEN_MULT_MINUS_EXPR:
4047 if ((!INTEGRAL_TYPE_P (rhs1_type)
4048 && !FIXED_POINT_TYPE_P (rhs1_type))
4049 || !useless_type_conversion_p (rhs1_type, rhs2_type)
4050 || !useless_type_conversion_p (lhs_type, rhs3_type)
4051 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4052 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4054 error ("type mismatch in widening multiply-accumulate expression");
4055 debug_generic_expr (lhs_type);
4056 debug_generic_expr (rhs1_type);
4057 debug_generic_expr (rhs2_type);
4058 debug_generic_expr (rhs3_type);
4059 return true;
4061 break;
4063 case FMA_EXPR:
4064 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4065 || !useless_type_conversion_p (lhs_type, rhs2_type)
4066 || !useless_type_conversion_p (lhs_type, rhs3_type))
4068 error ("type mismatch in fused multiply-add expression");
4069 debug_generic_expr (lhs_type);
4070 debug_generic_expr (rhs1_type);
4071 debug_generic_expr (rhs2_type);
4072 debug_generic_expr (rhs3_type);
4073 return true;
4075 break;
4077 case VEC_COND_EXPR:
4078 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4079 || TYPE_VECTOR_SUBPARTS (rhs1_type)
4080 != TYPE_VECTOR_SUBPARTS (lhs_type))
4082 error ("the first argument of a VEC_COND_EXPR must be of a "
4083 "boolean vector type of the same number of elements "
4084 "as the result");
4085 debug_generic_expr (lhs_type);
4086 debug_generic_expr (rhs1_type);
4087 return true;
4089 /* Fallthrough. */
4090 case COND_EXPR:
4091 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4092 || !useless_type_conversion_p (lhs_type, rhs3_type))
4094 error ("type mismatch in conditional expression");
4095 debug_generic_expr (lhs_type);
4096 debug_generic_expr (rhs2_type);
4097 debug_generic_expr (rhs3_type);
4098 return true;
4100 break;
4102 case VEC_PERM_EXPR:
4103 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4104 || !useless_type_conversion_p (lhs_type, rhs2_type))
4106 error ("type mismatch in vector permute expression");
4107 debug_generic_expr (lhs_type);
4108 debug_generic_expr (rhs1_type);
4109 debug_generic_expr (rhs2_type);
4110 debug_generic_expr (rhs3_type);
4111 return true;
4114 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4115 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4116 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4118 error ("vector types expected in vector permute expression");
4119 debug_generic_expr (lhs_type);
4120 debug_generic_expr (rhs1_type);
4121 debug_generic_expr (rhs2_type);
4122 debug_generic_expr (rhs3_type);
4123 return true;
4126 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
4127 || TYPE_VECTOR_SUBPARTS (rhs2_type)
4128 != TYPE_VECTOR_SUBPARTS (rhs3_type)
4129 || TYPE_VECTOR_SUBPARTS (rhs3_type)
4130 != TYPE_VECTOR_SUBPARTS (lhs_type))
4132 error ("vectors with different element number found "
4133 "in vector permute expression");
4134 debug_generic_expr (lhs_type);
4135 debug_generic_expr (rhs1_type);
4136 debug_generic_expr (rhs2_type);
4137 debug_generic_expr (rhs3_type);
4138 return true;
4141 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4142 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
4143 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
4145 error ("invalid mask type in vector permute expression");
4146 debug_generic_expr (lhs_type);
4147 debug_generic_expr (rhs1_type);
4148 debug_generic_expr (rhs2_type);
4149 debug_generic_expr (rhs3_type);
4150 return true;
4153 return false;
4155 case SAD_EXPR:
4156 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4157 || !useless_type_conversion_p (lhs_type, rhs3_type)
4158 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4159 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4161 error ("type mismatch in sad expression");
4162 debug_generic_expr (lhs_type);
4163 debug_generic_expr (rhs1_type);
4164 debug_generic_expr (rhs2_type);
4165 debug_generic_expr (rhs3_type);
4166 return true;
4169 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4170 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4171 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4173 error ("vector types expected in sad expression");
4174 debug_generic_expr (lhs_type);
4175 debug_generic_expr (rhs1_type);
4176 debug_generic_expr (rhs2_type);
4177 debug_generic_expr (rhs3_type);
4178 return true;
4181 return false;
4183 case BIT_INSERT_EXPR:
4184 if (! useless_type_conversion_p (lhs_type, rhs1_type))
4186 error ("type mismatch in BIT_INSERT_EXPR");
4187 debug_generic_expr (lhs_type);
4188 debug_generic_expr (rhs1_type);
4189 return true;
4191 if (! ((INTEGRAL_TYPE_P (rhs1_type)
4192 && INTEGRAL_TYPE_P (rhs2_type))
4193 || (VECTOR_TYPE_P (rhs1_type)
4194 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))))
4196 error ("not allowed type combination in BIT_INSERT_EXPR");
4197 debug_generic_expr (rhs1_type);
4198 debug_generic_expr (rhs2_type);
4199 return true;
4201 if (! tree_fits_uhwi_p (rhs3)
4202 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4204 error ("invalid position or size in BIT_INSERT_EXPR");
4205 return true;
4207 if (INTEGRAL_TYPE_P (rhs1_type))
4209 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4210 if (bitpos >= TYPE_PRECISION (rhs1_type)
4211 || (bitpos + TYPE_PRECISION (rhs2_type)
4212 > TYPE_PRECISION (rhs1_type)))
4214 error ("insertion out of range in BIT_INSERT_EXPR");
4215 return true;
4218 else if (VECTOR_TYPE_P (rhs1_type))
4220 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4221 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4222 if (bitpos % bitsize != 0)
4224 error ("vector insertion not at element boundary");
4225 return true;
4228 return false;
4230 case DOT_PROD_EXPR:
4231 case REALIGN_LOAD_EXPR:
4232 /* FIXME. */
4233 return false;
4235 default:
4236 gcc_unreachable ();
4238 return false;
4241 /* Verify a gimple assignment statement STMT with a single rhs.
4242 Returns true if anything is wrong. */
4244 static bool
4245 verify_gimple_assign_single (gassign *stmt)
4247 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4248 tree lhs = gimple_assign_lhs (stmt);
4249 tree lhs_type = TREE_TYPE (lhs);
4250 tree rhs1 = gimple_assign_rhs1 (stmt);
4251 tree rhs1_type = TREE_TYPE (rhs1);
4252 bool res = false;
4254 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4256 error ("non-trivial conversion at assignment");
4257 debug_generic_expr (lhs_type);
4258 debug_generic_expr (rhs1_type);
4259 return true;
4262 if (gimple_clobber_p (stmt)
4263 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4265 error ("non-decl/MEM_REF LHS in clobber statement");
4266 debug_generic_expr (lhs);
4267 return true;
4270 if (handled_component_p (lhs)
4271 || TREE_CODE (lhs) == MEM_REF
4272 || TREE_CODE (lhs) == TARGET_MEM_REF)
4273 res |= verify_types_in_gimple_reference (lhs, true);
4275 /* Special codes we cannot handle via their class. */
4276 switch (rhs_code)
4278 case ADDR_EXPR:
4280 tree op = TREE_OPERAND (rhs1, 0);
4281 if (!is_gimple_addressable (op))
4283 error ("invalid operand in unary expression");
4284 return true;
4287 /* Technically there is no longer a need for matching types, but
4288 gimple hygiene asks for this check. In LTO we can end up
4289 combining incompatible units and thus end up with addresses
4290 of globals that change their type to a common one. */
4291 if (!in_lto_p
4292 && !types_compatible_p (TREE_TYPE (op),
4293 TREE_TYPE (TREE_TYPE (rhs1)))
4294 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4295 TREE_TYPE (op)))
4297 error ("type mismatch in address expression");
4298 debug_generic_stmt (TREE_TYPE (rhs1));
4299 debug_generic_stmt (TREE_TYPE (op));
4300 return true;
4303 return verify_types_in_gimple_reference (op, true);
4306 /* tcc_reference */
4307 case INDIRECT_REF:
4308 error ("INDIRECT_REF in gimple IL");
4309 return true;
4311 case COMPONENT_REF:
4312 case BIT_FIELD_REF:
4313 case ARRAY_REF:
4314 case ARRAY_RANGE_REF:
4315 case VIEW_CONVERT_EXPR:
4316 case REALPART_EXPR:
4317 case IMAGPART_EXPR:
4318 case TARGET_MEM_REF:
4319 case MEM_REF:
4320 if (!is_gimple_reg (lhs)
4321 && is_gimple_reg_type (TREE_TYPE (lhs)))
4323 error ("invalid rhs for gimple memory store");
4324 debug_generic_stmt (lhs);
4325 debug_generic_stmt (rhs1);
4326 return true;
4328 return res || verify_types_in_gimple_reference (rhs1, false);
4330 /* tcc_constant */
4331 case SSA_NAME:
4332 case INTEGER_CST:
4333 case REAL_CST:
4334 case FIXED_CST:
4335 case COMPLEX_CST:
4336 case VECTOR_CST:
4337 case STRING_CST:
4338 return res;
4340 /* tcc_declaration */
4341 case CONST_DECL:
4342 return res;
4343 case VAR_DECL:
4344 case PARM_DECL:
4345 if (!is_gimple_reg (lhs)
4346 && !is_gimple_reg (rhs1)
4347 && is_gimple_reg_type (TREE_TYPE (lhs)))
4349 error ("invalid rhs for gimple memory store");
4350 debug_generic_stmt (lhs);
4351 debug_generic_stmt (rhs1);
4352 return true;
4354 return res;
4356 case CONSTRUCTOR:
4357 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4359 unsigned int i;
4360 tree elt_i, elt_v, elt_t = NULL_TREE;
4362 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4363 return res;
4364 /* For vector CONSTRUCTORs we require that either it is empty
4365 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4366 (then the element count must be correct to cover the whole
4367 outer vector and index must be NULL on all elements, or it is
4368 a CONSTRUCTOR of scalar elements, where we as an exception allow
4369 smaller number of elements (assuming zero filling) and
4370 consecutive indexes as compared to NULL indexes (such
4371 CONSTRUCTORs can appear in the IL from FEs). */
4372 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4374 if (elt_t == NULL_TREE)
4376 elt_t = TREE_TYPE (elt_v);
4377 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4379 tree elt_t = TREE_TYPE (elt_v);
4380 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4381 TREE_TYPE (elt_t)))
4383 error ("incorrect type of vector CONSTRUCTOR"
4384 " elements");
4385 debug_generic_stmt (rhs1);
4386 return true;
4388 else if (CONSTRUCTOR_NELTS (rhs1)
4389 * TYPE_VECTOR_SUBPARTS (elt_t)
4390 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4392 error ("incorrect number of vector CONSTRUCTOR"
4393 " elements");
4394 debug_generic_stmt (rhs1);
4395 return true;
4398 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4399 elt_t))
4401 error ("incorrect type of vector CONSTRUCTOR elements");
4402 debug_generic_stmt (rhs1);
4403 return true;
4405 else if (CONSTRUCTOR_NELTS (rhs1)
4406 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4408 error ("incorrect number of vector CONSTRUCTOR elements");
4409 debug_generic_stmt (rhs1);
4410 return true;
4413 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4415 error ("incorrect type of vector CONSTRUCTOR elements");
4416 debug_generic_stmt (rhs1);
4417 return true;
4419 if (elt_i != NULL_TREE
4420 && (TREE_CODE (elt_t) == VECTOR_TYPE
4421 || TREE_CODE (elt_i) != INTEGER_CST
4422 || compare_tree_int (elt_i, i) != 0))
4424 error ("vector CONSTRUCTOR with non-NULL element index");
4425 debug_generic_stmt (rhs1);
4426 return true;
4428 if (!is_gimple_val (elt_v))
4430 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4431 debug_generic_stmt (rhs1);
4432 return true;
4436 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4438 error ("non-vector CONSTRUCTOR with elements");
4439 debug_generic_stmt (rhs1);
4440 return true;
4442 return res;
4443 case OBJ_TYPE_REF:
4444 case ASSERT_EXPR:
4445 case WITH_SIZE_EXPR:
4446 /* FIXME. */
4447 return res;
4449 default:;
4452 return res;
4455 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4456 is a problem, otherwise false. */
4458 static bool
4459 verify_gimple_assign (gassign *stmt)
4461 switch (gimple_assign_rhs_class (stmt))
4463 case GIMPLE_SINGLE_RHS:
4464 return verify_gimple_assign_single (stmt);
4466 case GIMPLE_UNARY_RHS:
4467 return verify_gimple_assign_unary (stmt);
4469 case GIMPLE_BINARY_RHS:
4470 return verify_gimple_assign_binary (stmt);
4472 case GIMPLE_TERNARY_RHS:
4473 return verify_gimple_assign_ternary (stmt);
4475 default:
4476 gcc_unreachable ();
4480 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4481 is a problem, otherwise false. */
4483 static bool
4484 verify_gimple_return (greturn *stmt)
4486 tree op = gimple_return_retval (stmt);
4487 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4489 /* We cannot test for present return values as we do not fix up missing
4490 return values from the original source. */
4491 if (op == NULL)
4492 return false;
4494 if (!is_gimple_val (op)
4495 && TREE_CODE (op) != RESULT_DECL)
4497 error ("invalid operand in return statement");
4498 debug_generic_stmt (op);
4499 return true;
4502 if ((TREE_CODE (op) == RESULT_DECL
4503 && DECL_BY_REFERENCE (op))
4504 || (TREE_CODE (op) == SSA_NAME
4505 && SSA_NAME_VAR (op)
4506 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4507 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4508 op = TREE_TYPE (op);
4510 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4512 error ("invalid conversion in return statement");
4513 debug_generic_stmt (restype);
4514 debug_generic_stmt (TREE_TYPE (op));
4515 return true;
4518 return false;
4522 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4523 is a problem, otherwise false. */
4525 static bool
4526 verify_gimple_goto (ggoto *stmt)
4528 tree dest = gimple_goto_dest (stmt);
4530 /* ??? We have two canonical forms of direct goto destinations, a
4531 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4532 if (TREE_CODE (dest) != LABEL_DECL
4533 && (!is_gimple_val (dest)
4534 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4536 error ("goto destination is neither a label nor a pointer");
4537 return true;
4540 return false;
4543 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4544 is a problem, otherwise false. */
4546 static bool
4547 verify_gimple_switch (gswitch *stmt)
4549 unsigned int i, n;
4550 tree elt, prev_upper_bound = NULL_TREE;
4551 tree index_type, elt_type = NULL_TREE;
4553 if (!is_gimple_val (gimple_switch_index (stmt)))
4555 error ("invalid operand to switch statement");
4556 debug_generic_stmt (gimple_switch_index (stmt));
4557 return true;
4560 index_type = TREE_TYPE (gimple_switch_index (stmt));
4561 if (! INTEGRAL_TYPE_P (index_type))
4563 error ("non-integral type switch statement");
4564 debug_generic_expr (index_type);
4565 return true;
4568 elt = gimple_switch_label (stmt, 0);
4569 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4571 error ("invalid default case label in switch statement");
4572 debug_generic_expr (elt);
4573 return true;
4576 n = gimple_switch_num_labels (stmt);
4577 for (i = 1; i < n; i++)
4579 elt = gimple_switch_label (stmt, i);
4581 if (! CASE_LOW (elt))
4583 error ("invalid case label in switch statement");
4584 debug_generic_expr (elt);
4585 return true;
4587 if (CASE_HIGH (elt)
4588 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4590 error ("invalid case range in switch statement");
4591 debug_generic_expr (elt);
4592 return true;
4595 if (elt_type)
4597 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4598 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4600 error ("type mismatch for case label in switch statement");
4601 debug_generic_expr (elt);
4602 return true;
4605 else
4607 elt_type = TREE_TYPE (CASE_LOW (elt));
4608 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4610 error ("type precision mismatch in switch statement");
4611 return true;
4615 if (prev_upper_bound)
4617 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4619 error ("case labels not sorted in switch statement");
4620 return true;
4624 prev_upper_bound = CASE_HIGH (elt);
4625 if (! prev_upper_bound)
4626 prev_upper_bound = CASE_LOW (elt);
4629 return false;
4632 /* Verify a gimple debug statement STMT.
4633 Returns true if anything is wrong. */
4635 static bool
4636 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4638 /* There isn't much that could be wrong in a gimple debug stmt. A
4639 gimple debug bind stmt, for example, maps a tree, that's usually
4640 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4641 component or member of an aggregate type, to another tree, that
4642 can be an arbitrary expression. These stmts expand into debug
4643 insns, and are converted to debug notes by var-tracking.c. */
4644 return false;
4647 /* Verify a gimple label statement STMT.
4648 Returns true if anything is wrong. */
4650 static bool
4651 verify_gimple_label (glabel *stmt)
4653 tree decl = gimple_label_label (stmt);
4654 int uid;
4655 bool err = false;
4657 if (TREE_CODE (decl) != LABEL_DECL)
4658 return true;
4659 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4660 && DECL_CONTEXT (decl) != current_function_decl)
4662 error ("label's context is not the current function decl");
4663 err |= true;
4666 uid = LABEL_DECL_UID (decl);
4667 if (cfun->cfg
4668 && (uid == -1
4669 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4671 error ("incorrect entry in label_to_block_map");
4672 err |= true;
4675 uid = EH_LANDING_PAD_NR (decl);
4676 if (uid)
4678 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4679 if (decl != lp->post_landing_pad)
4681 error ("incorrect setting of landing pad number");
4682 err |= true;
4686 return err;
4689 /* Verify a gimple cond statement STMT.
4690 Returns true if anything is wrong. */
4692 static bool
4693 verify_gimple_cond (gcond *stmt)
4695 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4697 error ("invalid comparison code in gimple cond");
4698 return true;
4700 if (!(!gimple_cond_true_label (stmt)
4701 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4702 || !(!gimple_cond_false_label (stmt)
4703 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4705 error ("invalid labels in gimple cond");
4706 return true;
4709 return verify_gimple_comparison (boolean_type_node,
4710 gimple_cond_lhs (stmt),
4711 gimple_cond_rhs (stmt),
4712 gimple_cond_code (stmt));
4715 /* Verify the GIMPLE statement STMT. Returns true if there is an
4716 error, otherwise false. */
4718 static bool
4719 verify_gimple_stmt (gimple *stmt)
4721 switch (gimple_code (stmt))
4723 case GIMPLE_ASSIGN:
4724 return verify_gimple_assign (as_a <gassign *> (stmt));
4726 case GIMPLE_LABEL:
4727 return verify_gimple_label (as_a <glabel *> (stmt));
4729 case GIMPLE_CALL:
4730 return verify_gimple_call (as_a <gcall *> (stmt));
4732 case GIMPLE_COND:
4733 return verify_gimple_cond (as_a <gcond *> (stmt));
4735 case GIMPLE_GOTO:
4736 return verify_gimple_goto (as_a <ggoto *> (stmt));
4738 case GIMPLE_SWITCH:
4739 return verify_gimple_switch (as_a <gswitch *> (stmt));
4741 case GIMPLE_RETURN:
4742 return verify_gimple_return (as_a <greturn *> (stmt));
4744 case GIMPLE_ASM:
4745 return false;
4747 case GIMPLE_TRANSACTION:
4748 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4750 /* Tuples that do not have tree operands. */
4751 case GIMPLE_NOP:
4752 case GIMPLE_PREDICT:
4753 case GIMPLE_RESX:
4754 case GIMPLE_EH_DISPATCH:
4755 case GIMPLE_EH_MUST_NOT_THROW:
4756 return false;
4758 CASE_GIMPLE_OMP:
4759 /* OpenMP directives are validated by the FE and never operated
4760 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4761 non-gimple expressions when the main index variable has had
4762 its address taken. This does not affect the loop itself
4763 because the header of an GIMPLE_OMP_FOR is merely used to determine
4764 how to setup the parallel iteration. */
4765 return false;
4767 case GIMPLE_DEBUG:
4768 return verify_gimple_debug (stmt);
4770 default:
4771 gcc_unreachable ();
4775 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4776 and false otherwise. */
4778 static bool
4779 verify_gimple_phi (gimple *phi)
4781 bool err = false;
4782 unsigned i;
4783 tree phi_result = gimple_phi_result (phi);
4784 bool virtual_p;
4786 if (!phi_result)
4788 error ("invalid PHI result");
4789 return true;
4792 virtual_p = virtual_operand_p (phi_result);
4793 if (TREE_CODE (phi_result) != SSA_NAME
4794 || (virtual_p
4795 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4797 error ("invalid PHI result");
4798 err = true;
4801 for (i = 0; i < gimple_phi_num_args (phi); i++)
4803 tree t = gimple_phi_arg_def (phi, i);
4805 if (!t)
4807 error ("missing PHI def");
4808 err |= true;
4809 continue;
4811 /* Addressable variables do have SSA_NAMEs but they
4812 are not considered gimple values. */
4813 else if ((TREE_CODE (t) == SSA_NAME
4814 && virtual_p != virtual_operand_p (t))
4815 || (virtual_p
4816 && (TREE_CODE (t) != SSA_NAME
4817 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4818 || (!virtual_p
4819 && !is_gimple_val (t)))
4821 error ("invalid PHI argument");
4822 debug_generic_expr (t);
4823 err |= true;
4825 #ifdef ENABLE_TYPES_CHECKING
4826 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4828 error ("incompatible types in PHI argument %u", i);
4829 debug_generic_stmt (TREE_TYPE (phi_result));
4830 debug_generic_stmt (TREE_TYPE (t));
4831 err |= true;
4833 #endif
4836 return err;
4839 /* Verify the GIMPLE statements inside the sequence STMTS. */
4841 static bool
4842 verify_gimple_in_seq_2 (gimple_seq stmts)
4844 gimple_stmt_iterator ittr;
4845 bool err = false;
4847 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4849 gimple *stmt = gsi_stmt (ittr);
4851 switch (gimple_code (stmt))
4853 case GIMPLE_BIND:
4854 err |= verify_gimple_in_seq_2 (
4855 gimple_bind_body (as_a <gbind *> (stmt)));
4856 break;
4858 case GIMPLE_TRY:
4859 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4860 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4861 break;
4863 case GIMPLE_EH_FILTER:
4864 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4865 break;
4867 case GIMPLE_EH_ELSE:
4869 geh_else *eh_else = as_a <geh_else *> (stmt);
4870 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4871 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4873 break;
4875 case GIMPLE_CATCH:
4876 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4877 as_a <gcatch *> (stmt)));
4878 break;
4880 case GIMPLE_TRANSACTION:
4881 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
4882 break;
4884 default:
4886 bool err2 = verify_gimple_stmt (stmt);
4887 if (err2)
4888 debug_gimple_stmt (stmt);
4889 err |= err2;
4894 return err;
4897 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4898 is a problem, otherwise false. */
4900 static bool
4901 verify_gimple_transaction (gtransaction *stmt)
4903 tree lab;
4905 lab = gimple_transaction_label_norm (stmt);
4906 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4907 return true;
4908 lab = gimple_transaction_label_uninst (stmt);
4909 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4910 return true;
4911 lab = gimple_transaction_label_over (stmt);
4912 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4913 return true;
4915 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4919 /* Verify the GIMPLE statements inside the statement list STMTS. */
4921 DEBUG_FUNCTION void
4922 verify_gimple_in_seq (gimple_seq stmts)
4924 timevar_push (TV_TREE_STMT_VERIFY);
4925 if (verify_gimple_in_seq_2 (stmts))
4926 internal_error ("verify_gimple failed");
4927 timevar_pop (TV_TREE_STMT_VERIFY);
4930 /* Return true when the T can be shared. */
4932 static bool
4933 tree_node_can_be_shared (tree t)
4935 if (IS_TYPE_OR_DECL_P (t)
4936 || is_gimple_min_invariant (t)
4937 || TREE_CODE (t) == SSA_NAME
4938 || t == error_mark_node
4939 || TREE_CODE (t) == IDENTIFIER_NODE)
4940 return true;
4942 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4943 return true;
4945 if (DECL_P (t))
4946 return true;
4948 return false;
4951 /* Called via walk_tree. Verify tree sharing. */
4953 static tree
4954 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4956 hash_set<void *> *visited = (hash_set<void *> *) data;
4958 if (tree_node_can_be_shared (*tp))
4960 *walk_subtrees = false;
4961 return NULL;
4964 if (visited->add (*tp))
4965 return *tp;
4967 return NULL;
4970 /* Called via walk_gimple_stmt. Verify tree sharing. */
4972 static tree
4973 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4975 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4976 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4979 static bool eh_error_found;
4980 bool
4981 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
4982 hash_set<gimple *> *visited)
4984 if (!visited->contains (stmt))
4986 error ("dead STMT in EH table");
4987 debug_gimple_stmt (stmt);
4988 eh_error_found = true;
4990 return true;
4993 /* Verify if the location LOCs block is in BLOCKS. */
4995 static bool
4996 verify_location (hash_set<tree> *blocks, location_t loc)
4998 tree block = LOCATION_BLOCK (loc);
4999 if (block != NULL_TREE
5000 && !blocks->contains (block))
5002 error ("location references block not in block tree");
5003 return true;
5005 if (block != NULL_TREE)
5006 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5007 return false;
5010 /* Called via walk_tree. Verify that expressions have no blocks. */
5012 static tree
5013 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5015 if (!EXPR_P (*tp))
5017 *walk_subtrees = false;
5018 return NULL;
5021 location_t loc = EXPR_LOCATION (*tp);
5022 if (LOCATION_BLOCK (loc) != NULL)
5023 return *tp;
5025 return NULL;
5028 /* Called via walk_tree. Verify locations of expressions. */
5030 static tree
5031 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5033 hash_set<tree> *blocks = (hash_set<tree> *) data;
5035 if (VAR_P (*tp) && DECL_HAS_DEBUG_EXPR_P (*tp))
5037 tree t = DECL_DEBUG_EXPR (*tp);
5038 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
5039 if (addr)
5040 return addr;
5042 if ((VAR_P (*tp)
5043 || TREE_CODE (*tp) == PARM_DECL
5044 || TREE_CODE (*tp) == RESULT_DECL)
5045 && DECL_HAS_VALUE_EXPR_P (*tp))
5047 tree t = DECL_VALUE_EXPR (*tp);
5048 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
5049 if (addr)
5050 return addr;
5053 if (!EXPR_P (*tp))
5055 *walk_subtrees = false;
5056 return NULL;
5059 location_t loc = EXPR_LOCATION (*tp);
5060 if (verify_location (blocks, loc))
5061 return *tp;
5063 return NULL;
5066 /* Called via walk_gimple_op. Verify locations of expressions. */
5068 static tree
5069 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5071 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5072 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5075 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5077 static void
5078 collect_subblocks (hash_set<tree> *blocks, tree block)
5080 tree t;
5081 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5083 blocks->add (t);
5084 collect_subblocks (blocks, t);
5088 /* Verify the GIMPLE statements in the CFG of FN. */
5090 DEBUG_FUNCTION void
5091 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
5093 basic_block bb;
5094 bool err = false;
5096 timevar_push (TV_TREE_STMT_VERIFY);
5097 hash_set<void *> visited;
5098 hash_set<gimple *> visited_stmts;
5100 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5101 hash_set<tree> blocks;
5102 if (DECL_INITIAL (fn->decl))
5104 blocks.add (DECL_INITIAL (fn->decl));
5105 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5108 FOR_EACH_BB_FN (bb, fn)
5110 gimple_stmt_iterator gsi;
5112 for (gphi_iterator gpi = gsi_start_phis (bb);
5113 !gsi_end_p (gpi);
5114 gsi_next (&gpi))
5116 gphi *phi = gpi.phi ();
5117 bool err2 = false;
5118 unsigned i;
5120 visited_stmts.add (phi);
5122 if (gimple_bb (phi) != bb)
5124 error ("gimple_bb (phi) is set to a wrong basic block");
5125 err2 = true;
5128 err2 |= verify_gimple_phi (phi);
5130 /* Only PHI arguments have locations. */
5131 if (gimple_location (phi) != UNKNOWN_LOCATION)
5133 error ("PHI node with location");
5134 err2 = true;
5137 for (i = 0; i < gimple_phi_num_args (phi); i++)
5139 tree arg = gimple_phi_arg_def (phi, i);
5140 tree addr = walk_tree (&arg, verify_node_sharing_1,
5141 &visited, NULL);
5142 if (addr)
5144 error ("incorrect sharing of tree nodes");
5145 debug_generic_expr (addr);
5146 err2 |= true;
5148 location_t loc = gimple_phi_arg_location (phi, i);
5149 if (virtual_operand_p (gimple_phi_result (phi))
5150 && loc != UNKNOWN_LOCATION)
5152 error ("virtual PHI with argument locations");
5153 err2 = true;
5155 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5156 if (addr)
5158 debug_generic_expr (addr);
5159 err2 = true;
5161 err2 |= verify_location (&blocks, loc);
5164 if (err2)
5165 debug_gimple_stmt (phi);
5166 err |= err2;
5169 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5171 gimple *stmt = gsi_stmt (gsi);
5172 bool err2 = false;
5173 struct walk_stmt_info wi;
5174 tree addr;
5175 int lp_nr;
5177 visited_stmts.add (stmt);
5179 if (gimple_bb (stmt) != bb)
5181 error ("gimple_bb (stmt) is set to a wrong basic block");
5182 err2 = true;
5185 err2 |= verify_gimple_stmt (stmt);
5186 err2 |= verify_location (&blocks, gimple_location (stmt));
5188 memset (&wi, 0, sizeof (wi));
5189 wi.info = (void *) &visited;
5190 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5191 if (addr)
5193 error ("incorrect sharing of tree nodes");
5194 debug_generic_expr (addr);
5195 err2 |= true;
5198 memset (&wi, 0, sizeof (wi));
5199 wi.info = (void *) &blocks;
5200 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5201 if (addr)
5203 debug_generic_expr (addr);
5204 err2 |= true;
5207 /* ??? Instead of not checking these stmts at all the walker
5208 should know its context via wi. */
5209 if (!is_gimple_debug (stmt)
5210 && !is_gimple_omp (stmt))
5212 memset (&wi, 0, sizeof (wi));
5213 addr = walk_gimple_op (stmt, verify_expr, &wi);
5214 if (addr)
5216 debug_generic_expr (addr);
5217 inform (gimple_location (stmt), "in statement");
5218 err2 |= true;
5222 /* If the statement is marked as part of an EH region, then it is
5223 expected that the statement could throw. Verify that when we
5224 have optimizations that simplify statements such that we prove
5225 that they cannot throw, that we update other data structures
5226 to match. */
5227 lp_nr = lookup_stmt_eh_lp (stmt);
5228 if (lp_nr > 0)
5230 if (!stmt_could_throw_p (stmt))
5232 if (verify_nothrow)
5234 error ("statement marked for throw, but doesn%'t");
5235 err2 |= true;
5238 else if (!gsi_one_before_end_p (gsi))
5240 error ("statement marked for throw in middle of block");
5241 err2 |= true;
5245 if (err2)
5246 debug_gimple_stmt (stmt);
5247 err |= err2;
5251 eh_error_found = false;
5252 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5253 if (eh_table)
5254 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5255 (&visited_stmts);
5257 if (err || eh_error_found)
5258 internal_error ("verify_gimple failed");
5260 verify_histograms ();
5261 timevar_pop (TV_TREE_STMT_VERIFY);
5265 /* Verifies that the flow information is OK. */
5267 static int
5268 gimple_verify_flow_info (void)
5270 int err = 0;
5271 basic_block bb;
5272 gimple_stmt_iterator gsi;
5273 gimple *stmt;
5274 edge e;
5275 edge_iterator ei;
5277 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5278 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5280 error ("ENTRY_BLOCK has IL associated with it");
5281 err = 1;
5284 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5285 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5287 error ("EXIT_BLOCK has IL associated with it");
5288 err = 1;
5291 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5292 if (e->flags & EDGE_FALLTHRU)
5294 error ("fallthru to exit from bb %d", e->src->index);
5295 err = 1;
5298 FOR_EACH_BB_FN (bb, cfun)
5300 bool found_ctrl_stmt = false;
5302 stmt = NULL;
5304 /* Skip labels on the start of basic block. */
5305 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5307 tree label;
5308 gimple *prev_stmt = stmt;
5310 stmt = gsi_stmt (gsi);
5312 if (gimple_code (stmt) != GIMPLE_LABEL)
5313 break;
5315 label = gimple_label_label (as_a <glabel *> (stmt));
5316 if (prev_stmt && DECL_NONLOCAL (label))
5318 error ("nonlocal label ");
5319 print_generic_expr (stderr, label, 0);
5320 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5321 bb->index);
5322 err = 1;
5325 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5327 error ("EH landing pad label ");
5328 print_generic_expr (stderr, label, 0);
5329 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5330 bb->index);
5331 err = 1;
5334 if (label_to_block (label) != bb)
5336 error ("label ");
5337 print_generic_expr (stderr, label, 0);
5338 fprintf (stderr, " to block does not match in bb %d",
5339 bb->index);
5340 err = 1;
5343 if (decl_function_context (label) != current_function_decl)
5345 error ("label ");
5346 print_generic_expr (stderr, label, 0);
5347 fprintf (stderr, " has incorrect context in bb %d",
5348 bb->index);
5349 err = 1;
5353 /* Verify that body of basic block BB is free of control flow. */
5354 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5356 gimple *stmt = gsi_stmt (gsi);
5358 if (found_ctrl_stmt)
5360 error ("control flow in the middle of basic block %d",
5361 bb->index);
5362 err = 1;
5365 if (stmt_ends_bb_p (stmt))
5366 found_ctrl_stmt = true;
5368 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5370 error ("label ");
5371 print_generic_expr (stderr, gimple_label_label (label_stmt), 0);
5372 fprintf (stderr, " in the middle of basic block %d", bb->index);
5373 err = 1;
5377 gsi = gsi_last_bb (bb);
5378 if (gsi_end_p (gsi))
5379 continue;
5381 stmt = gsi_stmt (gsi);
5383 if (gimple_code (stmt) == GIMPLE_LABEL)
5384 continue;
5386 err |= verify_eh_edges (stmt);
5388 if (is_ctrl_stmt (stmt))
5390 FOR_EACH_EDGE (e, ei, bb->succs)
5391 if (e->flags & EDGE_FALLTHRU)
5393 error ("fallthru edge after a control statement in bb %d",
5394 bb->index);
5395 err = 1;
5399 if (gimple_code (stmt) != GIMPLE_COND)
5401 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5402 after anything else but if statement. */
5403 FOR_EACH_EDGE (e, ei, bb->succs)
5404 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5406 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5407 bb->index);
5408 err = 1;
5412 switch (gimple_code (stmt))
5414 case GIMPLE_COND:
5416 edge true_edge;
5417 edge false_edge;
5419 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5421 if (!true_edge
5422 || !false_edge
5423 || !(true_edge->flags & EDGE_TRUE_VALUE)
5424 || !(false_edge->flags & EDGE_FALSE_VALUE)
5425 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5426 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5427 || EDGE_COUNT (bb->succs) >= 3)
5429 error ("wrong outgoing edge flags at end of bb %d",
5430 bb->index);
5431 err = 1;
5434 break;
5436 case GIMPLE_GOTO:
5437 if (simple_goto_p (stmt))
5439 error ("explicit goto at end of bb %d", bb->index);
5440 err = 1;
5442 else
5444 /* FIXME. We should double check that the labels in the
5445 destination blocks have their address taken. */
5446 FOR_EACH_EDGE (e, ei, bb->succs)
5447 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5448 | EDGE_FALSE_VALUE))
5449 || !(e->flags & EDGE_ABNORMAL))
5451 error ("wrong outgoing edge flags at end of bb %d",
5452 bb->index);
5453 err = 1;
5456 break;
5458 case GIMPLE_CALL:
5459 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5460 break;
5461 /* fallthru */
5462 case GIMPLE_RETURN:
5463 if (!single_succ_p (bb)
5464 || (single_succ_edge (bb)->flags
5465 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5466 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5468 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5469 err = 1;
5471 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5473 error ("return edge does not point to exit in bb %d",
5474 bb->index);
5475 err = 1;
5477 break;
5479 case GIMPLE_SWITCH:
5481 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5482 tree prev;
5483 edge e;
5484 size_t i, n;
5486 n = gimple_switch_num_labels (switch_stmt);
5488 /* Mark all the destination basic blocks. */
5489 for (i = 0; i < n; ++i)
5491 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5492 basic_block label_bb = label_to_block (lab);
5493 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5494 label_bb->aux = (void *)1;
5497 /* Verify that the case labels are sorted. */
5498 prev = gimple_switch_label (switch_stmt, 0);
5499 for (i = 1; i < n; ++i)
5501 tree c = gimple_switch_label (switch_stmt, i);
5502 if (!CASE_LOW (c))
5504 error ("found default case not at the start of "
5505 "case vector");
5506 err = 1;
5507 continue;
5509 if (CASE_LOW (prev)
5510 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5512 error ("case labels not sorted: ");
5513 print_generic_expr (stderr, prev, 0);
5514 fprintf (stderr," is greater than ");
5515 print_generic_expr (stderr, c, 0);
5516 fprintf (stderr," but comes before it.\n");
5517 err = 1;
5519 prev = c;
5521 /* VRP will remove the default case if it can prove it will
5522 never be executed. So do not verify there always exists
5523 a default case here. */
5525 FOR_EACH_EDGE (e, ei, bb->succs)
5527 if (!e->dest->aux)
5529 error ("extra outgoing edge %d->%d",
5530 bb->index, e->dest->index);
5531 err = 1;
5534 e->dest->aux = (void *)2;
5535 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5536 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5538 error ("wrong outgoing edge flags at end of bb %d",
5539 bb->index);
5540 err = 1;
5544 /* Check that we have all of them. */
5545 for (i = 0; i < n; ++i)
5547 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5548 basic_block label_bb = label_to_block (lab);
5550 if (label_bb->aux != (void *)2)
5552 error ("missing edge %i->%i", bb->index, label_bb->index);
5553 err = 1;
5557 FOR_EACH_EDGE (e, ei, bb->succs)
5558 e->dest->aux = (void *)0;
5560 break;
5562 case GIMPLE_EH_DISPATCH:
5563 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5564 break;
5566 default:
5567 break;
5571 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5572 verify_dominators (CDI_DOMINATORS);
5574 return err;
5578 /* Updates phi nodes after creating a forwarder block joined
5579 by edge FALLTHRU. */
5581 static void
5582 gimple_make_forwarder_block (edge fallthru)
5584 edge e;
5585 edge_iterator ei;
5586 basic_block dummy, bb;
5587 tree var;
5588 gphi_iterator gsi;
5590 dummy = fallthru->src;
5591 bb = fallthru->dest;
5593 if (single_pred_p (bb))
5594 return;
5596 /* If we redirected a branch we must create new PHI nodes at the
5597 start of BB. */
5598 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5600 gphi *phi, *new_phi;
5602 phi = gsi.phi ();
5603 var = gimple_phi_result (phi);
5604 new_phi = create_phi_node (var, bb);
5605 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5606 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5607 UNKNOWN_LOCATION);
5610 /* Add the arguments we have stored on edges. */
5611 FOR_EACH_EDGE (e, ei, bb->preds)
5613 if (e == fallthru)
5614 continue;
5616 flush_pending_stmts (e);
5621 /* Return a non-special label in the head of basic block BLOCK.
5622 Create one if it doesn't exist. */
5624 tree
5625 gimple_block_label (basic_block bb)
5627 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5628 bool first = true;
5629 tree label;
5630 glabel *stmt;
5632 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5634 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5635 if (!stmt)
5636 break;
5637 label = gimple_label_label (stmt);
5638 if (!DECL_NONLOCAL (label))
5640 if (!first)
5641 gsi_move_before (&i, &s);
5642 return label;
5646 label = create_artificial_label (UNKNOWN_LOCATION);
5647 stmt = gimple_build_label (label);
5648 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5649 return label;
5653 /* Attempt to perform edge redirection by replacing a possibly complex
5654 jump instruction by a goto or by removing the jump completely.
5655 This can apply only if all edges now point to the same block. The
5656 parameters and return values are equivalent to
5657 redirect_edge_and_branch. */
5659 static edge
5660 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5662 basic_block src = e->src;
5663 gimple_stmt_iterator i;
5664 gimple *stmt;
5666 /* We can replace or remove a complex jump only when we have exactly
5667 two edges. */
5668 if (EDGE_COUNT (src->succs) != 2
5669 /* Verify that all targets will be TARGET. Specifically, the
5670 edge that is not E must also go to TARGET. */
5671 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5672 return NULL;
5674 i = gsi_last_bb (src);
5675 if (gsi_end_p (i))
5676 return NULL;
5678 stmt = gsi_stmt (i);
5680 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5682 gsi_remove (&i, true);
5683 e = ssa_redirect_edge (e, target);
5684 e->flags = EDGE_FALLTHRU;
5685 return e;
5688 return NULL;
5692 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5693 edge representing the redirected branch. */
5695 static edge
5696 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5698 basic_block bb = e->src;
5699 gimple_stmt_iterator gsi;
5700 edge ret;
5701 gimple *stmt;
5703 if (e->flags & EDGE_ABNORMAL)
5704 return NULL;
5706 if (e->dest == dest)
5707 return NULL;
5709 if (e->flags & EDGE_EH)
5710 return redirect_eh_edge (e, dest);
5712 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5714 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5715 if (ret)
5716 return ret;
5719 gsi = gsi_last_bb (bb);
5720 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5722 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5724 case GIMPLE_COND:
5725 /* For COND_EXPR, we only need to redirect the edge. */
5726 break;
5728 case GIMPLE_GOTO:
5729 /* No non-abnormal edges should lead from a non-simple goto, and
5730 simple ones should be represented implicitly. */
5731 gcc_unreachable ();
5733 case GIMPLE_SWITCH:
5735 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5736 tree label = gimple_block_label (dest);
5737 tree cases = get_cases_for_edge (e, switch_stmt);
5739 /* If we have a list of cases associated with E, then use it
5740 as it's a lot faster than walking the entire case vector. */
5741 if (cases)
5743 edge e2 = find_edge (e->src, dest);
5744 tree last, first;
5746 first = cases;
5747 while (cases)
5749 last = cases;
5750 CASE_LABEL (cases) = label;
5751 cases = CASE_CHAIN (cases);
5754 /* If there was already an edge in the CFG, then we need
5755 to move all the cases associated with E to E2. */
5756 if (e2)
5758 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5760 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5761 CASE_CHAIN (cases2) = first;
5763 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5765 else
5767 size_t i, n = gimple_switch_num_labels (switch_stmt);
5769 for (i = 0; i < n; i++)
5771 tree elt = gimple_switch_label (switch_stmt, i);
5772 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5773 CASE_LABEL (elt) = label;
5777 break;
5779 case GIMPLE_ASM:
5781 gasm *asm_stmt = as_a <gasm *> (stmt);
5782 int i, n = gimple_asm_nlabels (asm_stmt);
5783 tree label = NULL;
5785 for (i = 0; i < n; ++i)
5787 tree cons = gimple_asm_label_op (asm_stmt, i);
5788 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5790 if (!label)
5791 label = gimple_block_label (dest);
5792 TREE_VALUE (cons) = label;
5796 /* If we didn't find any label matching the former edge in the
5797 asm labels, we must be redirecting the fallthrough
5798 edge. */
5799 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5801 break;
5803 case GIMPLE_RETURN:
5804 gsi_remove (&gsi, true);
5805 e->flags |= EDGE_FALLTHRU;
5806 break;
5808 case GIMPLE_OMP_RETURN:
5809 case GIMPLE_OMP_CONTINUE:
5810 case GIMPLE_OMP_SECTIONS_SWITCH:
5811 case GIMPLE_OMP_FOR:
5812 /* The edges from OMP constructs can be simply redirected. */
5813 break;
5815 case GIMPLE_EH_DISPATCH:
5816 if (!(e->flags & EDGE_FALLTHRU))
5817 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5818 break;
5820 case GIMPLE_TRANSACTION:
5821 if (e->flags & EDGE_TM_ABORT)
5822 gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
5823 gimple_block_label (dest));
5824 else if (e->flags & EDGE_TM_UNINSTRUMENTED)
5825 gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
5826 gimple_block_label (dest));
5827 else
5828 gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
5829 gimple_block_label (dest));
5830 break;
5832 default:
5833 /* Otherwise it must be a fallthru edge, and we don't need to
5834 do anything besides redirecting it. */
5835 gcc_assert (e->flags & EDGE_FALLTHRU);
5836 break;
5839 /* Update/insert PHI nodes as necessary. */
5841 /* Now update the edges in the CFG. */
5842 e = ssa_redirect_edge (e, dest);
5844 return e;
5847 /* Returns true if it is possible to remove edge E by redirecting
5848 it to the destination of the other edge from E->src. */
5850 static bool
5851 gimple_can_remove_branch_p (const_edge e)
5853 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5854 return false;
5856 return true;
5859 /* Simple wrapper, as we can always redirect fallthru edges. */
5861 static basic_block
5862 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5864 e = gimple_redirect_edge_and_branch (e, dest);
5865 gcc_assert (e);
5867 return NULL;
5871 /* Splits basic block BB after statement STMT (but at least after the
5872 labels). If STMT is NULL, BB is split just after the labels. */
5874 static basic_block
5875 gimple_split_block (basic_block bb, void *stmt)
5877 gimple_stmt_iterator gsi;
5878 gimple_stmt_iterator gsi_tgt;
5879 gimple_seq list;
5880 basic_block new_bb;
5881 edge e;
5882 edge_iterator ei;
5884 new_bb = create_empty_bb (bb);
5886 /* Redirect the outgoing edges. */
5887 new_bb->succs = bb->succs;
5888 bb->succs = NULL;
5889 FOR_EACH_EDGE (e, ei, new_bb->succs)
5890 e->src = new_bb;
5892 /* Get a stmt iterator pointing to the first stmt to move. */
5893 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
5894 gsi = gsi_after_labels (bb);
5895 else
5897 gsi = gsi_for_stmt ((gimple *) stmt);
5898 gsi_next (&gsi);
5901 /* Move everything from GSI to the new basic block. */
5902 if (gsi_end_p (gsi))
5903 return new_bb;
5905 /* Split the statement list - avoid re-creating new containers as this
5906 brings ugly quadratic memory consumption in the inliner.
5907 (We are still quadratic since we need to update stmt BB pointers,
5908 sadly.) */
5909 gsi_split_seq_before (&gsi, &list);
5910 set_bb_seq (new_bb, list);
5911 for (gsi_tgt = gsi_start (list);
5912 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5913 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5915 return new_bb;
5919 /* Moves basic block BB after block AFTER. */
5921 static bool
5922 gimple_move_block_after (basic_block bb, basic_block after)
5924 if (bb->prev_bb == after)
5925 return true;
5927 unlink_block (bb);
5928 link_block (bb, after);
5930 return true;
5934 /* Return TRUE if block BB has no executable statements, otherwise return
5935 FALSE. */
5937 static bool
5938 gimple_empty_block_p (basic_block bb)
5940 /* BB must have no executable statements. */
5941 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5942 if (phi_nodes (bb))
5943 return false;
5944 if (gsi_end_p (gsi))
5945 return true;
5946 if (is_gimple_debug (gsi_stmt (gsi)))
5947 gsi_next_nondebug (&gsi);
5948 return gsi_end_p (gsi);
5952 /* Split a basic block if it ends with a conditional branch and if the
5953 other part of the block is not empty. */
5955 static basic_block
5956 gimple_split_block_before_cond_jump (basic_block bb)
5958 gimple *last, *split_point;
5959 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5960 if (gsi_end_p (gsi))
5961 return NULL;
5962 last = gsi_stmt (gsi);
5963 if (gimple_code (last) != GIMPLE_COND
5964 && gimple_code (last) != GIMPLE_SWITCH)
5965 return NULL;
5966 gsi_prev (&gsi);
5967 split_point = gsi_stmt (gsi);
5968 return split_block (bb, split_point)->dest;
5972 /* Return true if basic_block can be duplicated. */
5974 static bool
5975 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5977 return true;
/* Create a duplicate of the basic block BB and return it.  The new
   block is inserted just before the exit block.  NOTE: This does not
   preserve SSA form; new definitions get fresh SSA names (recorded as
   replacement mappings by create_new_def_for), but edges and PHI
   arguments are left for the caller to set up.  */

static basic_block
gimple_duplicate_bb (basic_block bb)
{
  basic_block new_bb;
  gimple_stmt_iterator gsi_tgt;

  new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);

  /* Copy the PHI nodes.  We ignore PHI node arguments here because
     the incoming edges have not been setup yet.  */
  for (gphi_iterator gpi = gsi_start_phis (bb);
       !gsi_end_p (gpi);
       gsi_next (&gpi))
    {
      gphi *phi, *copy;
      phi = gpi.phi ();
      copy = create_phi_node (NULL_TREE, new_bb);
      create_new_def_for (gimple_phi_result (phi), copy,
			  gimple_phi_result_ptr (copy));
      gimple_set_uid (copy, gimple_uid (phi));
    }

  /* Copy the ordinary statements, skipping labels.  */
  gsi_tgt = gsi_start_bb (new_bb);
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      def_operand_p def_p;
      ssa_op_iter op_iter;
      tree lhs;
      gimple *stmt, *copy;

      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      /* Don't duplicate label debug stmts.  */
      if (gimple_debug_bind_p (stmt)
	  && TREE_CODE (gimple_debug_bind_get_var (stmt))
	     == LABEL_DECL)
	continue;

      /* Create a new copy of STMT and duplicate STMT's virtual
	 operands.  */
      copy = gimple_copy (stmt);
      gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);

      /* Keep EH landing pad info and value-profile histograms in sync
	 with the copy.  */
      maybe_duplicate_eh_stmt (copy, stmt);
      gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);

      /* When copying around a stmt writing into a local non-user
	 aggregate, make sure it won't share stack slot with other
	 vars.  */
      lhs = gimple_get_lhs (stmt);
      if (lhs && TREE_CODE (lhs) != SSA_NAME)
	{
	  tree base = get_base_address (lhs);
	  if (base
	      && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
	      && DECL_IGNORED_P (base)
	      && !TREE_STATIC (base)
	      && !DECL_EXTERNAL (base)
	      && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
	    DECL_NONSHAREABLE (base) = 1;
	}

      /* Create new names for all the definitions created by COPY and
	 add replacement mappings for each new name.  */
      FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
	create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
    }

  return new_bb;
}
/* Adds phi node arguments for edge E_COPY after basic block duplication.
   The corresponding original edge is located via the original/copy
   tables (blocks that were duplicated carry BB_DUPLICATED), and each
   PHI in E_COPY->dest receives the argument its original PHI has on
   that original edge.  */

static void
add_phi_args_after_copy_edge (edge e_copy)
{
  basic_block bb, bb_copy = e_copy->src, dest;
  edge e;
  edge_iterator ei;
  gphi *phi, *phi_copy;
  tree def;
  gphi_iterator psi, psi_copy;

  /* Nothing to do when the destination has no PHI nodes.  */
  if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
    return;

  /* Map the source and destination of E_COPY back to the originals.  */
  bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;

  if (e_copy->dest->flags & BB_DUPLICATED)
    dest = get_bb_original (e_copy->dest);
  else
    dest = e_copy->dest;

  e = find_edge (bb, dest);
  if (!e)
    {
      /* During loop unrolling the target of the latch edge is copied.
	 In this case we are not looking for edge to dest, but to
	 duplicated block whose original was dest.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if ((e->dest->flags & BB_DUPLICATED)
	      && get_bb_original (e->dest) == dest)
	    break;
	}

      gcc_assert (e != NULL);
    }

  /* Walk the PHIs of the original and copied destinations in lockstep
     and copy the argument used on the original edge E.  */
  for (psi = gsi_start_phis (e->dest),
       psi_copy = gsi_start_phis (e_copy->dest);
       !gsi_end_p (psi);
       gsi_next (&psi), gsi_next (&psi_copy))
    {
      phi = psi.phi ();
      phi_copy = psi_copy.phi ();
      def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      add_phi_arg (phi_copy, def, e_copy,
		   gimple_phi_arg_location_from_edge (phi, e));
    }
}
/* Basic block BB_COPY was created by code duplication.  Add phi node
   arguments for all edges going out of BB_COPY.  The blocks that were
   duplicated have BB_DUPLICATED set.  */

void
add_phi_args_after_copy_bb (basic_block bb_copy)
{
  edge e_copy;
  edge_iterator ei;

  FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
    {
      add_phi_args_after_copy_edge (e_copy);
    }
}
/* Blocks in REGION_COPY array of length N_REGION were created by
   duplication of basic blocks.  Add phi node arguments for edges
   going from these blocks.  If E_COPY is not NULL, also add
   phi node arguments for its destination.

   BB_DUPLICATED is set on all the copied blocks for the duration of
   the walk so that add_phi_args_after_copy_edge can map copies back
   to their originals; it is cleared again before returning.  */

void
add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
			 edge e_copy)
{
  unsigned i;

  for (i = 0; i < n_region; i++)
    region_copy[i]->flags |= BB_DUPLICATED;

  for (i = 0; i < n_region; i++)
    add_phi_args_after_copy_bb (region_copy[i]);
  if (e_copy)
    add_phi_args_after_copy_edge (e_copy);

  for (i = 0; i < n_region; i++)
    region_copy[i]->flags &= ~BB_DUPLICATED;
}
/* Duplicates a REGION (set of N_REGION basic blocks) with just a single
   important exit edge EXIT.  By important we mean that no SSA name defined
   inside region is live over the other exit edges of the region.  All entry
   edges to the region must go to ENTRY->dest.  The edge ENTRY is redirected
   to the duplicate of the region.  Dominance and loop information is
   updated if UPDATE_DOMINANCE is true, but not the SSA web.  If
   UPDATE_DOMINANCE is false then we assume that the caller will update the
   dominance information after calling this function.  The new basic
   blocks are stored to REGION_COPY in the same order as they had in REGION,
   provided that REGION_COPY is not NULL.
   The function returns false if it is unable to copy the region,
   true otherwise.  */

bool
gimple_duplicate_sese_region (edge entry, edge exit,
			      basic_block *region, unsigned n_region,
			      basic_block *region_copy,
			      bool update_dominance)
{
  unsigned i;
  bool free_region_copy = false, copying_header = false;
  struct loop *loop = entry->dest->loop_father;
  edge exit_copy;
  vec<basic_block> doms;
  edge redirected;
  int total_freq = 0, entry_freq = 0;
  gcov_type total_count = 0, entry_count = 0;

  if (!can_copy_bbs_p (region, n_region))
    return false;

  /* Some sanity checking.  Note that we do not check for all possible
     misuses of the functions.  I.e. if you ask to copy something weird,
     it will work, but the state of structures probably will not be
     correct.  */
  for (i = 0; i < n_region; i++)
    {
      /* We do not handle subloops, i.e. all the blocks must belong to the
	 same loop.  */
      if (region[i]->loop_father != loop)
	return false;

      if (region[i] != entry->dest
	  && region[i] == loop->header)
	return false;
    }

  /* In case the function is used for loop header copying (which is the primary
     use), ensure that EXIT and its copy will be new latch and entry edges.  */
  if (loop->header == entry->dest)
    {
      copying_header = true;

      if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
	return false;

      for (i = 0; i < n_region; i++)
	if (region[i] != exit->src
	    && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
	  return false;
    }

  initialize_original_copy_tables ();

  /* When copying the header, the copies belong to the enclosing loop;
     otherwise they stay in LOOP itself.  */
  if (copying_header)
    set_loop_copy (loop, loop_outer (loop));
  else
    set_loop_copy (loop, loop);

  if (!region_copy)
    {
      region_copy = XNEWVEC (basic_block, n_region);
      free_region_copy = true;
    }

  /* Record blocks outside the region that are dominated by something
     inside.  */
  if (update_dominance)
    {
      doms.create (0);
      doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
    }

  /* Compute the scaling data either from profile counts or, when no
     counts are available, from estimated frequencies.  */
  if (entry->dest->count)
    {
      total_count = entry->dest->count;
      entry_count = entry->count;
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (entry_count > total_count)
	entry_count = total_count;
    }
  else
    {
      total_freq = entry->dest->frequency;
      entry_freq = EDGE_FREQUENCY (entry);
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (total_freq == 0)
	total_freq = 1;
      else if (entry_freq > total_freq)
	entry_freq = total_freq;
    }

  copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
	    split_edge_bb_loc (entry), update_dominance);
  if (total_count)
    {
      scale_bbs_frequencies_gcov_type (region, n_region,
				       total_count - entry_count,
				       total_count);
      scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
				       total_count);
    }
  else
    {
      scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
				 total_freq);
      scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
    }

  if (copying_header)
    {
      loop->header = exit->dest;
      loop->latch = exit->src;
    }

  /* Redirect the entry and add the phi node arguments.  */
  redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
  gcc_assert (redirected != NULL);
  flush_pending_stmts (entry);

  /* Concerning updating of dominators:  We must recount dominators
     for entry block and its copy.  Anything that is outside of the
     region, but was dominated by something inside needs recounting as
     well.  */
  if (update_dominance)
    {
      set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
      doms.safe_push (get_bb_original (entry->dest));
      iterate_fix_dominators (CDI_DOMINATORS, doms, false);
      doms.release ();
    }

  /* Add the other PHI node arguments.  */
  add_phi_args_after_copy (region_copy, n_region, NULL);

  if (free_region_copy)
    free (region_copy);

  free_original_copy_tables ();
  return true;
}
6303 /* Checks if BB is part of the region defined by N_REGION BBS. */
6304 static bool
6305 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6307 unsigned int n;
6309 for (n = 0; n < n_region; n++)
6311 if (bb == bbs[n])
6312 return true;
6314 return false;
/* Duplicates REGION consisting of N_REGION blocks.  The new blocks
   are stored to REGION_COPY in the same order in which they appear
   in REGION, if REGION_COPY is not NULL.  ENTRY is the entry to
   the region, EXIT an exit from it.  The condition guarding EXIT
   is moved to ENTRY.  Returns true if duplication succeeds, false
   otherwise.

   For example,

   some_code;
   if (cond)
     AAA;
   else
     BBB;

   is transformed to

   if (cond)
     {
       some_code;
       AAA;
     }
   else
     {
       some_code;
       BBB;
     }
*/

bool
gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
			    basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
			    basic_block *region_copy ATTRIBUTE_UNUSED)
{
  unsigned i;
  bool free_region_copy = false;
  struct loop *loop = exit->dest->loop_father;
  struct loop *orig_loop = entry->dest->loop_father;
  basic_block switch_bb, entry_bb, nentry_bb;
  vec<basic_block> doms;
  int total_freq = 0, exit_freq = 0;
  gcov_type total_count = 0, exit_count = 0;
  edge exits[2], nexits[2], e;
  gimple_stmt_iterator gsi;
  gimple *cond_stmt;
  edge sorig, snew;
  basic_block exit_bb;
  gphi_iterator psi;
  gphi *phi;
  tree def;
  struct loop *target, *aloop, *cloop;

  /* The exit block must end in a two-way conditional; EXITS holds the
     exit edge and its sibling.  */
  gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
  exits[0] = exit;
  exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);

  if (!can_copy_bbs_p (region, n_region))
    return false;

  initialize_original_copy_tables ();
  set_loop_copy (orig_loop, loop);

  /* Duplicate the loop tree rooted at any subloop whose header lies
     inside the region.  */
  target= loop;
  for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
    if (bb_part_of_region_p (aloop->header, region, n_region))
      {
	cloop = duplicate_loop (aloop, target);
	duplicate_subloops (aloop, cloop);
      }

  if (!region_copy)
    {
      region_copy = XNEWVEC (basic_block, n_region);
      free_region_copy = true;
    }

  gcc_assert (!need_ssa_update_p (cfun));

  /* Record blocks outside the region that are dominated by something
     inside.  */
  doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);

  /* Compute scaling data from profile counts, or estimated frequencies
     when no counts are available.  */
  if (exit->src->count)
    {
      total_count = exit->src->count;
      exit_count = exit->count;
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (exit_count > total_count)
	exit_count = total_count;
    }
  else
    {
      total_freq = exit->src->frequency;
      exit_freq = EDGE_FREQUENCY (exit);
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (total_freq == 0)
	total_freq = 1;
      if (exit_freq > total_freq)
	exit_freq = total_freq;
    }

  copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
	    split_edge_bb_loc (exit), true);
  if (total_count)
    {
      scale_bbs_frequencies_gcov_type (region, n_region,
				       total_count - exit_count,
				       total_count);
      scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
				       total_count);
    }
  else
    {
      scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
				 total_freq);
      scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
    }

  /* Create the switch block, and put the exit condition to it.  */
  entry_bb = entry->dest;
  nentry_bb = get_bb_copy (entry_bb);
  if (!last_stmt (entry->src)
      || !stmt_ends_bb_p (last_stmt (entry->src)))
    switch_bb = entry->src;
  else
    switch_bb = split_edge (entry);
  set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);

  gsi = gsi_last_bb (switch_bb);
  cond_stmt = last_stmt (exit->src);
  gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
  cond_stmt = gimple_copy (cond_stmt);

  gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);

  sorig = single_succ_edge (switch_bb);
  sorig->flags = exits[1]->flags;
  snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);

  /* Register the new edge from SWITCH_BB in loop exit lists.  */
  rescan_loop_exit (snew, true, false);

  /* Add the PHI node arguments.  */
  add_phi_args_after_copy (region_copy, n_region, snew);

  /* Get rid of now superfluous conditions and associated edges (and phi node
     arguments).  */
  exit_bb = exit->dest;

  e = redirect_edge_and_branch (exits[0], exits[1]->dest);
  PENDING_STMT (e) = NULL;

  /* The latch of ORIG_LOOP was copied, and so was the backedge
     to the original header.  We redirect this backedge to EXIT_BB.  */
  for (i = 0; i < n_region; i++)
    if (get_bb_original (region_copy[i]) == orig_loop->latch)
      {
	gcc_assert (single_succ_edge (region_copy[i]));
	e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
	PENDING_STMT (e) = NULL;
	for (psi = gsi_start_phis (exit_bb);
	     !gsi_end_p (psi);
	     gsi_next (&psi))
	  {
	    phi = psi.phi ();
	    def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
	    add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
	  }
      }
  e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
  PENDING_STMT (e) = NULL;

  /* Anything that is outside of the region, but was dominated by something
     inside needs to update dominance info.  */
  iterate_fix_dominators (CDI_DOMINATORS, doms, false);
  doms.release ();
  /* Update the SSA web.  */
  update_ssa (TODO_update_ssa);

  if (free_region_copy)
    free (region_copy);

  free_original_copy_tables ();
  return true;
}
6507 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6508 adding blocks when the dominator traversal reaches EXIT. This
6509 function silently assumes that ENTRY strictly dominates EXIT. */
6511 void
6512 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6513 vec<basic_block> *bbs_p)
6515 basic_block son;
6517 for (son = first_dom_son (CDI_DOMINATORS, entry);
6518 son;
6519 son = next_dom_son (CDI_DOMINATORS, son))
6521 bbs_p->safe_push (son);
6522 if (son != exit)
6523 gather_blocks_in_sese_region (son, exit, bbs_p);
/* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
   The duplicates are recorded in VARS_MAP.  A decl already in
   TO_CONTEXT is left untouched; otherwise an existing mapping is
   reused, or a fresh copy is created, registered and used.  */

static void
replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
			   tree to_context)
{
  tree t = *tp, new_t;
  struct function *f = DECL_STRUCT_FUNCTION (to_context);

  /* Nothing to do if the decl already lives in the target context.  */
  if (DECL_CONTEXT (t) == to_context)
    return;

  bool existed;
  tree &loc = vars_map->get_or_insert (t, &existed);

  if (!existed)
    {
      if (SSA_VAR_P (t))
	{
	  /* Variables get a full copy registered as a local of the
	     destination function.  */
	  new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
	  add_local_decl (f, new_t);
	}
      else
	{
	  gcc_assert (TREE_CODE (t) == CONST_DECL);
	  new_t = copy_node (t);
	}
      DECL_CONTEXT (new_t) = to_context;

      loc = new_t;
    }
  else
    new_t = loc;

  *tp = new_t;
}
/* Creates an ssa name in TO_CONTEXT equivalent to NAME.
   VARS_MAP maps old ssa names and var_decls to the new ones.
   NAME must not be a virtual operand.  The new name reuses NAME's
   defining statement, which is then detached from NAME.  */

static tree
replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
		  tree to_context)
{
  tree new_name;

  gcc_assert (!virtual_operand_p (name));

  tree *loc = vars_map->get (name);

  if (!loc)
    {
      tree decl = SSA_NAME_VAR (name);
      if (decl)
	{
	  /* Duplicate the underlying decl into TO_CONTEXT first, then
	     build the new SSA name on top of the duplicate.  */
	  gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
	  replace_by_duplicate_decl (&decl, vars_map, to_context);
	  new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
				       decl, SSA_NAME_DEF_STMT (name));
	}
      else
	new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
				     name, SSA_NAME_DEF_STMT (name));

      /* Now that we've used the def stmt to define new_name, make sure it
	 doesn't define name anymore.  */
      SSA_NAME_DEF_STMT (name) = NULL;

      vars_map->put (name, new_name);
    }
  else
    new_name = *loc;

  return new_name;
}
/* Bookkeeping shared by the move_stmt_r / move_stmt_op walkers when
   moving statements between functions (see move_block_to_fn).  */

struct move_stmt_d
{
  tree orig_block;		/* BLOCK the statements are moved out of.  */
  tree new_block;		/* BLOCK they are reparented into.  */
  tree from_context;		/* Source function decl.  */
  tree to_context;		/* Destination function decl.  */
  hash_map<tree, tree> *vars_map; /* Old decl/SSA name -> duplicate.  */
  htab_t new_label_map;		/* Old LABEL_DECL -> new LABEL_DECL.  */
  hash_map<void *, void *> *eh_map; /* Old EH region -> new EH region.  */
  bool remap_decls_p;		/* Whether local decls should be remapped
				   (cleared inside OMP directives).  */
};
/* Helper for move_block_to_fn.  Set TREE_BLOCK in every expression
   contained in *TP if it has been ORIG_BLOCK previously and change the
   DECL_CONTEXT of every local variable referenced in *TP.  DATA is a
   walk_stmt_info whose info field points to a struct move_stmt_d.  */

static tree
move_stmt_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  tree t = *tp;

  if (EXPR_P (t))
    {
      tree block = TREE_BLOCK (t);
      if (block == p->orig_block
	  || (p->orig_block == NULL_TREE
	      && block != NULL_TREE))
	TREE_SET_BLOCK (t, p->new_block);
      else if (flag_checking && block != NULL_TREE)
	{
	  /* Sanity check: any other block must be nested below
	     ORIG_BLOCK.  */
	  while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
	    block = BLOCK_SUPERCONTEXT (block);
	  gcc_assert (block == p->orig_block);
	}
    }
  else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
    {
      if (TREE_CODE (t) == SSA_NAME)
	*tp = replace_ssa_name (t, p->vars_map, p->to_context);
      else if (TREE_CODE (t) == PARM_DECL
	       && gimple_in_ssa_p (cfun))
	*tp = *(p->vars_map->get (t));
      else if (TREE_CODE (t) == LABEL_DECL)
	{
	  if (p->new_label_map)
	    {
	      struct tree_map in, *out;
	      in.base.from = t;
	      out = (struct tree_map *)
		htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
	      if (out)
		*tp = t = out->to;
	    }

	  DECL_CONTEXT (t) = p->to_context;
	}
      else if (p->remap_decls_p)
	{
	  /* Replace T with its duplicate.  T should no longer appear in the
	     parent function, so this looks wasteful; however, it may appear
	     in referenced_vars, and more importantly, as virtual operands of
	     statements, and in alias lists of other variables.  It would be
	     quite difficult to expunge it from all those places.  ??? It might
	     suffice to do this for addressable variables.  */
	  if ((VAR_P (t) && !is_global_var (t))
	      || TREE_CODE (t) == CONST_DECL)
	    replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
	}
      *walk_subtrees = 0;
    }
  else if (TYPE_P (t))
    *walk_subtrees = 0;

  return NULL_TREE;
}
/* Helper for move_stmt_r.  Given an EH region number for the source
   function, map that to the duplicate EH region number in the dest
   via P->eh_map.  */

static int
move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
{
  eh_region old_r, new_r;

  old_r = get_eh_region_from_number (old_nr);
  new_r = static_cast<eh_region> (*p->eh_map->get (old_r));

  return new_r->index;
}
/* Similar, but operate on INTEGER_CSTs: OLD_T_NR holds the old region
   number as a tree; the remapped number is returned as a fresh
   integer constant.  */

static tree
move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
{
  int old_nr, new_nr;

  old_nr = tree_to_shwi (old_t_nr);
  new_nr = move_stmt_eh_region_nr (old_nr, p);

  return build_int_cst (integer_type_node, new_nr);
}
/* Like move_stmt_op, but for gimple statements.

   Helper for move_block_to_fn.  Set GIMPLE_BLOCK in every expression
   contained in the current statement in *GSI_P and change the
   DECL_CONTEXT of every local variable referenced in the current
   statement.  Also remaps EH region numbers embedded in calls to the
   __builtin_eh_* builtins and in GIMPLE_RESX / GIMPLE_EH_DISPATCH.  */

static tree
move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	     struct walk_stmt_info *wi)
{
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  gimple *stmt = gsi_stmt (*gsi_p);
  tree block = gimple_block (stmt);

  if (block == p->orig_block
      || (p->orig_block == NULL_TREE
	  && block != NULL_TREE))
    gimple_set_block (stmt, p->new_block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Remap the region numbers for __builtin_eh_{pointer,filter}.  */
      {
	tree r, fndecl = gimple_call_fndecl (stmt);
	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_EH_COPY_VALUES:
	      r = gimple_call_arg (stmt, 1);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 1, r);
	      /* FALLTHRU */

	    case BUILT_IN_EH_POINTER:
	    case BUILT_IN_EH_FILTER:
	      r = gimple_call_arg (stmt, 0);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 0, r);
	      break;

	    default:
	      break;
	    }
      }
      break;

    case GIMPLE_RESX:
      {
	gresx *resx_stmt = as_a <gresx *> (stmt);
	int r = gimple_resx_region (resx_stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_resx_set_region (resx_stmt, r);
      }
      break;

    case GIMPLE_EH_DISPATCH:
      {
	geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
	int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
      }
      break;

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_CONTINUE:
      break;
    default:
      if (is_gimple_omp (stmt))
	{
	  /* Do not remap variables inside OMP directives.  Variables
	     referenced in clauses and directive header belong to the
	     parent function and should not be moved into the child
	     function.  */
	  bool save_remap_decls_p = p->remap_decls_p;
	  p->remap_decls_p = false;
	  *handled_ops_p = true;

	  walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
			       move_stmt_op, wi);

	  p->remap_decls_p = save_remap_decls_p;
	}
      break;
    }

  return NULL_TREE;
}
/* Move basic block BB from function CFUN to function DEST_FN.  The
   block is moved out of the original linked list and placed after
   block AFTER in the new list.  Also, the block is removed from the
   original array of blocks and placed in DEST_FN's array of blocks.
   If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
   updated to reflect the moved edges.

   The local variables are remapped to new instances, VARS_MAP is used
   to record the mapping.  */

static void
move_block_to_fn (struct function *dest_cfun, basic_block bb,
		  basic_block after, bool update_edge_count_p,
		  struct move_stmt_d *d)
{
  struct control_flow_graph *cfg;
  edge_iterator ei;
  edge e;
  gimple_stmt_iterator si;
  unsigned old_len, new_len;

  /* Remove BB from dominance structures.  */
  delete_from_dominance_info (CDI_DOMINATORS, bb);

  /* Move BB from its current loop to the copy in the new function.  */
  if (current_loops)
    {
      struct loop *new_loop = (struct loop *)bb->loop_father->aux;
      if (new_loop)
	bb->loop_father = new_loop;
    }

  /* Link BB to the new linked list.  */
  move_block_after (bb, after);

  /* Update the edge count in the corresponding flowgraphs.  */
  if (update_edge_count_p)
    FOR_EACH_EDGE (e, ei, bb->succs)
      {
	cfun->cfg->x_n_edges--;
	dest_cfun->cfg->x_n_edges++;
      }

  /* Remove BB from the original basic block array.  */
  (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
  cfun->cfg->x_n_basic_blocks--;

  /* Grow DEST_CFUN's basic block array if needed.  */
  cfg = dest_cfun->cfg;
  cfg->x_n_basic_blocks++;
  if (bb->index >= cfg->x_last_basic_block)
    cfg->x_last_basic_block = bb->index + 1;

  old_len = vec_safe_length (cfg->x_basic_block_info);
  if ((unsigned) cfg->x_last_basic_block >= old_len)
    {
      new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
      vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
    }

  (*cfg->x_basic_block_info)[bb->index] = bb;

  /* Remap the variables in phi nodes.  */
  for (gphi_iterator psi = gsi_start_phis (bb);
       !gsi_end_p (psi); )
    {
      gphi *phi = psi.phi ();
      use_operand_p use;
      tree op = PHI_RESULT (phi);
      ssa_op_iter oi;
      unsigned i;

      if (virtual_operand_p (op))
	{
	  /* Remove the phi nodes for virtual operands (alias analysis will be
	     run for the new function, anyway).  */
	  remove_phi_node (&psi, true);
	  continue;
	}

      SET_PHI_RESULT (phi,
		      replace_ssa_name (op, d->vars_map, dest_cfun->decl));
      FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
	{
	  op = USE_FROM_PTR (use);
	  if (TREE_CODE (op) == SSA_NAME)
	    SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
	}

      /* Reparent argument locations that referenced ORIG_BLOCK.  */
      for (i = 0; i < EDGE_COUNT (bb->preds); i++)
	{
	  location_t locus = gimple_phi_arg_location (phi, i);
	  tree block = LOCATION_BLOCK (locus);

	  if (locus == UNKNOWN_LOCATION)
	    continue;
	  if (d->orig_block == NULL_TREE || block == d->orig_block)
	    {
	      locus = set_block (locus, d->new_block);
	      gimple_phi_arg_set_location (phi, i, locus);
	    }
	}

      gsi_next (&psi);
    }

  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    {
      gimple *stmt = gsi_stmt (si);
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      wi.info = d;
      walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);

      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	{
	  tree label = gimple_label_label (label_stmt);
	  int uid = LABEL_DECL_UID (label);

	  gcc_assert (uid > -1);

	  /* Move the label from CFUN's label-to-block map into
	     DEST_CFUN's, growing the latter if needed.  */
	  old_len = vec_safe_length (cfg->x_label_to_block_map);
	  if (old_len <= (unsigned) uid)
	    {
	      new_len = 3 * uid / 2 + 1;
	      vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
	    }

	  (*cfg->x_label_to_block_map)[uid] = bb;
	  (*cfun->cfg->x_label_to_block_map)[uid] = NULL;

	  gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);

	  if (uid >= dest_cfun->cfg->last_label_uid)
	    dest_cfun->cfg->last_label_uid = uid + 1;
	}

      /* Transfer EH landing pad info and value-profile histograms.  */
      maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
      remove_stmt_from_eh_lp_fn (cfun, stmt);

      gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
      gimple_remove_stmt_histograms (cfun, stmt);

      /* We cannot leave any operands allocated from the operand caches of
	 the current function.  */
      free_stmt_operands (cfun, stmt);
      push_cfun (dest_cfun);
      update_stmt (stmt);
      pop_cfun ();
    }

  /* Reparent goto locations on outgoing edges as well.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->goto_locus != UNKNOWN_LOCATION)
      {
	tree block = LOCATION_BLOCK (e->goto_locus);
	if (d->orig_block == NULL_TREE
	    || block == d->orig_block)
	  e->goto_locus = set_block (e->goto_locus, d->new_block);
      }
}
/* Examine the statements in BB (which is in SRC_CFUN); find and return
   the outermost EH region.  Use REGION as the incoming base EH region.
   Returns REGION unchanged if no statement in BB has an EH region.  */

static eh_region
find_outermost_region_in_block (struct function *src_cfun,
				basic_block bb, eh_region region)
{
  gimple_stmt_iterator si;

  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    {
      gimple *stmt = gsi_stmt (si);
      eh_region stmt_region;
      int lp_nr;

      lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
      stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
      if (stmt_region)
	{
	  if (region == NULL)
	    region = stmt_region;
	  else if (stmt_region != region)
	    {
	      /* Widen to the common outermost enclosing region.  */
	      region = eh_region_outermost (src_cfun, stmt_region, region);
	      gcc_assert (region != NULL);
	    }
	}
    }

  return region;
}
/* Callback used when moving labels between functions: map LABEL_DECL
   DECL to a fresh artificial label in the current function, record
   the mapping in the hash table DATA, and return the new label.
   Asserts that no mapping for DECL existed before.  */

static tree
new_label_mapper (tree decl, void *data)
{
  htab_t hash = (htab_t) data;
  struct tree_map *m;
  void **slot;

  gcc_assert (TREE_CODE (decl) == LABEL_DECL);

  m = XNEW (struct tree_map);
  m->hash = DECL_UID (decl);
  m->base.from = decl;
  m->to = create_artificial_label (UNKNOWN_LOCATION);
  LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
  if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
    cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;

  slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
  gcc_assert (*slot == NULL);

  *slot = m;

  return m->to;
}
/* Tree walker to replace the decls used inside value expressions by
   duplicates.  DATA is a struct replace_decls_d carrying the decl map
   and the destination context.  */

static tree
replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
{
  struct replace_decls_d *rd = (struct replace_decls_d *)data;

  switch (TREE_CODE (*tp))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
      break;
    default:
      break;
    }

  /* Do not descend into types or into the decls just handled.  */
  if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = false;

  return NULL;
}
7045 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7046 subblocks. */
7048 static void
7049 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7050 tree to_context)
7052 tree *tp, t;
7054 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7056 t = *tp;
7057 if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
7058 continue;
7059 replace_by_duplicate_decl (&t, vars_map, to_context);
7060 if (t != *tp)
7062 if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
7064 tree x = DECL_VALUE_EXPR (*tp);
7065 struct replace_decls_d rd = { vars_map, to_context };
7066 unshare_expr (x);
7067 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7068 SET_DECL_VALUE_EXPR (t, x);
7069 DECL_HAS_VALUE_EXPR_P (t) = 1;
7071 DECL_CHAIN (t) = DECL_CHAIN (*tp);
7072 *tp = t;
7076 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7077 replace_block_vars_by_duplicates (block, vars_map, to_context);
/* Fixup the loop arrays and numbers after moving LOOP and its subloops
   from FN1 to FN2: LOOP's slot in FN1's loop array is cleared and LOOP
   is appended to FN2's array under a freshly assigned number.  */

static void
fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
			      struct loop *loop)
{
  /* Discard it from the old loop array.  */
  (*get_loops (fn1))[loop->num] = NULL;

  /* Place it in the new loop array, assigning it a new number.  */
  loop->num = number_of_loops (fn2);
  vec_safe_push (loops_for_fn (fn2)->larray, loop);

  /* Recurse to children.  */
  for (loop = loop->inner; loop; loop = loop->next)
    fixup_loop_arrays_after_move (fn1, fn2, loop);
}
/* Verify that the blocks in BBS_P are a single-entry, single-exit region
   delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks:
   ENTRY must have its single predecessor outside the region, EXIT its
   single successor outside, and every other edge must stay inside.  */

DEBUG_FUNCTION void
verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
{
  basic_block bb;
  edge_iterator ei;
  edge e;
  bitmap bbs = BITMAP_ALLOC (NULL);
  int i;

  gcc_assert (entry != NULL);
  gcc_assert (entry != exit);
  gcc_assert (bbs_p != NULL);

  gcc_assert (bbs_p->length () > 0);

  /* Build the membership bitmap of the region.  */
  FOR_EACH_VEC_ELT (*bbs_p, i, bb)
    bitmap_set_bit (bbs, bb->index);

  gcc_assert (bitmap_bit_p (bbs, entry->index));
  gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));

  FOR_EACH_VEC_ELT (*bbs_p, i, bb)
    {
      if (bb == entry)
	{
	  /* The entry block has exactly one predecessor, outside the
	     region.  */
	  gcc_assert (single_pred_p (entry));
	  gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
	}
      else
	for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
	  {
	    e = ei_edge (ei);
	    gcc_assert (bitmap_bit_p (bbs, e->src->index));
	  }

      if (bb == exit)
	{
	  /* The exit block has exactly one successor, outside the
	     region.  */
	  gcc_assert (single_succ_p (exit));
	  gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
	}
      else
	for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
	  {
	    e = ei_edge (ei);
	    gcc_assert (bitmap_bit_p (bbs, e->dest->index));
	  }
    }

  BITMAP_FREE (bbs);
}
7153 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7155 bool
7156 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7158 bitmap release_names = (bitmap)data;
7160 if (TREE_CODE (from) != SSA_NAME)
7161 return true;
7163 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7164 return true;
7167 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7168 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7169 single basic block in the original CFG and the new basic block is
7170 returned. DEST_CFUN must not have a CFG yet.
7172 Note that the region need not be a pure SESE region. Blocks inside
7173 the region may contain calls to abort/exit. The only restriction
7174 is that ENTRY_BB should be the only entry point and it must
7175 dominate EXIT_BB.
7177 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7178 function's outermost BLOCK, move all subblocks of ORIG_BLOCK
7179 to the new function.
7181 All local variables referenced in the region are assumed to be in
7182 the corresponding BLOCK_VARS and unexpanded variable lists
7183 associated with DEST_CFUN.
7185 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7186 reimplement move_sese_region_to_fn by duplicating the region rather than
7187 moving it. */
7189 basic_block
7190 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7191 basic_block exit_bb, tree orig_block)
7193 vec<basic_block> bbs, dom_bbs;
7194 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7195 basic_block after, bb, *entry_pred, *exit_succ, abb;
7196 struct function *saved_cfun = cfun;
7197 int *entry_flag, *exit_flag;
7198 unsigned *entry_prob, *exit_prob;
7199 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7200 edge e;
7201 edge_iterator ei;
7202 htab_t new_label_map;
7203 hash_map<void *, void *> *eh_map;
7204 struct loop *loop = entry_bb->loop_father;
7205 struct loop *loop0 = get_loop (saved_cfun, 0);
7206 struct move_stmt_d d;
7208 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7209 region. */
7210 gcc_assert (entry_bb != exit_bb
7211 && (!exit_bb
7212 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7214 /* Collect all the blocks in the region. Manually add ENTRY_BB
7215 because it won't be added by dfs_enumerate_from. */
7216 bbs.create (0);
7217 bbs.safe_push (entry_bb);
7218 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7220 if (flag_checking)
7221 verify_sese (entry_bb, exit_bb, &bbs);
7223 /* The blocks that used to be dominated by something in BBS will now be
7224 dominated by the new block. */
7225 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7226 bbs.address (),
7227 bbs.length ());
7229 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7230 the predecessor edges to ENTRY_BB and the successor edges to
7231 EXIT_BB so that we can re-attach them to the new basic block that
7232 will replace the region. */
7233 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7234 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7235 entry_flag = XNEWVEC (int, num_entry_edges);
7236 entry_prob = XNEWVEC (unsigned, num_entry_edges);
7237 i = 0;
/* remove_edge shrinks the pred vector, so keep re-fetching edge 0 via
   ei_safe_edge instead of advancing the iterator.  */
7238 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7240 entry_prob[i] = e->probability;
7241 entry_flag[i] = e->flags;
7242 entry_pred[i++] = e->src;
7243 remove_edge (e);
7246 if (exit_bb)
7248 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7249 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7250 exit_flag = XNEWVEC (int, num_exit_edges);
7251 exit_prob = XNEWVEC (unsigned, num_exit_edges);
7252 i = 0;
7253 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7255 exit_prob[i] = e->probability;
7256 exit_flag[i] = e->flags;
7257 exit_succ[i++] = e->dest;
7258 remove_edge (e);
7261 else
7263 num_exit_edges = 0;
7264 exit_succ = NULL;
7265 exit_flag = NULL;
7266 exit_prob = NULL;
7269 /* Switch context to the child function to initialize DEST_FN's CFG. */
7270 gcc_assert (dest_cfun->cfg == NULL);
7271 push_cfun (dest_cfun);
7273 init_empty_tree_cfg ();
7275 /* Initialize EH information for the new function. */
7276 eh_map = NULL;
7277 new_label_map = NULL;
7278 if (saved_cfun->eh)
7280 eh_region region = NULL;
/* Find the outermost EH region covering the moved blocks; everything
   inside it must be duplicated into DEST_CFUN's EH tree.  */
7282 FOR_EACH_VEC_ELT (bbs, i, bb)
7283 region = find_outermost_region_in_block (saved_cfun, bb, region);
7285 init_eh_for_function ();
7286 if (region != NULL)
7288 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7289 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7290 new_label_mapper, new_label_map);
7294 /* Initialize an empty loop tree. */
7295 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7296 init_loops_structure (dest_cfun, loops, 1);
7297 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7298 set_loops_for_fn (dest_cfun, loops);
7300 /* Move the outlined loop tree part. */
/* NUM_NODES starts as the number of moved blocks and is adjusted below
   so that at the end it counts only the blocks leaving LOOP (blocks
   re-parented to moved sub-loops or owned by the root loop are
   subtracted).  */
7301 num_nodes = bbs.length ();
7302 FOR_EACH_VEC_ELT (bbs, i, bb)
7304 if (bb->loop_father->header == bb)
7306 struct loop *this_loop = bb->loop_father;
7307 struct loop *outer = loop_outer (this_loop);
7308 if (outer == loop
7309 /* If the SESE region contains some bbs ending with
7310 a noreturn call, those are considered to belong
7311 to the outermost loop in saved_cfun, rather than
7312 the entry_bb's loop_father. */
7313 || outer == loop0)
7315 if (outer != loop)
7316 num_nodes -= this_loop->num_nodes;
7317 flow_loop_tree_node_remove (bb->loop_father);
7318 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7319 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7322 else if (bb->loop_father == loop0 && loop0 != loop)
7323 num_nodes--;
7325 /* Remove loop exits from the outlined region. */
7326 if (loops_for_fn (saved_cfun)->exits)
7327 FOR_EACH_EDGE (e, ei, bb->succs)
7329 struct loops *l = loops_for_fn (saved_cfun);
7330 loop_exit **slot
7331 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7332 NO_INSERT);
7333 if (slot)
7334 l->exits->clear_slot (slot);
7339 /* Adjust the number of blocks in the tree root of the outlined part. */
/* +2 accounts for the new function's ENTRY and EXIT blocks.  */
7340 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7342 /* Setup a mapping to be used by move_block_to_fn. */
7343 loop->aux = current_loops->tree_root;
7344 loop0->aux = current_loops->tree_root;
7346 pop_cfun ();
7348 /* Move blocks from BBS into DEST_CFUN. */
7349 gcc_assert (bbs.length () >= 2);
7350 after = dest_cfun->cfg->x_entry_block_ptr;
7351 hash_map<tree, tree> vars_map;
7353 memset (&d, 0, sizeof (d));
7354 d.orig_block = orig_block;
7355 d.new_block = DECL_INITIAL (dest_cfun->decl);
7356 d.from_context = cfun->decl;
7357 d.to_context = dest_cfun->decl;
7358 d.vars_map = &vars_map;
7359 d.new_label_map = new_label_map;
7360 d.eh_map = eh_map;
7361 d.remap_decls_p = true;
/* Give each parameter of the new function a default-def SSA name and
   record the mapping so statement remapping can use it.  */
7363 if (gimple_in_ssa_p (cfun))
7364 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7366 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7367 set_ssa_default_def (dest_cfun, arg, narg);
7368 vars_map.put (arg, narg);
7371 FOR_EACH_VEC_ELT (bbs, i, bb)
7373 /* No need to update edge counts on the last block. It has
7374 already been updated earlier when we detached the region from
7375 the original CFG. */
7376 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7377 after = bb;
7380 loop->aux = NULL;
7381 loop0->aux = NULL;
7382 /* Loop sizes are no longer correct, fix them up. */
7383 loop->num_nodes -= num_nodes;
7384 for (struct loop *outer = loop_outer (loop);
7385 outer; outer = loop_outer (outer))
7386 outer->num_nodes -= num_nodes;
7387 loop0->num_nodes -= bbs.length () - num_nodes;
7389 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7391 struct loop *aloop;
7392 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7393 if (aloop != NULL)
7395 if (aloop->simduid)
7397 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7398 d.to_context);
7399 dest_cfun->has_simduid_loops = true;
7401 if (aloop->force_vectorize)
7402 dest_cfun->has_force_vectorize_loops = true;
7406 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7407 if (orig_block)
7409 tree block;
7410 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7411 == NULL_TREE);
7412 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7413 = BLOCK_SUBBLOCKS (orig_block);
7414 for (block = BLOCK_SUBBLOCKS (orig_block);
7415 block; block = BLOCK_CHAIN (block))
7416 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7417 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7420 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7421 &vars_map, dest_cfun->decl);
7423 if (new_label_map)
7424 htab_delete (new_label_map);
7425 if (eh_map)
7426 delete eh_map;
7428 if (gimple_in_ssa_p (cfun))
7430 /* We need to release ssa-names in a defined order, so first find them,
7431 and then iterate in ascending version order. */
7432 bitmap release_names = BITMAP_ALLOC (NULL);
7433 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7434 bitmap_iterator bi;
7435 unsigned i;
7436 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7437 release_ssa_name (ssa_name (i));
7438 BITMAP_FREE (release_names);
7441 /* Rewire the entry and exit blocks. The successor to the entry
7442 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7443 the child function. Similarly, the predecessor of DEST_FN's
7444 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7445 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7446 various CFG manipulation function get to the right CFG.
7448 FIXME, this is silly. The CFG ought to become a parameter to
7449 these helpers. */
7450 push_cfun (dest_cfun);
7451 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7452 if (exit_bb)
7453 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7454 pop_cfun ();
7456 /* Back in the original function, the SESE region has disappeared,
7457 create a new basic block in its place. */
7458 bb = create_empty_bb (entry_pred[0]);
7459 if (current_loops)
7460 add_bb_to_loop (bb, loop);
7461 for (i = 0; i < num_entry_edges; i++)
7463 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7464 e->probability = entry_prob[i];
7467 for (i = 0; i < num_exit_edges; i++)
7469 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7470 e->probability = exit_prob[i];
7473 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7474 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7475 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7476 dom_bbs.release ();
7478 if (exit_bb)
7480 free (exit_prob);
7481 free (exit_flag);
7482 free (exit_succ);
7484 free (entry_prob);
7485 free (entry_flag);
7486 free (entry_pred);
7487 bbs.release ();
7489 return bb;
7492 /* Dump default def DEF to file FILE using FLAGS and indentation
7493 SPC. */
7495 static void
7496 dump_default_def (FILE *file, tree def, int spc, int flags)
7498 for (int i = 0; i < spc; ++i)
7499 fprintf (file, " ");
7500 dump_ssaname_info_to_file (file, def, spc);
7502 print_generic_expr (file, TREE_TYPE (def), flags);
7503 fprintf (file, " ");
7504 print_generic_expr (file, def, flags);
7505 fprintf (file, " = ");
7506 print_generic_expr (file, SSA_NAME_VAR (def), flags);
7507 fprintf (file, ";\n");
7510 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h).  */
7513 void
7514 dump_function_to_file (tree fndecl, FILE *file, int flags)
7516 tree arg, var, old_current_fndecl = current_function_decl;
7517 struct function *dsf;
7518 bool ignore_topmost_bind = false, any_var = false;
7519 basic_block bb;
7520 tree chain;
7521 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7522 && decl_is_tm_clone (fndecl));
7523 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
/* First print the function's attributes, if any, as
   "__attribute__((name (value), ...))".  */
7525 if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
7527 fprintf (file, "__attribute__((");
7529 bool first = true;
/* NOTE(review): this inner CHAIN intentionally shadows the outer
   CHAIN declared above; the outer one is only used for the tree-based
   dump at the end.  */
7530 tree chain;
7531 for (chain = DECL_ATTRIBUTES (fndecl); chain;
7532 first = false, chain = TREE_CHAIN (chain))
7534 if (!first)
7535 fprintf (file, ", ");
7537 print_generic_expr (file, get_attribute_name (chain), dump_flags);
7538 if (TREE_VALUE (chain) != NULL_TREE)
7540 fprintf (file, " (");
7541 print_generic_expr (file, TREE_VALUE (chain), dump_flags);
7542 fprintf (file, ")");
7546 fprintf (file, "))\n");
7549 current_function_decl = fndecl;
/* Print the signature; the GIMPLE FE syntax differs from the plain one.  */
7550 if (flags & TDF_GIMPLE)
7552 print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
7553 dump_flags | TDF_SLIM);
7554 fprintf (file, " __GIMPLE ()\n%s (", function_name (fun));
7556 else
7557 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7559 arg = DECL_ARGUMENTS (fndecl);
7560 while (arg)
7562 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7563 fprintf (file, " ");
7564 print_generic_expr (file, arg, dump_flags);
7565 if (flags & TDF_VERBOSE)
7566 print_node (file, "", arg, 4);
7567 if (DECL_CHAIN (arg))
7568 fprintf (file, ", ");
7569 arg = DECL_CHAIN (arg);
7571 fprintf (file, ")\n");
7573 if (flags & TDF_VERBOSE)
7574 print_node (file, "", fndecl, 2);
7576 dsf = DECL_STRUCT_FUNCTION (fndecl);
7577 if (dsf && (flags & TDF_EH))
7578 dump_eh_tree (file, dsf);
/* With no GIMPLE body, a raw dump of the decl is all we can produce.  */
7580 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7582 dump_node (fndecl, TDF_SLIM | flags, file);
7583 current_function_decl = old_current_fndecl;
7584 return;
7587 /* When GIMPLE is lowered, the variables are no longer available in
7588 BIND_EXPRs, so display them separately. */
7589 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7591 unsigned ix;
7592 ignore_topmost_bind = true;
7594 fprintf (file, "{\n");
/* In SSA form with TDF_ALIAS, also show the default defs of the
   arguments, the by-reference result and the static chain.  */
7595 if (gimple_in_ssa_p (fun)
7596 && (flags & TDF_ALIAS))
7598 for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
7599 arg = DECL_CHAIN (arg))
7601 tree def = ssa_default_def (fun, arg);
7602 if (def)
7603 dump_default_def (file, def, 2, flags);
7606 tree res = DECL_RESULT (fun->decl);
7607 if (res != NULL_TREE
7608 && DECL_BY_REFERENCE (res))
7610 tree def = ssa_default_def (fun, res);
7611 if (def)
7612 dump_default_def (file, def, 2, flags);
7615 tree static_chain = fun->static_chain_decl;
7616 if (static_chain != NULL_TREE)
7618 tree def = ssa_default_def (fun, static_chain);
7619 if (def)
7620 dump_default_def (file, def, 2, flags);
7624 if (!vec_safe_is_empty (fun->local_decls))
7625 FOR_EACH_LOCAL_DECL (fun, ix, var)
7627 print_generic_decl (file, var, flags);
7628 if (flags & TDF_VERBOSE)
7629 print_node (file, "", var, 4);
7630 fprintf (file, "\n");
7632 any_var = true;
7635 tree name;
/* Anonymous SSA names have no underlying decl, so list them here.  */
7637 if (gimple_in_ssa_p (cfun))
7638 FOR_EACH_SSA_NAME (ix, name, cfun)
7640 if (!SSA_NAME_VAR (name))
7642 fprintf (file, " ");
7643 print_generic_expr (file, TREE_TYPE (name), flags);
7644 fprintf (file, " ");
7645 print_generic_expr (file, name, flags);
7646 fprintf (file, ";\n");
7648 any_var = true;
7653 if (fun && fun->decl == fndecl
7654 && fun->cfg
7655 && basic_block_info_for_fn (fun))
7657 /* If the CFG has been built, emit a CFG-based dump. */
7658 if (!ignore_topmost_bind)
7659 fprintf (file, "{\n");
7661 if (any_var && n_basic_blocks_for_fn (fun))
7662 fprintf (file, "\n");
7664 FOR_EACH_BB_FN (bb, fun)
7665 dump_bb (file, bb, 2, flags | TDF_COMMENT);
7667 fprintf (file, "}\n");
7669 else if (fun->curr_properties & PROP_gimple_any)
7671 /* The function is now in GIMPLE form but the CFG has not been
7672 built yet. Emit the single sequence of GIMPLE statements
7673 that make up its body. */
7674 gimple_seq body = gimple_body (fndecl);
7676 if (gimple_seq_first_stmt (body)
7677 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7678 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7679 print_gimple_seq (file, body, 0, flags);
7680 else
7682 if (!ignore_topmost_bind)
7683 fprintf (file, "{\n");
7685 if (any_var)
7686 fprintf (file, "\n");
7688 print_gimple_seq (file, body, 2, flags);
7689 fprintf (file, "}\n");
7692 else
7694 int indent;
7696 /* Make a tree based dump. */
7697 chain = DECL_SAVED_TREE (fndecl);
7698 if (chain && TREE_CODE (chain) == BIND_EXPR)
7700 if (ignore_topmost_bind)
7702 chain = BIND_EXPR_BODY (chain);
7703 indent = 2;
7705 else
7706 indent = 0;
7708 else
7710 if (!ignore_topmost_bind)
7712 fprintf (file, "{\n");
7713 /* No topmost bind, pretend it's ignored for later. */
7714 ignore_topmost_bind = true;
7716 indent = 2;
7719 if (any_var)
7720 fprintf (file, "\n");
7722 print_generic_stmt_indented (file, chain, flags, indent);
7723 if (ignore_topmost_bind)
7724 fprintf (file, "}\n");
7727 if (flags & TDF_ENUMERATE_LOCALS)
7728 dump_enumerated_decls (file, flags);
7729 fprintf (file, "\n\n");
/* Restore the caller's notion of the current function.  */
7731 current_function_decl = old_current_fndecl;
7734 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
7736 DEBUG_FUNCTION void
7737 debug_function (tree fn, int flags)
7739 dump_function_to_file (fn, stderr, flags);
7743 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7745 static void
7746 print_pred_bbs (FILE *file, basic_block bb)
7748 edge e;
7749 edge_iterator ei;
7751 FOR_EACH_EDGE (e, ei, bb->preds)
7752 fprintf (file, "bb_%d ", e->src->index);
7756 /* Print on FILE the indexes for the successors of basic_block BB. */
7758 static void
7759 print_succ_bbs (FILE *file, basic_block bb)
7761 edge e;
7762 edge_iterator ei;
7764 FOR_EACH_EDGE (e, ei, bb->succs)
7765 fprintf (file, "bb_%d ", e->dest->index);
7768 /* Print to FILE the basic block BB following the VERBOSITY level. */
7770 void
7771 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7773 char *s_indent = (char *) alloca ((size_t) indent + 1);
7774 memset ((void *) s_indent, ' ', (size_t) indent);
7775 s_indent[indent] = '\0';
7777 /* Print basic_block's header. */
7778 if (verbosity >= 2)
7780 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7781 print_pred_bbs (file, bb);
7782 fprintf (file, "}, succs = {");
7783 print_succ_bbs (file, bb);
7784 fprintf (file, "})\n");
7787 /* Print basic_block's body. */
7788 if (verbosity >= 3)
7790 fprintf (file, "%s {\n", s_indent);
7791 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7792 fprintf (file, "%s }\n", s_indent);
7796 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7798 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
7799 VERBOSITY level this outputs the contents of the loop, or just its
7800 structure. */
7802 static void
7803 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7805 char *s_indent;
7806 basic_block bb;
7808 if (loop == NULL)
7809 return;
7811 s_indent = (char *) alloca ((size_t) indent + 1);
7812 memset ((void *) s_indent, ' ', (size_t) indent);
7813 s_indent[indent] = '\0';
7815 /* Print loop's header. */
7816 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7817 if (loop->header)
7818 fprintf (file, "header = %d", loop->header->index);
7819 else
7821 fprintf (file, "deleted)\n");
7822 return;
7824 if (loop->latch)
7825 fprintf (file, ", latch = %d", loop->latch->index);
7826 else
7827 fprintf (file, ", multiple latches");
7828 fprintf (file, ", niter = ");
7829 print_generic_expr (file, loop->nb_iterations, 0);
7831 if (loop->any_upper_bound)
7833 fprintf (file, ", upper_bound = ");
7834 print_decu (loop->nb_iterations_upper_bound, file);
7836 if (loop->any_likely_upper_bound)
7838 fprintf (file, ", likely_upper_bound = ");
7839 print_decu (loop->nb_iterations_likely_upper_bound, file);
7842 if (loop->any_estimate)
7844 fprintf (file, ", estimate = ");
7845 print_decu (loop->nb_iterations_estimate, file);
7847 fprintf (file, ")\n");
7849 /* Print loop's body. */
7850 if (verbosity >= 1)
7852 fprintf (file, "%s{\n", s_indent);
7853 FOR_EACH_BB_FN (bb, cfun)
7854 if (bb->loop_father == loop)
7855 print_loops_bb (file, bb, indent, verbosity);
7857 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7858 fprintf (file, "%s}\n", s_indent);
7862 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7863 spaces. Following VERBOSITY level this outputs the contents of the
7864 loop, or just its structure. */
7866 static void
7867 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7868 int verbosity)
7870 if (loop == NULL)
7871 return;
7873 print_loop (file, loop, indent, verbosity);
7874 print_loop_and_siblings (file, loop->next, indent, verbosity);
7877 /* Follow a CFG edge from the entry point of the program, and on entry
7878 of a loop, pretty print the loop structure on FILE. */
7880 void
7881 print_loops (FILE *file, int verbosity)
7883 basic_block bb;
7885 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7886 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
7887 if (bb && bb->loop_father)
7888 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7891 /* Dump a loop. */
7893 DEBUG_FUNCTION void
7894 debug (struct loop &ref)
7896 print_loop (stderr, &ref, 0, /*verbosity*/0);
7899 DEBUG_FUNCTION void
7900 debug (struct loop *ptr)
7902 if (ptr)
7903 debug (*ptr);
7904 else
7905 fprintf (stderr, "<nil>\n");
7908 /* Dump a loop verbosely. */
7910 DEBUG_FUNCTION void
7911 debug_verbose (struct loop &ref)
7913 print_loop (stderr, &ref, 0, /*verbosity*/3);
7916 DEBUG_FUNCTION void
7917 debug_verbose (struct loop *ptr)
7919 if (ptr)
7920 debug (*ptr);
7921 else
7922 fprintf (stderr, "<nil>\n");
7926 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7928 DEBUG_FUNCTION void
7929 debug_loops (int verbosity)
7931 print_loops (stderr, verbosity);
7934 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7936 DEBUG_FUNCTION void
7937 debug_loop (struct loop *loop, int verbosity)
7939 print_loop (stderr, loop, 0, verbosity);
7942 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7943 level. */
7945 DEBUG_FUNCTION void
7946 debug_loop_num (unsigned num, int verbosity)
7948 debug_loop (get_loop (cfun, num), verbosity);
7951 /* Return true if BB ends with a call, possibly followed by some
7952 instructions that must stay with the call. Return false,
7953 otherwise. */
7955 static bool
7956 gimple_block_ends_with_call_p (basic_block bb)
7958 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7959 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7963 /* Return true if BB ends with a conditional branch. Return false,
7964 otherwise. */
7966 static bool
7967 gimple_block_ends_with_condjump_p (const_basic_block bb)
7969 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
7970 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7974 /* Return true if statement T may terminate execution of BB in ways not
7975 explicitly represented in the CFG. */
7977 bool
7978 stmt_can_terminate_bb_p (gimple *t)
7980 tree fndecl = NULL_TREE;
7981 int call_flags = 0;
7983 /* Eh exception not handled internally terminates execution of the whole
7984 function. */
7985 if (stmt_can_throw_external (t))
7986 return true;
7988 /* NORETURN and LONGJMP calls already have an edge to exit.
7989 CONST and PURE calls do not need one.
7990 We don't currently check for CONST and PURE here, although
7991 it would be a good idea, because those attributes are
7992 figured out from the RTL in mark_constant_function, and
7993 the counter incrementation code from -fprofile-arcs
7994 leads to different results from -fbranch-probabilities. */
7995 if (is_gimple_call (t))
7997 fndecl = gimple_call_fndecl (t);
7998 call_flags = gimple_call_flags (t);
/* Known-safe builtins (nothrow, not returns-twice, not fork) cannot
   terminate the block in a hidden way.  */
8001 if (is_gimple_call (t)
8002 && fndecl
8003 && DECL_BUILT_IN (fndecl)
8004 && (call_flags & ECF_NOTHROW)
8005 && !(call_flags & ECF_RETURNS_TWICE)
8006 /* fork() doesn't really return twice, but the effect of
8007 wrapping it in __gcov_fork() which calls __gcov_flush()
8008 and clears the counters before forking has the same
8009 effect as returning twice. Force a fake edge. */
8010 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
8011 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
8012 return false;
8014 if (is_gimple_call (t))
8016 edge_iterator ei;
8017 edge e;
8018 basic_block bb;
/* Const/pure calls (unless possibly-looping) have no hidden exits.  */
8020 if (call_flags & (ECF_PURE | ECF_CONST)
8021 && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
8022 return false;
8024 /* Function call may do longjmp, terminate program or do other things.
8025 Special case noreturn that have non-abnormal edges out as in this case
8026 the fact is sufficiently represented by lack of edges out of T. */
8027 if (!(call_flags & ECF_NORETURN))
8028 return true;
/* Noreturn call: only a non-fake outgoing edge means the CFG does not
   already capture the termination.  */
8030 bb = gimple_bb (t);
8031 FOR_EACH_EDGE (e, ei, bb->succs)
8032 if ((e->flags & EDGE_FAKE) == 0)
8033 return true;
/* Volatile or input asm may transfer control in ways the CFG cannot see.  */
8036 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
8037 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
8038 return true;
8040 return false;
8044 /* Add fake edges to the function exit for any non constant and non
8045 noreturn calls (or noreturn calls with EH/abnormal edges),
8046 volatile inline assembly in the bitmap of blocks specified by BLOCKS
8047 or to the whole CFG if BLOCKS is zero. Return the number of blocks
8048 that were split.
8050 The goal is to expose cases in which entering a basic block does
8051 not imply that all subsequent instructions must be executed. */
8053 static int
8054 gimple_flow_call_edges_add (sbitmap blocks)
8056 int i;
8057 int blocks_split = 0;
8058 int last_bb = last_basic_block_for_fn (cfun);
8059 bool check_last_block = false;
8061 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
8062 return 0;
8064 if (! blocks)
8065 check_last_block = true;
8066 else
8067 check_last_block = bitmap_bit_p (blocks,
8068 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
8070 /* In the last basic block, before epilogue generation, there will be
8071 a fallthru edge to EXIT. Special care is required if the last insn
8072 of the last basic block is a call because make_edge folds duplicate
8073 edges, which would result in the fallthru edge also being marked
8074 fake, which would result in the fallthru edge being removed by
8075 remove_fake_edges, which would result in an invalid CFG.
8077 Moreover, we can't elide the outgoing fake edge, since the block
8078 profiler needs to take this into account in order to solve the minimal
8079 spanning tree in the case that the call doesn't return.
8081 Handle this by adding a dummy instruction in a new last basic block. */
8082 if (check_last_block)
8084 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
8085 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8086 gimple *t = NULL;
8088 if (!gsi_end_p (gsi))
8089 t = gsi_stmt (gsi);
8091 if (t && stmt_can_terminate_bb_p (t))
8093 edge e;
8095 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8096 if (e)
/* Inserting a nop on the exit edge forces a new block so the fake
   edge added below will not be folded into the fallthru edge.  */
8098 gsi_insert_on_edge (e, gimple_build_nop ());
8099 gsi_commit_edge_inserts ();
8104 /* Now add fake edges to the function exit for any non constant
8105 calls since there is no way that we can determine if they will
8106 return or not... */
8107 for (i = 0; i < last_bb; i++)
8109 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8110 gimple_stmt_iterator gsi;
8111 gimple *stmt, *last_stmt;
8113 if (!bb)
8114 continue;
8116 if (blocks && !bitmap_bit_p (blocks, i))
8117 continue;
/* Walk the block's statements from the end towards the start; each
   terminating statement but the last requires splitting the block.  */
8119 gsi = gsi_last_nondebug_bb (bb);
8120 if (!gsi_end_p (gsi))
8122 last_stmt = gsi_stmt (gsi);
8125 stmt = gsi_stmt (gsi);
8126 if (stmt_can_terminate_bb_p (stmt))
8128 edge e;
8130 /* The handling above of the final block before the
8131 epilogue should be enough to verify that there is
8132 no edge to the exit block in CFG already.
8133 Calling make_edge in such case would cause us to
8134 mark that edge as fake and remove it later. */
8135 if (flag_checking && stmt == last_stmt)
8137 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8138 gcc_assert (e == NULL);
8141 /* Note that the following may create a new basic block
8142 and renumber the existing basic blocks. */
8143 if (stmt != last_stmt)
8145 e = split_block (bb, stmt);
8146 if (e)
8147 blocks_split++;
8149 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
8151 gsi_prev (&gsi);
8153 while (!gsi_end_p (gsi));
8157 if (blocks_split)
8158 verify_flow_info ();
8160 return blocks_split;
8163 /* Removes edge E and all the blocks dominated by it, and updates dominance
8164 information. The IL in E->src needs to be updated separately.
8165 If dominance info is not available, only the edge E is removed.*/
8167 void
8168 remove_edge_and_dominated_blocks (edge e)
8170 vec<basic_block> bbs_to_remove = vNULL;
8171 vec<basic_block> bbs_to_fix_dom = vNULL;
8172 bitmap df, df_idom;
8173 edge f;
8174 edge_iterator ei;
8175 bool none_removed = false;
8176 unsigned i;
8177 basic_block bb, dbb;
8178 bitmap_iterator bi;
8180 /* If we are removing a path inside a non-root loop that may change
8181 loop ownership of blocks or remove loops. Mark loops for fixup. */
8182 if (current_loops
8183 && loop_outer (e->src->loop_father) != NULL
8184 && e->src->loop_father == e->dest->loop_father)
8185 loops_state_set (LOOPS_NEED_FIXUP);
8187 if (!dom_info_available_p (CDI_DOMINATORS))
8189 remove_edge (e);
8190 return;
8193 /* No updating is needed for edges to exit. */
8194 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8196 if (cfgcleanup_altered_bbs)
8197 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8198 remove_edge (e);
8199 return;
8202 /* First, we find the basic blocks to remove. If E->dest has a predecessor
8203 that is not dominated by E->dest, then this set is empty. Otherwise,
8204 all the basic blocks dominated by E->dest are removed.
8206 Also, to DF_IDOM we store the immediate dominators of the blocks in
8207 the dominance frontier of E (i.e., of the successors of the
8208 removed blocks, if there are any, and of E->dest otherwise). */
8209 FOR_EACH_EDGE (f, ei, e->dest->preds)
8211 if (f == e)
8212 continue;
8214 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
8216 none_removed = true;
8217 break;
8221 df = BITMAP_ALLOC (NULL);
8222 df_idom = BITMAP_ALLOC (NULL);
8224 if (none_removed)
8225 bitmap_set_bit (df_idom,
8226 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
8227 else
8229 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
/* DF collects the (still live) successors of the removed blocks --
   the dominance frontier of the removed region.  */
8230 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8232 FOR_EACH_EDGE (f, ei, bb->succs)
8234 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8235 bitmap_set_bit (df, f->dest->index);
8238 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8239 bitmap_clear_bit (df, bb->index);
8241 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8243 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8244 bitmap_set_bit (df_idom,
8245 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8249 if (cfgcleanup_altered_bbs)
8251 /* Record the set of the altered basic blocks. */
8252 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8253 bitmap_ior_into (cfgcleanup_altered_bbs, df);
8256 /* Remove E and the cancelled blocks. */
8257 if (none_removed)
8258 remove_edge (e);
8259 else
8261 /* Walk backwards so as to get a chance to substitute all
8262 released DEFs into debug stmts. See
8263 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8264 details. */
8265 for (i = bbs_to_remove.length (); i-- > 0; )
8266 delete_basic_block (bbs_to_remove[i]);
8269 /* Update the dominance information. The immediate dominator may change only
8270 for blocks whose immediate dominator belongs to DF_IDOM:
8272 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8273 removal. Let Z the arbitrary block such that idom(Z) = Y and
8274 Z dominates X after the removal. Before removal, there exists a path P
8275 from Y to X that avoids Z. Let F be the last edge on P that is
8276 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8277 dominates W, and because of P, Z does not dominate W), and W belongs to
8278 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
8279 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8281 bb = BASIC_BLOCK_FOR_FN (cfun, i)
8282 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8283 dbb;
8284 dbb = next_dom_son (CDI_DOMINATORS, dbb))
8285 bbs_to_fix_dom.safe_push (dbb);
8288 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8290 BITMAP_FREE (df);
8291 BITMAP_FREE (df_idom);
8292 bbs_to_remove.release ();
8293 bbs_to_fix_dom.release ();
8296 /* Purge dead EH edges from basic block BB. */
8298 bool
8299 gimple_purge_dead_eh_edges (basic_block bb)
8301 bool changed = false;
8302 edge e;
8303 edge_iterator ei;
8304 gimple *stmt = last_stmt (bb);
8306 if (stmt && stmt_can_throw_internal (stmt))
8307 return false;
8309 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8311 if (e->flags & EDGE_EH)
8313 remove_edge_and_dominated_blocks (e);
8314 changed = true;
8316 else
8317 ei_next (&ei);
8320 return changed;
8323 /* Purge dead EH edges from basic block listed in BLOCKS. */
8325 bool
8326 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8328 bool changed = false;
8329 unsigned i;
8330 bitmap_iterator bi;
8332 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8334 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8336 /* Earlier gimple_purge_dead_eh_edges could have removed
8337 this basic block already. */
8338 gcc_assert (bb || changed);
8339 if (bb != NULL)
8340 changed |= gimple_purge_dead_eh_edges (bb);
8343 return changed;
8346 /* Purge dead abnormal call edges from basic block BB. */
8348 bool
8349 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8351 bool changed = false;
8352 edge e;
8353 edge_iterator ei;
8354 gimple *stmt = last_stmt (bb);
8356 if (!cfun->has_nonlocal_label
8357 && !cfun->calls_setjmp)
8358 return false;
8360 if (stmt && stmt_can_make_abnormal_goto (stmt))
8361 return false;
8363 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8365 if (e->flags & EDGE_ABNORMAL)
8367 if (e->flags & EDGE_FALLTHRU)
8368 e->flags &= ~EDGE_ABNORMAL;
8369 else
8370 remove_edge_and_dominated_blocks (e);
8371 changed = true;
8373 else
8374 ei_next (&ei);
8377 return changed;
8380 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
8382 bool
8383 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8385 bool changed = false;
8386 unsigned i;
8387 bitmap_iterator bi;
8389 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8391 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8393 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8394 this basic block already. */
8395 gcc_assert (bb || changed);
8396 if (bb != NULL)
8397 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8400 return changed;
8403 /* This function is called whenever a new edge is created or
8404 redirected. */
8406 static void
8407 gimple_execute_on_growing_pred (edge e)
8409 basic_block bb = e->dest;
8411 if (!gimple_seq_empty_p (phi_nodes (bb)))
8412 reserve_phi_args_for_new_edge (bb);
8415 /* This function is called immediately before edge E is removed from
8416 the edge vector E->dest->preds. */
8418 static void
8419 gimple_execute_on_shrinking_pred (edge e)
8421 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8422 remove_phi_args (e);
8425 /*---------------------------------------------------------------------------
8426 Helper functions for Loop versioning
8427 ---------------------------------------------------------------------------*/
8429 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8430 of 'first'. Both of them are dominated by 'new_head' basic block. When
8431 'new_head' was created by 'second's incoming edge it received phi arguments
8432 on the edge by split_edge(). Later, additional edge 'e' was created to
8433 connect 'new_head' and 'first'. Now this routine adds phi args on this
8434 additional edge 'e' that new_head to second edge received as part of edge
8435 splitting. */
8437 static void
8438 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8439 basic_block new_head, edge e)
8441 gphi *phi1, *phi2;
8442 gphi_iterator psi1, psi2;
8443 tree def;
8444 edge e2 = find_edge (new_head, second);
8446 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8447 edge, we should always have an edge from NEW_HEAD to SECOND. */
8448 gcc_assert (e2 != NULL);
8450 /* Browse all 'second' basic block phi nodes and add phi args to
8451 edge 'e' for 'first' head. PHI args are always in correct order. */
8453 for (psi2 = gsi_start_phis (second),
8454 psi1 = gsi_start_phis (first);
8455 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8456 gsi_next (&psi2), gsi_next (&psi1))
8458 phi1 = psi1.phi ();
8459 phi2 = psi2.phi ();
8460 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8461 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8466 /* Adds a if else statement to COND_BB with condition COND_EXPR.
8467 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
8468 the destination of the ELSE part. */
8470 static void
8471 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8472 basic_block second_head ATTRIBUTE_UNUSED,
8473 basic_block cond_bb, void *cond_e)
8475 gimple_stmt_iterator gsi;
8476 gimple *new_cond_expr;
8477 tree cond_expr = (tree) cond_e;
8478 edge e0;
8480 /* Build new conditional expr */
8481 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8482 NULL_TREE, NULL_TREE);
8484 /* Add new cond in cond_bb. */
8485 gsi = gsi_last_bb (cond_bb);
8486 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8488 /* Adjust edges appropriately to connect new head with first head
8489 as well as second head. */
8490 e0 = single_succ_edge (cond_bb);
8491 e0->flags &= ~EDGE_FALLTHRU;
8492 e0->flags |= EDGE_FALSE_VALUE;
8496 /* Do book-keeping of basic block BB for the profile consistency checker.
8497 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
8498 then do post-pass accounting. Store the counting in RECORD. */
8499 static void
8500 gimple_account_profile_record (basic_block bb, int after_pass,
8501 struct profile_record *record)
8503 gimple_stmt_iterator i;
8504 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8506 record->size[after_pass]
8507 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8508 if (profile_status_for_fn (cfun) == PROFILE_READ)
8509 record->time[after_pass]
8510 += estimate_num_insns (gsi_stmt (i),
8511 &eni_time_weights) * bb->count;
8512 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8513 record->time[after_pass]
8514 += estimate_num_insns (gsi_stmt (i),
8515 &eni_time_weights) * bb->frequency;
8519 struct cfg_hooks gimple_cfg_hooks = {
8520 "gimple",
8521 gimple_verify_flow_info,
8522 gimple_dump_bb, /* dump_bb */
8523 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8524 create_bb, /* create_basic_block */
8525 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8526 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8527 gimple_can_remove_branch_p, /* can_remove_branch_p */
8528 remove_bb, /* delete_basic_block */
8529 gimple_split_block, /* split_block */
8530 gimple_move_block_after, /* move_block_after */
8531 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8532 gimple_merge_blocks, /* merge_blocks */
8533 gimple_predict_edge, /* predict_edge */
8534 gimple_predicted_by_p, /* predicted_by_p */
8535 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8536 gimple_duplicate_bb, /* duplicate_block */
8537 gimple_split_edge, /* split_edge */
8538 gimple_make_forwarder_block, /* make_forward_block */
8539 NULL, /* tidy_fallthru_edge */
8540 NULL, /* force_nonfallthru */
8541 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8542 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8543 gimple_flow_call_edges_add, /* flow_call_edges_add */
8544 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8545 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8546 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8547 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8548 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
8549 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8550 flush_pending_stmts, /* flush_pending_stmts */
8551 gimple_empty_block_p, /* block_empty_p */
8552 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8553 gimple_account_profile_record,
8557 /* Split all critical edges. */
8559 unsigned int
8560 split_critical_edges (void)
8562 basic_block bb;
8563 edge e;
8564 edge_iterator ei;
8566 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8567 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8568 mappings around the calls to split_edge. */
8569 start_recording_case_labels ();
8570 FOR_ALL_BB_FN (bb, cfun)
8572 FOR_EACH_EDGE (e, ei, bb->succs)
8574 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8575 split_edge (e);
8576 /* PRE inserts statements to edges and expects that
8577 since split_critical_edges was done beforehand, committing edge
8578 insertions will not split more edges. In addition to critical
8579 edges we must split edges that have multiple successors and
8580 end by control flow statements, such as RESX.
8581 Go ahead and split them too. This matches the logic in
8582 gimple_find_edge_insert_loc. */
8583 else if ((!single_pred_p (e->dest)
8584 || !gimple_seq_empty_p (phi_nodes (e->dest))
8585 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8586 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8587 && !(e->flags & EDGE_ABNORMAL))
8589 gimple_stmt_iterator gsi;
8591 gsi = gsi_last_bb (e->src);
8592 if (!gsi_end_p (gsi)
8593 && stmt_ends_bb_p (gsi_stmt (gsi))
8594 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8595 && !gimple_call_builtin_p (gsi_stmt (gsi),
8596 BUILT_IN_RETURN)))
8597 split_edge (e);
8601 end_recording_case_labels ();
8602 return 0;
8605 namespace {
8607 const pass_data pass_data_split_crit_edges =
8609 GIMPLE_PASS, /* type */
8610 "crited", /* name */
8611 OPTGROUP_NONE, /* optinfo_flags */
8612 TV_TREE_SPLIT_EDGES, /* tv_id */
8613 PROP_cfg, /* properties_required */
8614 PROP_no_crit_edges, /* properties_provided */
8615 0, /* properties_destroyed */
8616 0, /* todo_flags_start */
8617 0, /* todo_flags_finish */
8620 class pass_split_crit_edges : public gimple_opt_pass
8622 public:
8623 pass_split_crit_edges (gcc::context *ctxt)
8624 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8627 /* opt_pass methods: */
8628 virtual unsigned int execute (function *) { return split_critical_edges (); }
8630 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8631 }; // class pass_split_crit_edges
8633 } // anon namespace
8635 gimple_opt_pass *
8636 make_pass_split_crit_edges (gcc::context *ctxt)
8638 return new pass_split_crit_edges (ctxt);
8642 /* Insert COND expression which is GIMPLE_COND after STMT
8643 in basic block BB with appropriate basic block split
8644 and creation of a new conditionally executed basic block.
8645 Return created basic block. */
8646 basic_block
8647 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond)
8649 edge fall = split_block (bb, stmt);
8650 gimple_stmt_iterator iter = gsi_last_bb (bb);
8651 basic_block new_bb;
8653 /* Insert cond statement. */
8654 gcc_assert (gimple_code (cond) == GIMPLE_COND);
8655 if (gsi_end_p (iter))
8656 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8657 else
8658 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
8660 /* Create conditionally executed block. */
8661 new_bb = create_empty_bb (bb);
8662 make_edge (bb, new_bb, EDGE_TRUE_VALUE);
8663 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
8665 /* Fix edge for split bb. */
8666 fall->flags = EDGE_FALSE_VALUE;
8668 /* Update dominance info. */
8669 if (dom_info_available_p (CDI_DOMINATORS))
8671 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
8672 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
8675 /* Update loop info. */
8676 if (current_loops)
8677 add_bb_to_loop (new_bb, bb->loop_father);
8679 return new_bb;
8682 /* Build a ternary operation and gimplify it. Emit code before GSI.
8683 Return the gimple_val holding the result. */
8685 tree
8686 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8687 tree type, tree a, tree b, tree c)
8689 tree ret;
8690 location_t loc = gimple_location (gsi_stmt (*gsi));
8692 ret = fold_build3_loc (loc, code, type, a, b, c);
8693 STRIP_NOPS (ret);
8695 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8696 GSI_SAME_STMT);
8699 /* Build a binary operation and gimplify it. Emit code before GSI.
8700 Return the gimple_val holding the result. */
8702 tree
8703 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8704 tree type, tree a, tree b)
8706 tree ret;
8708 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8709 STRIP_NOPS (ret);
8711 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8712 GSI_SAME_STMT);
8715 /* Build a unary operation and gimplify it. Emit code before GSI.
8716 Return the gimple_val holding the result. */
8718 tree
8719 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8720 tree a)
8722 tree ret;
8724 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8725 STRIP_NOPS (ret);
8727 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8728 GSI_SAME_STMT);
8733 /* Given a basic block B which ends with a conditional and has
8734 precisely two successors, determine which of the edges is taken if
8735 the conditional is true and which is taken if the conditional is
8736 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8738 void
8739 extract_true_false_edges_from_block (basic_block b,
8740 edge *true_edge,
8741 edge *false_edge)
8743 edge e = EDGE_SUCC (b, 0);
8745 if (e->flags & EDGE_TRUE_VALUE)
8747 *true_edge = e;
8748 *false_edge = EDGE_SUCC (b, 1);
8750 else
8752 *false_edge = e;
8753 *true_edge = EDGE_SUCC (b, 1);
8758 /* From a controlling predicate in the immediate dominator DOM of
8759 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
8760 predicate evaluates to true and false and store them to
8761 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
8762 they are non-NULL. Returns true if the edges can be determined,
8763 else return false. */
8765 bool
8766 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
8767 edge *true_controlled_edge,
8768 edge *false_controlled_edge)
8770 basic_block bb = phiblock;
8771 edge true_edge, false_edge, tem;
8772 edge e0 = NULL, e1 = NULL;
8774 /* We have to verify that one edge into the PHI node is dominated
8775 by the true edge of the predicate block and the other edge
8776 dominated by the false edge. This ensures that the PHI argument
8777 we are going to take is completely determined by the path we
8778 take from the predicate block.
8779 We can only use BB dominance checks below if the destination of
8780 the true/false edges are dominated by their edge, thus only
8781 have a single predecessor. */
8782 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
8783 tem = EDGE_PRED (bb, 0);
8784 if (tem == true_edge
8785 || (single_pred_p (true_edge->dest)
8786 && (tem->src == true_edge->dest
8787 || dominated_by_p (CDI_DOMINATORS,
8788 tem->src, true_edge->dest))))
8789 e0 = tem;
8790 else if (tem == false_edge
8791 || (single_pred_p (false_edge->dest)
8792 && (tem->src == false_edge->dest
8793 || dominated_by_p (CDI_DOMINATORS,
8794 tem->src, false_edge->dest))))
8795 e1 = tem;
8796 else
8797 return false;
8798 tem = EDGE_PRED (bb, 1);
8799 if (tem == true_edge
8800 || (single_pred_p (true_edge->dest)
8801 && (tem->src == true_edge->dest
8802 || dominated_by_p (CDI_DOMINATORS,
8803 tem->src, true_edge->dest))))
8804 e0 = tem;
8805 else if (tem == false_edge
8806 || (single_pred_p (false_edge->dest)
8807 && (tem->src == false_edge->dest
8808 || dominated_by_p (CDI_DOMINATORS,
8809 tem->src, false_edge->dest))))
8810 e1 = tem;
8811 else
8812 return false;
8813 if (!e0 || !e1)
8814 return false;
8816 if (true_controlled_edge)
8817 *true_controlled_edge = e0;
8818 if (false_controlled_edge)
8819 *false_controlled_edge = e1;
8821 return true;
8826 /* Emit return warnings. */
8828 namespace {
8830 const pass_data pass_data_warn_function_return =
8832 GIMPLE_PASS, /* type */
8833 "*warn_function_return", /* name */
8834 OPTGROUP_NONE, /* optinfo_flags */
8835 TV_NONE, /* tv_id */
8836 PROP_cfg, /* properties_required */
8837 0, /* properties_provided */
8838 0, /* properties_destroyed */
8839 0, /* todo_flags_start */
8840 0, /* todo_flags_finish */
8843 class pass_warn_function_return : public gimple_opt_pass
8845 public:
8846 pass_warn_function_return (gcc::context *ctxt)
8847 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8850 /* opt_pass methods: */
8851 virtual unsigned int execute (function *);
8853 }; // class pass_warn_function_return
8855 unsigned int
8856 pass_warn_function_return::execute (function *fun)
8858 source_location location;
8859 gimple *last;
8860 edge e;
8861 edge_iterator ei;
8863 if (!targetm.warn_func_return (fun->decl))
8864 return 0;
8866 /* If we have a path to EXIT, then we do return. */
8867 if (TREE_THIS_VOLATILE (fun->decl)
8868 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
8870 location = UNKNOWN_LOCATION;
8871 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8873 last = last_stmt (e->src);
8874 if ((gimple_code (last) == GIMPLE_RETURN
8875 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
8876 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
8877 break;
8879 if (location == UNKNOWN_LOCATION)
8880 location = cfun->function_end_locus;
8881 warning_at (location, 0, "%<noreturn%> function does return");
8884 /* If we see "return;" in some basic block, then we do reach the end
8885 without returning a value. */
8886 else if (warn_return_type
8887 && !TREE_NO_WARNING (fun->decl)
8888 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
8889 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
8891 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8893 gimple *last = last_stmt (e->src);
8894 greturn *return_stmt = dyn_cast <greturn *> (last);
8895 if (return_stmt
8896 && gimple_return_retval (return_stmt) == NULL
8897 && !gimple_no_warning_p (last))
8899 location = gimple_location (last);
8900 if (location == UNKNOWN_LOCATION)
8901 location = fun->function_end_locus;
8902 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
8903 TREE_NO_WARNING (fun->decl) = 1;
8904 break;
8908 return 0;
8911 } // anon namespace
8913 gimple_opt_pass *
8914 make_pass_warn_function_return (gcc::context *ctxt)
8916 return new pass_warn_function_return (ctxt);
8919 /* Walk a gimplified function and warn for functions whose return value is
8920 ignored and attribute((warn_unused_result)) is set. This is done before
8921 inlining, so we don't have to worry about that. */
8923 static void
8924 do_warn_unused_result (gimple_seq seq)
8926 tree fdecl, ftype;
8927 gimple_stmt_iterator i;
8929 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8931 gimple *g = gsi_stmt (i);
8933 switch (gimple_code (g))
8935 case GIMPLE_BIND:
8936 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
8937 break;
8938 case GIMPLE_TRY:
8939 do_warn_unused_result (gimple_try_eval (g));
8940 do_warn_unused_result (gimple_try_cleanup (g));
8941 break;
8942 case GIMPLE_CATCH:
8943 do_warn_unused_result (gimple_catch_handler (
8944 as_a <gcatch *> (g)));
8945 break;
8946 case GIMPLE_EH_FILTER:
8947 do_warn_unused_result (gimple_eh_filter_failure (g));
8948 break;
8950 case GIMPLE_CALL:
8951 if (gimple_call_lhs (g))
8952 break;
8953 if (gimple_call_internal_p (g))
8954 break;
8956 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8957 LHS. All calls whose value is ignored should be
8958 represented like this. Look for the attribute. */
8959 fdecl = gimple_call_fndecl (g);
8960 ftype = gimple_call_fntype (g);
8962 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8964 location_t loc = gimple_location (g);
8966 if (fdecl)
8967 warning_at (loc, OPT_Wunused_result,
8968 "ignoring return value of %qD, "
8969 "declared with attribute warn_unused_result",
8970 fdecl);
8971 else
8972 warning_at (loc, OPT_Wunused_result,
8973 "ignoring return value of function "
8974 "declared with attribute warn_unused_result");
8976 break;
8978 default:
8979 /* Not a container, not a call, or a call whose value is used. */
8980 break;
8985 namespace {
8987 const pass_data pass_data_warn_unused_result =
8989 GIMPLE_PASS, /* type */
8990 "*warn_unused_result", /* name */
8991 OPTGROUP_NONE, /* optinfo_flags */
8992 TV_NONE, /* tv_id */
8993 PROP_gimple_any, /* properties_required */
8994 0, /* properties_provided */
8995 0, /* properties_destroyed */
8996 0, /* todo_flags_start */
8997 0, /* todo_flags_finish */
9000 class pass_warn_unused_result : public gimple_opt_pass
9002 public:
9003 pass_warn_unused_result (gcc::context *ctxt)
9004 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
9007 /* opt_pass methods: */
9008 virtual bool gate (function *) { return flag_warn_unused_result; }
9009 virtual unsigned int execute (function *)
9011 do_warn_unused_result (gimple_body (current_function_decl));
9012 return 0;
9015 }; // class pass_warn_unused_result
9017 } // anon namespace
9019 gimple_opt_pass *
9020 make_pass_warn_unused_result (gcc::context *ctxt)
9022 return new pass_warn_unused_result (ctxt);
9025 /* IPA passes, compilation of earlier functions or inlining
9026 might have changed some properties, such as marked functions nothrow,
9027 pure, const or noreturn.
9028 Remove redundant edges and basic blocks, and create new ones if necessary.
9030 This pass can't be executed as stand alone pass from pass manager, because
9031 in between inlining and this fixup the verify_flow_info would fail. */
9033 unsigned int
9034 execute_fixup_cfg (void)
9036 basic_block bb;
9037 gimple_stmt_iterator gsi;
9038 int todo = 0;
9039 gcov_type count_scale;
9040 edge e;
9041 edge_iterator ei;
9042 cgraph_node *node = cgraph_node::get (current_function_decl);
9044 count_scale
9045 = GCOV_COMPUTE_SCALE (node->count, ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
9047 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
9048 EXIT_BLOCK_PTR_FOR_FN (cfun)->count
9049 = apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count, count_scale);
9051 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
9052 e->count = apply_scale (e->count, count_scale);
9054 FOR_EACH_BB_FN (bb, cfun)
9056 bb->count = apply_scale (bb->count, count_scale);
9057 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
9059 gimple *stmt = gsi_stmt (gsi);
9060 tree decl = is_gimple_call (stmt)
9061 ? gimple_call_fndecl (stmt)
9062 : NULL;
9063 if (decl)
9065 int flags = gimple_call_flags (stmt);
9066 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
9068 if (gimple_purge_dead_abnormal_call_edges (bb))
9069 todo |= TODO_cleanup_cfg;
9071 if (gimple_in_ssa_p (cfun))
9073 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9074 update_stmt (stmt);
9078 if (flags & ECF_NORETURN
9079 && fixup_noreturn_call (stmt))
9080 todo |= TODO_cleanup_cfg;
9083 /* Remove stores to variables we marked write-only.
9084 Keep access when store has side effect, i.e. in case when source
9085 is volatile. */
9086 if (gimple_store_p (stmt)
9087 && !gimple_has_side_effects (stmt))
9089 tree lhs = get_base_address (gimple_get_lhs (stmt));
9091 if (VAR_P (lhs)
9092 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9093 && varpool_node::get (lhs)->writeonly)
9095 unlink_stmt_vdef (stmt);
9096 gsi_remove (&gsi, true);
9097 release_defs (stmt);
9098 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9099 continue;
9102 /* For calls we can simply remove LHS when it is known
9103 to be write-only. */
9104 if (is_gimple_call (stmt)
9105 && gimple_get_lhs (stmt))
9107 tree lhs = get_base_address (gimple_get_lhs (stmt));
9109 if (VAR_P (lhs)
9110 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9111 && varpool_node::get (lhs)->writeonly)
9113 gimple_call_set_lhs (stmt, NULL);
9114 update_stmt (stmt);
9115 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9119 if (maybe_clean_eh_stmt (stmt)
9120 && gimple_purge_dead_eh_edges (bb))
9121 todo |= TODO_cleanup_cfg;
9122 gsi_next (&gsi);
9125 FOR_EACH_EDGE (e, ei, bb->succs)
9126 e->count = apply_scale (e->count, count_scale);
9128 /* If we have a basic block with no successors that does not
9129 end with a control statement or a noreturn call end it with
9130 a call to __builtin_unreachable. This situation can occur
9131 when inlining a noreturn call that does in fact return. */
9132 if (EDGE_COUNT (bb->succs) == 0)
9134 gimple *stmt = last_stmt (bb);
9135 if (!stmt
9136 || (!is_ctrl_stmt (stmt)
9137 && (!is_gimple_call (stmt)
9138 || !gimple_call_noreturn_p (stmt))))
9140 if (stmt && is_gimple_call (stmt))
9141 gimple_call_set_ctrl_altering (stmt, false);
9142 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9143 stmt = gimple_build_call (fndecl, 0);
9144 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9145 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
9146 if (!cfun->after_inlining)
9148 gcall *call_stmt = dyn_cast <gcall *> (stmt);
9149 int freq
9150 = compute_call_stmt_bb_frequency (current_function_decl,
9151 bb);
9152 node->create_edge (cgraph_node::get_create (fndecl),
9153 call_stmt, bb->count, freq);
9158 if (count_scale != REG_BR_PROB_BASE)
9159 compute_function_frequency ();
9161 if (current_loops
9162 && (todo & TODO_cleanup_cfg))
9163 loops_state_set (LOOPS_NEED_FIXUP);
9165 return todo;
9168 namespace {
9170 const pass_data pass_data_fixup_cfg =
9172 GIMPLE_PASS, /* type */
9173 "fixup_cfg", /* name */
9174 OPTGROUP_NONE, /* optinfo_flags */
9175 TV_NONE, /* tv_id */
9176 PROP_cfg, /* properties_required */
9177 0, /* properties_provided */
9178 0, /* properties_destroyed */
9179 0, /* todo_flags_start */
9180 0, /* todo_flags_finish */
9183 class pass_fixup_cfg : public gimple_opt_pass
9185 public:
9186 pass_fixup_cfg (gcc::context *ctxt)
9187 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
9190 /* opt_pass methods: */
9191 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
9192 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
9194 }; // class pass_fixup_cfg
9196 } // anon namespace
9198 gimple_opt_pass *
9199 make_pass_fixup_cfg (gcc::context *ctxt)
9201 return new pass_fixup_cfg (ctxt);
9204 /* Garbage collection support for edge_def. */
9206 extern void gt_ggc_mx (tree&);
9207 extern void gt_ggc_mx (gimple *&);
9208 extern void gt_ggc_mx (rtx&);
9209 extern void gt_ggc_mx (basic_block&);
9211 static void
9212 gt_ggc_mx (rtx_insn *& x)
9214 if (x)
9215 gt_ggc_mx_rtx_def ((void *) x);
9218 void
9219 gt_ggc_mx (edge_def *e)
9221 tree block = LOCATION_BLOCK (e->goto_locus);
9222 gt_ggc_mx (e->src);
9223 gt_ggc_mx (e->dest);
9224 if (current_ir_type () == IR_GIMPLE)
9225 gt_ggc_mx (e->insns.g);
9226 else
9227 gt_ggc_mx (e->insns.r);
9228 gt_ggc_mx (block);
9231 /* PCH support for edge_def. */
9233 extern void gt_pch_nx (tree&);
9234 extern void gt_pch_nx (gimple *&);
9235 extern void gt_pch_nx (rtx&);
9236 extern void gt_pch_nx (basic_block&);
9238 static void
9239 gt_pch_nx (rtx_insn *& x)
9241 if (x)
9242 gt_pch_nx_rtx_def ((void *) x);
9245 void
9246 gt_pch_nx (edge_def *e)
9248 tree block = LOCATION_BLOCK (e->goto_locus);
9249 gt_pch_nx (e->src);
9250 gt_pch_nx (e->dest);
9251 if (current_ir_type () == IR_GIMPLE)
9252 gt_pch_nx (e->insns.g);
9253 else
9254 gt_pch_nx (e->insns.r);
9255 gt_pch_nx (block);
9258 void
9259 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
9261 tree block = LOCATION_BLOCK (e->goto_locus);
9262 op (&(e->src), cookie);
9263 op (&(e->dest), cookie);
9264 if (current_ir_type () == IR_GIMPLE)
9265 op (&(e->insns.g), cookie);
9266 else
9267 op (&(e->insns.r), cookie);
9268 op (&(block), cookie);
9271 #if CHECKING_P
9273 namespace selftest {
9275 /* Helper function for CFG selftests: create a dummy function decl
9276 and push it as cfun. */
9278 static tree
9279 push_fndecl (const char *name)
9281 tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
9282 /* FIXME: this uses input_location: */
9283 tree fndecl = build_fn_decl (name, fn_type);
9284 tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
9285 NULL_TREE, integer_type_node);
9286 DECL_RESULT (fndecl) = retval;
9287 push_struct_function (fndecl);
9288 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9289 ASSERT_TRUE (fun != NULL);
9290 init_empty_tree_cfg_for_function (fun);
9291 ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
9292 ASSERT_EQ (0, n_edges_for_fn (fun));
9293 return fndecl;
9296 /* These tests directly create CFGs.
9297 Compare with the static fns within tree-cfg.c:
9298 - build_gimple_cfg
9299 - make_blocks: calls create_basic_block (seq, bb);
9300 - make_edges. */
9302 /* Verify a simple cfg of the form:
9303 ENTRY -> A -> B -> C -> EXIT. */
9305 static void
9306 test_linear_chain ()
9308 gimple_register_cfg_hooks ();
9310 tree fndecl = push_fndecl ("cfg_test_linear_chain");
9311 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9313 /* Create some empty blocks. */
9314 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9315 basic_block bb_b = create_empty_bb (bb_a);
9316 basic_block bb_c = create_empty_bb (bb_b);
9318 ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
9319 ASSERT_EQ (0, n_edges_for_fn (fun));
9321 /* Create some edges: a simple linear chain of BBs. */
9322 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9323 make_edge (bb_a, bb_b, 0);
9324 make_edge (bb_b, bb_c, 0);
9325 make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9327 /* Verify the edges. */
9328 ASSERT_EQ (4, n_edges_for_fn (fun));
9329 ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
9330 ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
9331 ASSERT_EQ (1, bb_a->preds->length ());
9332 ASSERT_EQ (1, bb_a->succs->length ());
9333 ASSERT_EQ (1, bb_b->preds->length ());
9334 ASSERT_EQ (1, bb_b->succs->length ());
9335 ASSERT_EQ (1, bb_c->preds->length ());
9336 ASSERT_EQ (1, bb_c->succs->length ());
9337 ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
9338 ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);
9340 /* Verify the dominance information
9341 Each BB in our simple chain should be dominated by the one before
9342 it. */
9343 calculate_dominance_info (CDI_DOMINATORS);
9344 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9345 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9346 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9347 ASSERT_EQ (1, dom_by_b.length ());
9348 ASSERT_EQ (bb_c, dom_by_b[0]);
9349 free_dominance_info (CDI_DOMINATORS);
9350 dom_by_b.release ();
9352 /* Similarly for post-dominance: each BB in our chain is post-dominated
9353 by the one after it. */
9354 calculate_dominance_info (CDI_POST_DOMINATORS);
9355 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9356 ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9357 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9358 ASSERT_EQ (1, postdom_by_b.length ());
9359 ASSERT_EQ (bb_a, postdom_by_b[0]);
9360 free_dominance_info (CDI_POST_DOMINATORS);
9361 postdom_by_b.release ();
9363 pop_cfun ();
9366 /* Verify a simple CFG of the form:
9367 ENTRY
9371 /t \f
9377 EXIT. */
9379 static void
9380 test_diamond ()
9382 gimple_register_cfg_hooks ();
9384 tree fndecl = push_fndecl ("cfg_test_diamond");
9385 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9387 /* Create some empty blocks. */
9388 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9389 basic_block bb_b = create_empty_bb (bb_a);
9390 basic_block bb_c = create_empty_bb (bb_a);
9391 basic_block bb_d = create_empty_bb (bb_b);
9393 ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
9394 ASSERT_EQ (0, n_edges_for_fn (fun));
9396 /* Create the edges. */
9397 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9398 make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
9399 make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
9400 make_edge (bb_b, bb_d, 0);
9401 make_edge (bb_c, bb_d, 0);
9402 make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9404 /* Verify the edges. */
9405 ASSERT_EQ (6, n_edges_for_fn (fun));
9406 ASSERT_EQ (1, bb_a->preds->length ());
9407 ASSERT_EQ (2, bb_a->succs->length ());
9408 ASSERT_EQ (1, bb_b->preds->length ());
9409 ASSERT_EQ (1, bb_b->succs->length ());
9410 ASSERT_EQ (1, bb_c->preds->length ());
9411 ASSERT_EQ (1, bb_c->succs->length ());
9412 ASSERT_EQ (2, bb_d->preds->length ());
9413 ASSERT_EQ (1, bb_d->succs->length ());
9415 /* Verify the dominance information. */
9416 calculate_dominance_info (CDI_DOMINATORS);
9417 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9418 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9419 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
9420 vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
9421 ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order. */
9422 dom_by_a.release ();
9423 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9424 ASSERT_EQ (0, dom_by_b.length ());
9425 dom_by_b.release ();
9426 free_dominance_info (CDI_DOMINATORS);
9428 /* Similarly for post-dominance. */
9429 calculate_dominance_info (CDI_POST_DOMINATORS);
9430 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9431 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9432 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
9433 vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
9434 ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order. */
9435 postdom_by_d.release ();
9436 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9437 ASSERT_EQ (0, postdom_by_b.length ());
9438 postdom_by_b.release ();
9439 free_dominance_info (CDI_POST_DOMINATORS);
9441 pop_cfun ();
/* Verify that we can handle a CFG containing a "complete" aka
   fully-connected subgraph (where A B C D below all have edges
   pointing to each other node, also to themselves).
   e.g.:
     ENTRY  EXIT
       |    ^
       \    /
        \  /
        A<--->B
        ^^   ^^
        | \ / |
        |  X  |
        | / \ |
        VV   VV
        C<--->D
*/
9463 static void
9464 test_fully_connected ()
9466 gimple_register_cfg_hooks ();
9468 tree fndecl = push_fndecl ("cfg_fully_connected");
9469 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9471 const int n = 4;
9473 /* Create some empty blocks. */
9474 auto_vec <basic_block> subgraph_nodes;
9475 for (int i = 0; i < n; i++)
9476 subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));
9478 ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
9479 ASSERT_EQ (0, n_edges_for_fn (fun));
9481 /* Create the edges. */
9482 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
9483 make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9484 for (int i = 0; i < n; i++)
9485 for (int j = 0; j < n; j++)
9486 make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);
9488 /* Verify the edges. */
9489 ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
9490 /* The first one is linked to ENTRY/EXIT as well as itself and
9491 everything else. */
9492 ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
9493 ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
9494 /* The other ones in the subgraph are linked to everything in
9495 the subgraph (including themselves). */
9496 for (int i = 1; i < n; i++)
9498 ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
9499 ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
9502 /* Verify the dominance information. */
9503 calculate_dominance_info (CDI_DOMINATORS);
9504 /* The initial block in the subgraph should be dominated by ENTRY. */
9505 ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
9506 get_immediate_dominator (CDI_DOMINATORS,
9507 subgraph_nodes[0]));
9508 /* Every other block in the subgraph should be dominated by the
9509 initial block. */
9510 for (int i = 1; i < n; i++)
9511 ASSERT_EQ (subgraph_nodes[0],
9512 get_immediate_dominator (CDI_DOMINATORS,
9513 subgraph_nodes[i]));
9514 free_dominance_info (CDI_DOMINATORS);
9516 /* Similarly for post-dominance. */
9517 calculate_dominance_info (CDI_POST_DOMINATORS);
9518 /* The initial block in the subgraph should be postdominated by EXIT. */
9519 ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
9520 get_immediate_dominator (CDI_POST_DOMINATORS,
9521 subgraph_nodes[0]));
9522 /* Every other block in the subgraph should be postdominated by the
9523 initial block, since that leads to EXIT. */
9524 for (int i = 1; i < n; i++)
9525 ASSERT_EQ (subgraph_nodes[0],
9526 get_immediate_dominator (CDI_POST_DOMINATORS,
9527 subgraph_nodes[i]));
9528 free_dominance_info (CDI_POST_DOMINATORS);
9530 pop_cfun ();
/* Run all of the selftests within this file.  */
9535 void
9536 tree_cfg_c_tests ()
9538 test_linear_chain ();
9539 test_diamond ();
9540 test_fully_connected ();
9543 } // namespace selftest
/* TODO: test the dominator/postdominator logic with various graphs/nodes:
   - loop
   - nested loops
   - switch statement (a block with many out-edges)
   - something that jumps to itself
   - etc  */
9552 #endif /* CHECKING_P */